openai-agents 0.2.6__py3-none-any.whl → 0.6.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agents/__init__.py +105 -4
- agents/_debug.py +15 -4
- agents/_run_impl.py +1203 -96
- agents/agent.py +294 -21
- agents/apply_diff.py +329 -0
- agents/editor.py +47 -0
- agents/exceptions.py +35 -0
- agents/extensions/experimental/__init__.py +6 -0
- agents/extensions/experimental/codex/__init__.py +92 -0
- agents/extensions/experimental/codex/codex.py +89 -0
- agents/extensions/experimental/codex/codex_options.py +35 -0
- agents/extensions/experimental/codex/codex_tool.py +1142 -0
- agents/extensions/experimental/codex/events.py +162 -0
- agents/extensions/experimental/codex/exec.py +263 -0
- agents/extensions/experimental/codex/items.py +245 -0
- agents/extensions/experimental/codex/output_schema_file.py +50 -0
- agents/extensions/experimental/codex/payloads.py +31 -0
- agents/extensions/experimental/codex/thread.py +214 -0
- agents/extensions/experimental/codex/thread_options.py +54 -0
- agents/extensions/experimental/codex/turn_options.py +36 -0
- agents/extensions/handoff_filters.py +13 -1
- agents/extensions/memory/__init__.py +120 -0
- agents/extensions/memory/advanced_sqlite_session.py +1285 -0
- agents/extensions/memory/async_sqlite_session.py +239 -0
- agents/extensions/memory/dapr_session.py +423 -0
- agents/extensions/memory/encrypt_session.py +185 -0
- agents/extensions/memory/redis_session.py +261 -0
- agents/extensions/memory/sqlalchemy_session.py +334 -0
- agents/extensions/models/litellm_model.py +449 -36
- agents/extensions/models/litellm_provider.py +3 -1
- agents/function_schema.py +47 -5
- agents/guardrail.py +16 -2
- agents/{handoffs.py → handoffs/__init__.py} +89 -47
- agents/handoffs/history.py +268 -0
- agents/items.py +238 -13
- agents/lifecycle.py +75 -14
- agents/mcp/server.py +280 -37
- agents/mcp/util.py +24 -3
- agents/memory/__init__.py +22 -2
- agents/memory/openai_conversations_session.py +91 -0
- agents/memory/openai_responses_compaction_session.py +249 -0
- agents/memory/session.py +19 -261
- agents/memory/sqlite_session.py +275 -0
- agents/memory/util.py +20 -0
- agents/model_settings.py +18 -3
- agents/models/__init__.py +13 -0
- agents/models/chatcmpl_converter.py +303 -50
- agents/models/chatcmpl_helpers.py +63 -0
- agents/models/chatcmpl_stream_handler.py +290 -68
- agents/models/default_models.py +58 -0
- agents/models/interface.py +4 -0
- agents/models/openai_chatcompletions.py +103 -48
- agents/models/openai_provider.py +10 -4
- agents/models/openai_responses.py +167 -46
- agents/realtime/__init__.py +4 -0
- agents/realtime/_util.py +14 -3
- agents/realtime/agent.py +7 -0
- agents/realtime/audio_formats.py +53 -0
- agents/realtime/config.py +78 -10
- agents/realtime/events.py +18 -0
- agents/realtime/handoffs.py +2 -2
- agents/realtime/items.py +17 -1
- agents/realtime/model.py +13 -0
- agents/realtime/model_events.py +12 -0
- agents/realtime/model_inputs.py +18 -1
- agents/realtime/openai_realtime.py +700 -151
- agents/realtime/session.py +309 -32
- agents/repl.py +7 -3
- agents/result.py +197 -38
- agents/run.py +1053 -178
- agents/run_context.py +13 -2
- agents/stream_events.py +1 -0
- agents/strict_schema.py +14 -0
- agents/tool.py +413 -15
- agents/tool_context.py +22 -1
- agents/tool_guardrails.py +279 -0
- agents/tracing/__init__.py +2 -0
- agents/tracing/config.py +9 -0
- agents/tracing/create.py +4 -0
- agents/tracing/processor_interface.py +84 -11
- agents/tracing/processors.py +65 -54
- agents/tracing/provider.py +64 -7
- agents/tracing/spans.py +105 -0
- agents/tracing/traces.py +116 -16
- agents/usage.py +134 -12
- agents/util/_json.py +19 -1
- agents/util/_transforms.py +12 -2
- agents/voice/input.py +5 -4
- agents/voice/models/openai_stt.py +17 -9
- agents/voice/pipeline.py +2 -0
- agents/voice/pipeline_config.py +4 -0
- {openai_agents-0.2.6.dist-info → openai_agents-0.6.8.dist-info}/METADATA +44 -19
- openai_agents-0.6.8.dist-info/RECORD +134 -0
- {openai_agents-0.2.6.dist-info → openai_agents-0.6.8.dist-info}/WHEEL +1 -1
- openai_agents-0.2.6.dist-info/RECORD +0 -103
- {openai_agents-0.2.6.dist-info → openai_agents-0.6.8.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import tempfile
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from typing import Any, Callable
|
|
9
|
+
|
|
10
|
+
from agents.exceptions import UserError
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
|
|
14
|
+
class OutputSchemaFile:
|
|
15
|
+
# Holds the on-disk schema path and cleanup callback.
|
|
16
|
+
schema_path: str | None
|
|
17
|
+
cleanup: Callable[[], None]
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _is_plain_json_object(schema: Any) -> bool:
|
|
21
|
+
return isinstance(schema, dict)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def create_output_schema_file(schema: dict[str, Any] | None) -> OutputSchemaFile:
    """Materialize a JSON schema into a temp file for the Codex CLI.

    Args:
        schema: A plain JSON object (``dict``) or ``None``.

    Returns:
        An ``OutputSchemaFile`` whose ``schema_path`` is ``None`` when no
        schema was given, and whose ``cleanup`` removes the temp directory.

    Raises:
        UserError: If *schema* is neither ``None`` nor a plain ``dict``.
    """
    if schema is None:
        # No schema means there is no temp file to manage.
        return OutputSchemaFile(schema_path=None, cleanup=lambda: None)

    if not _is_plain_json_object(schema):
        raise UserError("output_schema must be a plain JSON object")

    # The Codex CLI expects a schema file path, so write to a temp directory.
    schema_dir = tempfile.mkdtemp(prefix="codex-output-schema-")
    schema_path = os.path.join(schema_dir, "schema.json")

    def cleanup() -> None:
        # Best-effort cleanup since this runs in finally blocks.
        # ``ignore_errors=True`` already suppresses removal failures, so the
        # previous ``try/except Exception: pass`` wrapper was redundant.
        shutil.rmtree(schema_dir, ignore_errors=True)

    try:
        with open(schema_path, "w", encoding="utf-8") as handle:
            json.dump(schema, handle)
        return OutputSchemaFile(schema_path=schema_path, cleanup=cleanup)
    except Exception:
        # Don't leak the temp directory if serialization or the write fails.
        cleanup()
        raise
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import dataclasses
|
|
4
|
+
from collections.abc import Iterable
|
|
5
|
+
from typing import Any, cast
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class _DictLike:
|
|
9
|
+
def __getitem__(self, key: str) -> Any:
|
|
10
|
+
if key in self._field_names():
|
|
11
|
+
return getattr(self, key)
|
|
12
|
+
raise KeyError(key)
|
|
13
|
+
|
|
14
|
+
def get(self, key: str, default: Any = None) -> Any:
|
|
15
|
+
if key in self._field_names():
|
|
16
|
+
return getattr(self, key)
|
|
17
|
+
return default
|
|
18
|
+
|
|
19
|
+
def __contains__(self, key: object) -> bool:
|
|
20
|
+
if not isinstance(key, str):
|
|
21
|
+
return False
|
|
22
|
+
return key in self._field_names()
|
|
23
|
+
|
|
24
|
+
def keys(self) -> Iterable[str]:
|
|
25
|
+
return iter(self._field_names())
|
|
26
|
+
|
|
27
|
+
def as_dict(self) -> dict[str, Any]:
|
|
28
|
+
return dataclasses.asdict(cast(Any, self))
|
|
29
|
+
|
|
30
|
+
def _field_names(self) -> list[str]:
|
|
31
|
+
return [field.name for field in dataclasses.fields(cast(Any, self))]
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import contextlib
|
|
5
|
+
from collections.abc import AsyncGenerator
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from typing import Any, Union, cast
|
|
8
|
+
|
|
9
|
+
from typing_extensions import Literal, TypeAlias, TypedDict
|
|
10
|
+
|
|
11
|
+
from .codex_options import CodexOptions
|
|
12
|
+
from .events import (
|
|
13
|
+
ItemCompletedEvent,
|
|
14
|
+
ThreadError,
|
|
15
|
+
ThreadErrorEvent,
|
|
16
|
+
ThreadEvent,
|
|
17
|
+
ThreadStartedEvent,
|
|
18
|
+
TurnCompletedEvent,
|
|
19
|
+
TurnFailedEvent,
|
|
20
|
+
Usage,
|
|
21
|
+
coerce_thread_event,
|
|
22
|
+
)
|
|
23
|
+
from .exec import CodexExec, CodexExecArgs
|
|
24
|
+
from .items import ThreadItem, is_agent_message_item
|
|
25
|
+
from .output_schema_file import create_output_schema_file
|
|
26
|
+
from .thread_options import ThreadOptions
|
|
27
|
+
from .turn_options import TurnOptions
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@contextlib.asynccontextmanager
|
|
31
|
+
async def _aclosing(
|
|
32
|
+
generator: AsyncGenerator[str, None],
|
|
33
|
+
) -> AsyncGenerator[AsyncGenerator[str, None], None]:
|
|
34
|
+
try:
|
|
35
|
+
yield generator
|
|
36
|
+
finally:
|
|
37
|
+
await generator.aclose()
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class TextInput(TypedDict):
|
|
41
|
+
type: Literal["text"]
|
|
42
|
+
text: str
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class LocalImageInput(TypedDict):
|
|
46
|
+
type: Literal["local_image"]
|
|
47
|
+
path: str
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
UserInput: TypeAlias = Union[TextInput, LocalImageInput]
|
|
51
|
+
Input: TypeAlias = Union[str, list[UserInput]]
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@dataclass(frozen=True)
class Turn:
    """Aggregated, immutable result of one completed Codex turn."""

    # Every completed item produced during the turn, in arrival order.
    items: list[ThreadItem]
    # Text of the last agent message observed, or "" if none was produced.
    final_response: str
    # Token usage reported with turn completion, when available.
    usage: Usage | None


# Backwards-compatible alias for the aggregated turn result.
RunResult = Turn
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
@dataclass(frozen=True)
class StreamedTurn:
    """Handle to an in-progress turn's event stream."""

    # Async stream of thread events; intended to be consumed exactly once.
    events: AsyncGenerator[ThreadEvent, None]


# Backwards-compatible alias for the streamed turn handle.
RunStreamedResult = StreamedTurn
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class Thread:
    """One Codex CLI conversation thread.

    A thread starts without an id; the id is captured from the first
    ``ThreadStartedEvent`` so later turns on the same instance resume the
    same CLI conversation.
    """

    def __init__(
        self,
        *,
        exec_client: CodexExec,
        options: CodexOptions,
        thread_options: ThreadOptions,
        thread_id: str | None = None,
    ) -> None:
        self._exec = exec_client
        self._options = options
        # None until the CLI reports a thread id (or a resume id was given).
        self._id = thread_id
        self._thread_options = thread_options

    @property
    def id(self) -> str | None:
        """The CLI-assigned thread id, or None before the first turn starts."""
        return self._id

    async def run_streamed(
        self, input: Input, turn_options: TurnOptions | None = None
    ) -> StreamedTurn:
        """Start a turn and return a handle whose ``events`` streams it lazily."""
        options = turn_options or TurnOptions()
        return StreamedTurn(events=self._run_streamed_internal(input, options))

    async def _run_streamed_internal(
        self, input: Input, turn_options: TurnOptions
    ) -> AsyncGenerator[ThreadEvent, None]:
        """Drive one CLI invocation, yielding parsed events as they arrive.

        Raises RuntimeError when an event line cannot be parsed or when the
        stream is idle past ``idle_timeout_seconds``.
        """
        # The Codex CLI expects an output schema file path for structured output.
        output_schema_file = create_output_schema_file(turn_options.output_schema)
        options = self._thread_options
        prompt, images = _normalize_input(input)
        idle_timeout = turn_options.idle_timeout_seconds
        signal = turn_options.signal
        if idle_timeout is not None and signal is None:
            # Create a signal so an idle timeout can abort the subprocess.
            signal = asyncio.Event()
        generator = self._exec.run(
            CodexExecArgs(
                input=prompt,
                base_url=self._options.base_url,
                api_key=self._options.api_key,
                thread_id=self._id,
                images=images,
                model=options.model,
                sandbox_mode=options.sandbox_mode,
                working_directory=options.working_directory,
                skip_git_repo_check=options.skip_git_repo_check,
                output_schema_file=output_schema_file.schema_path,
                model_reasoning_effort=options.model_reasoning_effort,
                signal=signal,
                idle_timeout_seconds=idle_timeout,
                network_access_enabled=options.network_access_enabled,
                web_search_mode=options.web_search_mode,
                web_search_enabled=options.web_search_enabled,
                approval_policy=options.approval_policy,
                additional_directories=list(options.additional_directories)
                if options.additional_directories
                else None,
            )
        )

        try:
            # _aclosing guarantees the exec generator is closed on any exit.
            async with _aclosing(generator) as stream:
                while True:
                    try:
                        # NOTE(review): a real CodexExec receives
                        # idle_timeout_seconds via CodexExecArgs above, so the
                        # wait_for fallback presumably only applies to exec
                        # doubles/subclasses — confirm intent.
                        if idle_timeout is None or isinstance(self._exec, CodexExec):
                            item = await stream.__anext__()
                        else:
                            item = await asyncio.wait_for(
                                stream.__anext__(),
                                timeout=idle_timeout,
                            )
                    except StopAsyncIteration:
                        break
                    except asyncio.TimeoutError as exc:
                        if signal is not None:
                            # Tell the subprocess to abort before surfacing.
                            signal.set()
                        raise RuntimeError(
                            f"Codex stream idle for {idle_timeout} seconds."
                        ) from exc
                    try:
                        parsed = _parse_event(item)
                    except Exception as exc:  # noqa: BLE001
                        raise RuntimeError(f"Failed to parse event: {item}") from exc
                    if isinstance(parsed, ThreadStartedEvent):
                        # Capture the thread id so callers can resume later.
                        self._id = parsed.thread_id
                    yield parsed
        finally:
            # Remove the temp schema file regardless of how the stream ended.
            output_schema_file.cleanup()

    async def run(self, input: Input, turn_options: TurnOptions | None = None) -> Turn:
        """Run one turn to completion and return the aggregated result.

        Raises RuntimeError when the turn fails or the stream reports an error.
        """
        # Aggregate events into a single Turn result (matching the TS SDK behavior).
        options = turn_options or TurnOptions()
        generator = self._run_streamed_internal(input, options)
        items: list[ThreadItem] = []
        final_response = ""
        usage: Usage | None = None
        turn_failure: ThreadError | None = None

        async for event in generator:
            if isinstance(event, ItemCompletedEvent):
                item = event.item
                if is_agent_message_item(item):
                    # Later agent messages overwrite earlier ones.
                    final_response = item.text
                items.append(item)
            elif isinstance(event, TurnCompletedEvent):
                usage = event.usage
            elif isinstance(event, TurnFailedEvent):
                turn_failure = event.error
                # Stop consuming; the failure is raised after the loop.
                break
            elif isinstance(event, ThreadErrorEvent):
                raise RuntimeError(f"Codex stream error: {event.message}")

        if turn_failure:
            raise RuntimeError(turn_failure.message)

        return Turn(items=items, final_response=final_response, usage=usage)
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _normalize_input(input: Input) -> tuple[str, list[str]]:
|
|
192
|
+
# Merge text items into a single prompt and collect image paths.
|
|
193
|
+
if isinstance(input, str):
|
|
194
|
+
return input, []
|
|
195
|
+
|
|
196
|
+
prompt_parts: list[str] = []
|
|
197
|
+
images: list[str] = []
|
|
198
|
+
for item in input:
|
|
199
|
+
if item["type"] == "text":
|
|
200
|
+
text = item.get("text", "")
|
|
201
|
+
prompt_parts.append(text)
|
|
202
|
+
elif item["type"] == "local_image":
|
|
203
|
+
path = item.get("path", "")
|
|
204
|
+
if path:
|
|
205
|
+
images.append(path)
|
|
206
|
+
|
|
207
|
+
return "\n\n".join(prompt_parts), images
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def _parse_event(raw: str) -> ThreadEvent:
    """Decode one serialized event line into a typed ThreadEvent."""
    import json

    payload = json.loads(raw)
    return coerce_thread_event(cast(dict[str, Any], payload))
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Mapping, Sequence
|
|
4
|
+
from dataclasses import dataclass, fields
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from typing_extensions import Literal
|
|
8
|
+
|
|
9
|
+
from agents.exceptions import UserError
|
|
10
|
+
|
|
11
|
+
ApprovalMode = Literal["never", "on-request", "on-failure", "untrusted"]
|
|
12
|
+
SandboxMode = Literal["read-only", "workspace-write", "danger-full-access"]
|
|
13
|
+
ModelReasoningEffort = Literal["minimal", "low", "medium", "high", "xhigh"]
|
|
14
|
+
WebSearchMode = Literal["disabled", "cached", "live"]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass(frozen=True)
|
|
18
|
+
class ThreadOptions:
|
|
19
|
+
# Model identifier passed to the Codex CLI (--model).
|
|
20
|
+
model: str | None = None
|
|
21
|
+
# Sandbox permissions for filesystem/network access.
|
|
22
|
+
sandbox_mode: SandboxMode | None = None
|
|
23
|
+
# Working directory for the Codex CLI process.
|
|
24
|
+
working_directory: str | None = None
|
|
25
|
+
# Allow running outside a Git repository.
|
|
26
|
+
skip_git_repo_check: bool | None = None
|
|
27
|
+
# Configure model reasoning effort.
|
|
28
|
+
model_reasoning_effort: ModelReasoningEffort | None = None
|
|
29
|
+
# Toggle network access in sandboxed workspace writes.
|
|
30
|
+
network_access_enabled: bool | None = None
|
|
31
|
+
# Configure web search mode via codex config.
|
|
32
|
+
web_search_mode: WebSearchMode | None = None
|
|
33
|
+
# Legacy toggle for web search behavior.
|
|
34
|
+
web_search_enabled: bool | None = None
|
|
35
|
+
# Approval policy for tool invocations within Codex.
|
|
36
|
+
approval_policy: ApprovalMode | None = None
|
|
37
|
+
# Additional filesystem roots available to Codex.
|
|
38
|
+
additional_directories: Sequence[str] | None = None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def coerce_thread_options(
    options: ThreadOptions | Mapping[str, Any] | None,
) -> ThreadOptions | None:
    """Normalize *options* into a ThreadOptions instance (or pass None through).

    Raises UserError for non-mapping inputs and for keys that are not
    ThreadOptions fields.
    """
    if options is None or isinstance(options, ThreadOptions):
        # Already in canonical form.
        return options
    if not isinstance(options, Mapping):
        raise UserError("ThreadOptions must be a ThreadOptions or a mapping.")

    # Reject keys that do not correspond to declared dataclass fields.
    valid_names = frozenset(spec.name for spec in fields(ThreadOptions))
    unknown = set(options.keys()) - valid_names
    if unknown:
        raise UserError(f"Unknown ThreadOptions field(s): {sorted(unknown)}")

    return ThreadOptions(**dict(options))
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
from collections.abc import Mapping
|
|
5
|
+
from dataclasses import dataclass, fields
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from agents.exceptions import UserError
|
|
9
|
+
|
|
10
|
+
AbortSignal = asyncio.Event
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
|
|
14
|
+
class TurnOptions:
|
|
15
|
+
# JSON schema used by Codex for structured output.
|
|
16
|
+
output_schema: dict[str, Any] | None = None
|
|
17
|
+
# Cancellation signal for the Codex CLI subprocess.
|
|
18
|
+
signal: AbortSignal | None = None
|
|
19
|
+
# Abort the Codex CLI if no events arrive within this many seconds.
|
|
20
|
+
idle_timeout_seconds: float | None = None
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def coerce_turn_options(
    options: TurnOptions | Mapping[str, Any] | None,
) -> TurnOptions | None:
    """Normalize *options* into a TurnOptions instance (or pass None through).

    Raises UserError for non-mapping inputs and for keys that are not
    TurnOptions fields.
    """
    if options is None or isinstance(options, TurnOptions):
        # Already in canonical form.
        return options
    if not isinstance(options, Mapping):
        raise UserError("TurnOptions must be a TurnOptions or a mapping.")

    # Reject keys that do not correspond to declared dataclass fields.
    valid_names = frozenset(spec.name for spec in fields(TurnOptions))
    unknown = set(options.keys()) - valid_names
    if unknown:
        raise UserError(f"Unknown TurnOptions field(s): {sorted(unknown)}")

    return TurnOptions(**dict(options))
|
|
@@ -1,9 +1,14 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
-
from ..handoffs import
|
|
3
|
+
from ..handoffs import (
|
|
4
|
+
HandoffInputData,
|
|
5
|
+
default_handoff_history_mapper,
|
|
6
|
+
nest_handoff_history,
|
|
7
|
+
)
|
|
4
8
|
from ..items import (
|
|
5
9
|
HandoffCallItem,
|
|
6
10
|
HandoffOutputItem,
|
|
11
|
+
ReasoningItem,
|
|
7
12
|
RunItem,
|
|
8
13
|
ToolCallItem,
|
|
9
14
|
ToolCallOutputItem,
|
|
@@ -12,6 +17,12 @@ from ..items import (
|
|
|
12
17
|
|
|
13
18
|
"""Contains common handoff input filters, for convenience. """
|
|
14
19
|
|
|
20
|
+
__all__ = [
|
|
21
|
+
"remove_all_tools",
|
|
22
|
+
"nest_handoff_history",
|
|
23
|
+
"default_handoff_history_mapper",
|
|
24
|
+
]
|
|
25
|
+
|
|
15
26
|
|
|
16
27
|
def remove_all_tools(handoff_input_data: HandoffInputData) -> HandoffInputData:
|
|
17
28
|
"""Filters out all tool items: file search, web search and function calls+output."""
|
|
@@ -41,6 +52,7 @@ def _remove_tools_from_items(items: tuple[RunItem, ...]) -> tuple[RunItem, ...]:
|
|
|
41
52
|
or isinstance(item, HandoffOutputItem)
|
|
42
53
|
or isinstance(item, ToolCallItem)
|
|
43
54
|
or isinstance(item, ToolCallOutputItem)
|
|
55
|
+
or isinstance(item, ReasoningItem)
|
|
44
56
|
):
|
|
45
57
|
continue
|
|
46
58
|
filtered_items.append(item)
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
"""Session memory backends living in the extensions namespace.
|
|
2
|
+
|
|
3
|
+
This package contains optional, production-grade session implementations that
|
|
4
|
+
introduce extra third-party dependencies (database drivers, ORMs, etc.). They
|
|
5
|
+
conform to the :class:`agents.memory.session.Session` protocol so they can be
|
|
6
|
+
used as a drop-in replacement for :class:`agents.memory.session.SQLiteSession`.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from typing import TYPE_CHECKING, Any
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
from .advanced_sqlite_session import AdvancedSQLiteSession
|
|
15
|
+
from .async_sqlite_session import AsyncSQLiteSession
|
|
16
|
+
from .dapr_session import (
|
|
17
|
+
DAPR_CONSISTENCY_EVENTUAL,
|
|
18
|
+
DAPR_CONSISTENCY_STRONG,
|
|
19
|
+
DaprSession,
|
|
20
|
+
)
|
|
21
|
+
from .encrypt_session import EncryptedSession
|
|
22
|
+
from .redis_session import RedisSession
|
|
23
|
+
from .sqlalchemy_session import SQLAlchemySession
|
|
24
|
+
|
|
25
|
+
__all__: list[str] = [
|
|
26
|
+
"AdvancedSQLiteSession",
|
|
27
|
+
"AsyncSQLiteSession",
|
|
28
|
+
"DAPR_CONSISTENCY_EVENTUAL",
|
|
29
|
+
"DAPR_CONSISTENCY_STRONG",
|
|
30
|
+
"DaprSession",
|
|
31
|
+
"EncryptedSession",
|
|
32
|
+
"RedisSession",
|
|
33
|
+
"SQLAlchemySession",
|
|
34
|
+
]
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def __getattr__(name: str) -> Any:
|
|
38
|
+
if name == "EncryptedSession":
|
|
39
|
+
try:
|
|
40
|
+
from .encrypt_session import EncryptedSession # noqa: F401
|
|
41
|
+
|
|
42
|
+
return EncryptedSession
|
|
43
|
+
except ModuleNotFoundError as e:
|
|
44
|
+
raise ImportError(
|
|
45
|
+
"EncryptedSession requires the 'cryptography' extra. "
|
|
46
|
+
"Install it with: pip install openai-agents[encrypt]"
|
|
47
|
+
) from e
|
|
48
|
+
|
|
49
|
+
if name == "RedisSession":
|
|
50
|
+
try:
|
|
51
|
+
from .redis_session import RedisSession # noqa: F401
|
|
52
|
+
|
|
53
|
+
return RedisSession
|
|
54
|
+
except ModuleNotFoundError as e:
|
|
55
|
+
raise ImportError(
|
|
56
|
+
"RedisSession requires the 'redis' extra. "
|
|
57
|
+
"Install it with: pip install openai-agents[redis]"
|
|
58
|
+
) from e
|
|
59
|
+
|
|
60
|
+
if name == "SQLAlchemySession":
|
|
61
|
+
try:
|
|
62
|
+
from .sqlalchemy_session import SQLAlchemySession # noqa: F401
|
|
63
|
+
|
|
64
|
+
return SQLAlchemySession
|
|
65
|
+
except ModuleNotFoundError as e:
|
|
66
|
+
raise ImportError(
|
|
67
|
+
"SQLAlchemySession requires the 'sqlalchemy' extra. "
|
|
68
|
+
"Install it with: pip install openai-agents[sqlalchemy]"
|
|
69
|
+
) from e
|
|
70
|
+
|
|
71
|
+
if name == "AdvancedSQLiteSession":
|
|
72
|
+
try:
|
|
73
|
+
from .advanced_sqlite_session import AdvancedSQLiteSession # noqa: F401
|
|
74
|
+
|
|
75
|
+
return AdvancedSQLiteSession
|
|
76
|
+
except ModuleNotFoundError as e:
|
|
77
|
+
raise ImportError(f"Failed to import AdvancedSQLiteSession: {e}") from e
|
|
78
|
+
|
|
79
|
+
if name == "AsyncSQLiteSession":
|
|
80
|
+
try:
|
|
81
|
+
from .async_sqlite_session import AsyncSQLiteSession # noqa: F401
|
|
82
|
+
|
|
83
|
+
return AsyncSQLiteSession
|
|
84
|
+
except ModuleNotFoundError as e:
|
|
85
|
+
raise ImportError(f"Failed to import AsyncSQLiteSession: {e}") from e
|
|
86
|
+
|
|
87
|
+
if name == "DaprSession":
|
|
88
|
+
try:
|
|
89
|
+
from .dapr_session import DaprSession # noqa: F401
|
|
90
|
+
|
|
91
|
+
return DaprSession
|
|
92
|
+
except ModuleNotFoundError as e:
|
|
93
|
+
raise ImportError(
|
|
94
|
+
"DaprSession requires the 'dapr' extra. "
|
|
95
|
+
"Install it with: pip install openai-agents[dapr]"
|
|
96
|
+
) from e
|
|
97
|
+
|
|
98
|
+
if name == "DAPR_CONSISTENCY_EVENTUAL":
|
|
99
|
+
try:
|
|
100
|
+
from .dapr_session import DAPR_CONSISTENCY_EVENTUAL # noqa: F401
|
|
101
|
+
|
|
102
|
+
return DAPR_CONSISTENCY_EVENTUAL
|
|
103
|
+
except ModuleNotFoundError as e:
|
|
104
|
+
raise ImportError(
|
|
105
|
+
"DAPR_CONSISTENCY_EVENTUAL requires the 'dapr' extra. "
|
|
106
|
+
"Install it with: pip install openai-agents[dapr]"
|
|
107
|
+
) from e
|
|
108
|
+
|
|
109
|
+
if name == "DAPR_CONSISTENCY_STRONG":
|
|
110
|
+
try:
|
|
111
|
+
from .dapr_session import DAPR_CONSISTENCY_STRONG # noqa: F401
|
|
112
|
+
|
|
113
|
+
return DAPR_CONSISTENCY_STRONG
|
|
114
|
+
except ModuleNotFoundError as e:
|
|
115
|
+
raise ImportError(
|
|
116
|
+
"DAPR_CONSISTENCY_STRONG requires the 'dapr' extra. "
|
|
117
|
+
"Install it with: pip install openai-agents[dapr]"
|
|
118
|
+
) from e
|
|
119
|
+
|
|
120
|
+
raise AttributeError(f"module {__name__} has no attribute {name}")
|