openai-agents 0.2.8__py3-none-any.whl → 0.6.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agents/__init__.py +105 -4
- agents/_debug.py +15 -4
- agents/_run_impl.py +1203 -96
- agents/agent.py +164 -19
- agents/apply_diff.py +329 -0
- agents/editor.py +47 -0
- agents/exceptions.py +35 -0
- agents/extensions/experimental/__init__.py +6 -0
- agents/extensions/experimental/codex/__init__.py +92 -0
- agents/extensions/experimental/codex/codex.py +89 -0
- agents/extensions/experimental/codex/codex_options.py +35 -0
- agents/extensions/experimental/codex/codex_tool.py +1142 -0
- agents/extensions/experimental/codex/events.py +162 -0
- agents/extensions/experimental/codex/exec.py +263 -0
- agents/extensions/experimental/codex/items.py +245 -0
- agents/extensions/experimental/codex/output_schema_file.py +50 -0
- agents/extensions/experimental/codex/payloads.py +31 -0
- agents/extensions/experimental/codex/thread.py +214 -0
- agents/extensions/experimental/codex/thread_options.py +54 -0
- agents/extensions/experimental/codex/turn_options.py +36 -0
- agents/extensions/handoff_filters.py +13 -1
- agents/extensions/memory/__init__.py +120 -0
- agents/extensions/memory/advanced_sqlite_session.py +1285 -0
- agents/extensions/memory/async_sqlite_session.py +239 -0
- agents/extensions/memory/dapr_session.py +423 -0
- agents/extensions/memory/encrypt_session.py +185 -0
- agents/extensions/memory/redis_session.py +261 -0
- agents/extensions/memory/sqlalchemy_session.py +334 -0
- agents/extensions/models/litellm_model.py +449 -36
- agents/extensions/models/litellm_provider.py +3 -1
- agents/function_schema.py +47 -5
- agents/guardrail.py +16 -2
- agents/{handoffs.py → handoffs/__init__.py} +89 -47
- agents/handoffs/history.py +268 -0
- agents/items.py +237 -11
- agents/lifecycle.py +75 -14
- agents/mcp/server.py +280 -37
- agents/mcp/util.py +24 -3
- agents/memory/__init__.py +22 -2
- agents/memory/openai_conversations_session.py +91 -0
- agents/memory/openai_responses_compaction_session.py +249 -0
- agents/memory/session.py +19 -261
- agents/memory/sqlite_session.py +275 -0
- agents/memory/util.py +20 -0
- agents/model_settings.py +14 -3
- agents/models/__init__.py +13 -0
- agents/models/chatcmpl_converter.py +303 -50
- agents/models/chatcmpl_helpers.py +63 -0
- agents/models/chatcmpl_stream_handler.py +290 -68
- agents/models/default_models.py +58 -0
- agents/models/interface.py +4 -0
- agents/models/openai_chatcompletions.py +103 -49
- agents/models/openai_provider.py +10 -4
- agents/models/openai_responses.py +162 -46
- agents/realtime/__init__.py +4 -0
- agents/realtime/_util.py +14 -3
- agents/realtime/agent.py +7 -0
- agents/realtime/audio_formats.py +53 -0
- agents/realtime/config.py +78 -10
- agents/realtime/events.py +18 -0
- agents/realtime/handoffs.py +2 -2
- agents/realtime/items.py +17 -1
- agents/realtime/model.py +13 -0
- agents/realtime/model_events.py +12 -0
- agents/realtime/model_inputs.py +18 -1
- agents/realtime/openai_realtime.py +696 -150
- agents/realtime/session.py +243 -23
- agents/repl.py +7 -3
- agents/result.py +197 -38
- agents/run.py +949 -168
- agents/run_context.py +13 -2
- agents/stream_events.py +1 -0
- agents/strict_schema.py +14 -0
- agents/tool.py +413 -15
- agents/tool_context.py +22 -1
- agents/tool_guardrails.py +279 -0
- agents/tracing/__init__.py +2 -0
- agents/tracing/config.py +9 -0
- agents/tracing/create.py +4 -0
- agents/tracing/processor_interface.py +84 -11
- agents/tracing/processors.py +65 -54
- agents/tracing/provider.py +64 -7
- agents/tracing/spans.py +105 -0
- agents/tracing/traces.py +116 -16
- agents/usage.py +134 -12
- agents/util/_json.py +19 -1
- agents/util/_transforms.py +12 -2
- agents/voice/input.py +5 -4
- agents/voice/models/openai_stt.py +17 -9
- agents/voice/pipeline.py +2 -0
- agents/voice/pipeline_config.py +4 -0
- {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/METADATA +44 -19
- openai_agents-0.6.8.dist-info/RECORD +134 -0
- {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/WHEEL +1 -1
- openai_agents-0.2.8.dist-info/RECORD +0 -103
- {openai_agents-0.2.8.dist-info → openai_agents-0.6.8.dist-info}/licenses/LICENSE +0 -0
agents/memory/session.py
CHANGED
@@ -1,13 +1,10 @@
 from __future__ import annotations
 
-import asyncio
-import json
-import sqlite3
-import threading
 from abc import ABC, abstractmethod
-from pathlib import Path
 from typing import TYPE_CHECKING, Protocol, runtime_checkable
 
+from typing_extensions import TypedDict, TypeGuard
+
 if TYPE_CHECKING:
     from ..items import TResponseInputItem
 
@@ -104,266 +101,27 @@ class SessionABC(ABC):
         ...
 
 
(Old lines 107-369, the entire SQLiteSession class, are deleted from this module. The class moves to
the new agents/memory/sqlite_session.py shown below, with message ordering switched from the
created_at column to the id column. In its place, session.py gains the responses-compaction API:)

+class OpenAIResponsesCompactionArgs(TypedDict, total=False):
+    """Arguments for the run_compaction method."""
+
+    response_id: str
+    """The ID of the last response to use for compaction."""
+
+    force: bool
+    """Whether to force compaction even if the threshold is not met."""
+
+
+@runtime_checkable
+class OpenAIResponsesCompactionAwareSession(Session, Protocol):
+    """Protocol for session implementations that support responses compaction."""
+
+    async def run_compaction(self, args: OpenAIResponsesCompactionArgs | None = None) -> None:
+        """Run the compaction process for the session."""
+        ...
+
+
+def is_openai_responses_compaction_aware_session(
+    session: Session | None,
+) -> TypeGuard[OpenAIResponsesCompactionAwareSession]:
+    """Check if a session supports responses compaction."""
+    return isinstance(session, OpenAIResponsesCompactionAwareSession)
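As a usage sketch of the compaction protocol added above (only the names defined in the diff are taken from it; the import path, the assumption that Session is exposed from the same module, the maybe_compact helper, and the argument values are illustrative):

from __future__ import annotations

# Hypothetical helper: only OpenAIResponsesCompactionArgs, Session, and
# is_openai_responses_compaction_aware_session come from the diff above.
from agents.memory.session import (
    OpenAIResponsesCompactionArgs,
    Session,
    is_openai_responses_compaction_aware_session,
)


async def maybe_compact(session: Session | None, last_response_id: str) -> None:
    # The TypeGuard narrows `session`, so run_compaction() type-checks below.
    if is_openai_responses_compaction_aware_session(session):
        args: OpenAIResponsesCompactionArgs = {"response_id": last_response_id, "force": False}
        await session.run_compaction(args)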
agents/memory/sqlite_session.py
ADDED

@@ -0,0 +1,275 @@
+from __future__ import annotations
+
+import asyncio
+import json
+import sqlite3
+import threading
+from pathlib import Path
+
+from ..items import TResponseInputItem
+from .session import SessionABC
+
+
+class SQLiteSession(SessionABC):
+    """SQLite-based implementation of session storage.
+
+    This implementation stores conversation history in a SQLite database.
+    By default, uses an in-memory database that is lost when the process ends.
+    For persistent storage, provide a file path.
+    """
+
+    def __init__(
+        self,
+        session_id: str,
+        db_path: str | Path = ":memory:",
+        sessions_table: str = "agent_sessions",
+        messages_table: str = "agent_messages",
+    ):
+        """Initialize the SQLite session.
+
+        Args:
+            session_id: Unique identifier for the conversation session
+            db_path: Path to the SQLite database file. Defaults to ':memory:' (in-memory database)
+            sessions_table: Name of the table to store session metadata. Defaults to
+                'agent_sessions'
+            messages_table: Name of the table to store message data. Defaults to 'agent_messages'
+        """
+        self.session_id = session_id
+        self.db_path = db_path
+        self.sessions_table = sessions_table
+        self.messages_table = messages_table
+        self._local = threading.local()
+        self._lock = threading.Lock()
+
+        # For in-memory databases, we need a shared connection to avoid thread isolation
+        # For file databases, we use thread-local connections for better concurrency
+        self._is_memory_db = str(db_path) == ":memory:"
+        if self._is_memory_db:
+            self._shared_connection = sqlite3.connect(":memory:", check_same_thread=False)
+            self._shared_connection.execute("PRAGMA journal_mode=WAL")
+            self._init_db_for_connection(self._shared_connection)
+        else:
+            # For file databases, initialize the schema once since it persists
+            init_conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
+            init_conn.execute("PRAGMA journal_mode=WAL")
+            self._init_db_for_connection(init_conn)
+            init_conn.close()
+
+    def _get_connection(self) -> sqlite3.Connection:
+        """Get a database connection."""
+        if self._is_memory_db:
+            # Use shared connection for in-memory database to avoid thread isolation
+            return self._shared_connection
+        else:
+            # Use thread-local connections for file databases
+            if not hasattr(self._local, "connection"):
+                self._local.connection = sqlite3.connect(
+                    str(self.db_path),
+                    check_same_thread=False,
+                )
+                self._local.connection.execute("PRAGMA journal_mode=WAL")
+            assert isinstance(self._local.connection, sqlite3.Connection), (
+                f"Expected sqlite3.Connection, got {type(self._local.connection)}"
+            )
+            return self._local.connection
+
+    def _init_db_for_connection(self, conn: sqlite3.Connection) -> None:
+        """Initialize the database schema for a specific connection."""
+        conn.execute(
+            f"""
+            CREATE TABLE IF NOT EXISTS {self.sessions_table} (
+                session_id TEXT PRIMARY KEY,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        """
+        )
+
+        conn.execute(
+            f"""
+            CREATE TABLE IF NOT EXISTS {self.messages_table} (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                session_id TEXT NOT NULL,
+                message_data TEXT NOT NULL,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                FOREIGN KEY (session_id) REFERENCES {self.sessions_table} (session_id)
+                    ON DELETE CASCADE
+            )
+        """
+        )
+
+        conn.execute(
+            f"""
+            CREATE INDEX IF NOT EXISTS idx_{self.messages_table}_session_id
+            ON {self.messages_table} (session_id, id)
+        """
+        )
+
+        conn.commit()
+
+    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+        """Retrieve the conversation history for this session.
+
+        Args:
+            limit: Maximum number of items to retrieve. If None, retrieves all items.
+                When specified, returns the latest N items in chronological order.
+
+        Returns:
+            List of input items representing the conversation history
+        """
+
+        def _get_items_sync():
+            conn = self._get_connection()
+            with self._lock if self._is_memory_db else threading.Lock():
+                if limit is None:
+                    # Fetch all items in chronological order
+                    cursor = conn.execute(
+                        f"""
+                        SELECT message_data FROM {self.messages_table}
+                        WHERE session_id = ?
+                        ORDER BY id ASC
+                    """,
+                        (self.session_id,),
+                    )
+                else:
+                    # Fetch the latest N items in chronological order
+                    cursor = conn.execute(
+                        f"""
+                        SELECT message_data FROM {self.messages_table}
+                        WHERE session_id = ?
+                        ORDER BY id DESC
+                        LIMIT ?
+                    """,
+                        (self.session_id, limit),
+                    )
+
+                rows = cursor.fetchall()
+
+                # Reverse to get chronological order when using DESC
+                if limit is not None:
+                    rows = list(reversed(rows))
+
+                items = []
+                for (message_data,) in rows:
+                    try:
+                        item = json.loads(message_data)
+                        items.append(item)
+                    except json.JSONDecodeError:
+                        # Skip invalid JSON entries
+                        continue
+
+                return items
+
+        return await asyncio.to_thread(_get_items_sync)
+
+    async def add_items(self, items: list[TResponseInputItem]) -> None:
+        """Add new items to the conversation history.
+
+        Args:
+            items: List of input items to add to the history
+        """
+        if not items:
+            return
+
+        def _add_items_sync():
+            conn = self._get_connection()
+
+            with self._lock if self._is_memory_db else threading.Lock():
+                # Ensure session exists
+                conn.execute(
+                    f"""
+                    INSERT OR IGNORE INTO {self.sessions_table} (session_id) VALUES (?)
+                """,
+                    (self.session_id,),
+                )
+
+                # Add items
+                message_data = [(self.session_id, json.dumps(item)) for item in items]
+                conn.executemany(
+                    f"""
+                    INSERT INTO {self.messages_table} (session_id, message_data) VALUES (?, ?)
+                """,
+                    message_data,
+                )
+
+                # Update session timestamp
+                conn.execute(
+                    f"""
+                    UPDATE {self.sessions_table}
+                    SET updated_at = CURRENT_TIMESTAMP
+                    WHERE session_id = ?
+                """,
+                    (self.session_id,),
+                )
+
+                conn.commit()
+
+        await asyncio.to_thread(_add_items_sync)
+
+    async def pop_item(self) -> TResponseInputItem | None:
+        """Remove and return the most recent item from the session.
+
+        Returns:
+            The most recent item if it exists, None if the session is empty
+        """
+
+        def _pop_item_sync():
+            conn = self._get_connection()
+            with self._lock if self._is_memory_db else threading.Lock():
+                # Use DELETE with RETURNING to atomically delete and return the most recent item
+                cursor = conn.execute(
+                    f"""
+                    DELETE FROM {self.messages_table}
+                    WHERE id = (
+                        SELECT id FROM {self.messages_table}
+                        WHERE session_id = ?
+                        ORDER BY id DESC
+                        LIMIT 1
+                    )
+                    RETURNING message_data
+                """,
+                    (self.session_id,),
+                )
+
+                result = cursor.fetchone()
+                conn.commit()
+
+                if result:
+                    message_data = result[0]
+                    try:
+                        item = json.loads(message_data)
+                        return item
+                    except json.JSONDecodeError:
+                        # Return None for corrupted JSON entries (already deleted)
+                        return None
+
+                return None
+
+        return await asyncio.to_thread(_pop_item_sync)
+
+    async def clear_session(self) -> None:
+        """Clear all items for this session."""
+
+        def _clear_session_sync():
+            conn = self._get_connection()
+            with self._lock if self._is_memory_db else threading.Lock():
+                conn.execute(
+                    f"DELETE FROM {self.messages_table} WHERE session_id = ?",
+                    (self.session_id,),
+                )
+                conn.execute(
+                    f"DELETE FROM {self.sessions_table} WHERE session_id = ?",
+                    (self.session_id,),
+                )
+                conn.commit()
+
+        await asyncio.to_thread(_clear_session_sync)
+
+    def close(self) -> None:
+        """Close the database connection."""
+        if self._is_memory_db:
+            if hasattr(self, "_shared_connection"):
+                self._shared_connection.close()
+        else:
+            if hasattr(self._local, "connection"):
+                self._local.connection.close()
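A minimal usage sketch of the relocated SQLiteSession (the database file name and the message dict shape are illustrative; the methods are the ones defined in the new module above):

import asyncio

from agents.memory.sqlite_session import SQLiteSession  # new module path from this diff


async def main() -> None:
    session = SQLiteSession("user-123", db_path="conversations.db")
    await session.add_items([{"role": "user", "content": "Hello"}])  # example item shape
    latest = await session.get_items(limit=10)  # latest 10 items, oldest first
    print(latest)
    await session.pop_item()  # removes and returns the most recent item
    session.close()


asyncio.run(main())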
agents/memory/util.py
ADDED
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from typing import Callable
+
+from ..items import TResponseInputItem
+from ..util._types import MaybeAwaitable
+
+SessionInputCallback = Callable[
+    [list[TResponseInputItem], list[TResponseInputItem]],
+    MaybeAwaitable[list[TResponseInputItem]],
+]
+"""A function that combines session history with new input items.
+
+Args:
+    history_items: The list of items from the session history.
+    new_items: The list of new input items for the current turn.
+
+Returns:
+    A list of combined items to be used as input for the agent. Can be sync or async.
+"""
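To make the SessionInputCallback contract concrete, here is a sketch of a conforming callback (the trimming policy is just an example):

from agents.items import TResponseInputItem
from agents.memory.util import SessionInputCallback


def keep_last_ten(
    history_items: list[TResponseInputItem],
    new_items: list[TResponseInputItem],
) -> list[TResponseInputItem]:
    # Combine stored history with the new turn, keeping only the last 10 history items.
    return history_items[-10:] + new_items


callback: SessionInputCallback = keep_last_ten  # type-checks against the alias above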
agents/model_settings.py
CHANGED
@@ -55,7 +55,6 @@ Headers: TypeAlias = Mapping[str, Union[str, Omit]]
 ToolChoice: TypeAlias = Union[Literal["auto", "required", "none"], str, MCPToolChoice, None]
 
 
-
 @dataclass
 class ModelSettings:
     """Settings to use when calling an LLM.

@@ -92,7 +91,10 @@ class ModelSettings:
     """
 
     truncation: Literal["auto", "disabled"] | None = None
-    """The truncation strategy to use when calling the model."""
+    """The truncation strategy to use when calling the model.
+    See [Responses API documentation](https://platform.openai.com/docs/api-reference/responses/create#responses_create-truncation)
+    for more details.
+    """
 
     max_tokens: int | None = None
     """The maximum number of output tokens to generate."""

@@ -114,11 +116,20 @@ class ModelSettings:
     For Responses API: automatically enabled when not specified.
     For Chat Completions API: disabled when not specified."""
 
+    prompt_cache_retention: Literal["in_memory", "24h"] | None = None
+    """The retention policy for the prompt cache. Set to `24h` to enable extended
+    prompt caching, which keeps cached prefixes active for longer, up to a maximum
+    of 24 hours.
+    [Learn more](https://platform.openai.com/docs/guides/prompt-caching#prompt-cache-retention)."""
+
     include_usage: bool | None = None
     """Whether to include usage chunk.
     Only available for Chat Completions API."""
 
-    response_include: list[ResponseIncludable] | None = None
+    # TODO: revisit ResponseIncludable | str if ResponseIncludable covers more cases
+    # We've added str to support missing ones like
+    # "web_search_call.action.sources" etc.
+    response_include: list[ResponseIncludable | str] | None = None
     """Additional output data to include in the model response.
     [include parameter](https://platform.openai.com/docs/api-reference/responses/create#responses-create-include)"""
 
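A sketch of the two new ModelSettings fields in use (values are illustrative; this assumes the usual top-level re-export of ModelSettings from the agents package):

from agents import ModelSettings

settings = ModelSettings(
    truncation="auto",
    prompt_cache_retention="24h",  # keep cached prompt prefixes alive for up to 24 hours
    response_include=["web_search_call.action.sources"],  # plain strings are now accepted
)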
agents/models/__init__.py
CHANGED
@@ -0,0 +1,13 @@
+from .default_models import (
+    get_default_model,
+    get_default_model_settings,
+    gpt_5_reasoning_settings_required,
+    is_gpt_5_default,
+)
+
+__all__ = [
+    "get_default_model",
+    "get_default_model_settings",
+    "gpt_5_reasoning_settings_required",
+    "is_gpt_5_default",
+]
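A hedged sketch of the helpers now re-exported from agents.models (the diff only shows the re-exports, not the signatures, so the no-argument call forms below are assumptions):

from agents.models import get_default_model, get_default_model_settings, is_gpt_5_default

model_name = get_default_model()          # assumed: returns the SDK's default model name
settings = get_default_model_settings()   # assumed: ModelSettings tuned for that default
if is_gpt_5_default():
    print(f"Default model {model_name} is in the GPT-5 family")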