openai-agents 0.4.2__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of openai-agents might be problematic; consult the registry's advisory page for more details.

@@ -11,10 +11,13 @@ from __future__ import annotations
11
11
  from typing import Any
12
12
 
13
13
  __all__: list[str] = [
14
+ "AdvancedSQLiteSession",
15
+ "DAPR_CONSISTENCY_EVENTUAL",
16
+ "DAPR_CONSISTENCY_STRONG",
17
+ "DaprSession",
14
18
  "EncryptedSession",
15
19
  "RedisSession",
16
20
  "SQLAlchemySession",
17
- "AdvancedSQLiteSession",
18
21
  ]
19
22
 
20
23
 
@@ -60,4 +63,37 @@ def __getattr__(name: str) -> Any:
60
63
  except ModuleNotFoundError as e:
61
64
  raise ImportError(f"Failed to import AdvancedSQLiteSession: {e}") from e
62
65
 
66
+ if name == "DaprSession":
67
+ try:
68
+ from .dapr_session import DaprSession # noqa: F401
69
+
70
+ return DaprSession
71
+ except ModuleNotFoundError as e:
72
+ raise ImportError(
73
+ "DaprSession requires the 'dapr' extra. "
74
+ "Install it with: pip install openai-agents[dapr]"
75
+ ) from e
76
+
77
+ if name == "DAPR_CONSISTENCY_EVENTUAL":
78
+ try:
79
+ from .dapr_session import DAPR_CONSISTENCY_EVENTUAL # noqa: F401
80
+
81
+ return DAPR_CONSISTENCY_EVENTUAL
82
+ except ModuleNotFoundError as e:
83
+ raise ImportError(
84
+ "DAPR_CONSISTENCY_EVENTUAL requires the 'dapr' extra. "
85
+ "Install it with: pip install openai-agents[dapr]"
86
+ ) from e
87
+
88
+ if name == "DAPR_CONSISTENCY_STRONG":
89
+ try:
90
+ from .dapr_session import DAPR_CONSISTENCY_STRONG # noqa: F401
91
+
92
+ return DAPR_CONSISTENCY_STRONG
93
+ except ModuleNotFoundError as e:
94
+ raise ImportError(
95
+ "DAPR_CONSISTENCY_STRONG requires the 'dapr' extra. "
96
+ "Install it with: pip install openai-agents[dapr]"
97
+ ) from e
98
+
63
99
  raise AttributeError(f"module {__name__} has no attribute {name}")
@@ -0,0 +1,423 @@
1
+ """Dapr State Store-powered Session backend.
2
+
3
+ Usage::
4
+
5
+ from agents.extensions.memory import DaprSession
6
+
7
+ # Create from Dapr sidecar address
8
+ session = DaprSession.from_address(
9
+ session_id="user-123",
10
+ state_store_name="statestore",
11
+ dapr_address="localhost:50001",
12
+ )
13
+
14
+ # Or pass an existing Dapr client that your application already manages
15
+ session = DaprSession(
16
+ session_id="user-123",
17
+ state_store_name="statestore",
18
+ dapr_client=my_dapr_client,
19
+ )
20
+
21
+ await Runner.run(agent, "Hello", session=session)
22
+ """
23
+
24
+ from __future__ import annotations
25
+
26
+ import asyncio
27
+ import json
28
+ import random
29
+ import time
30
+ from typing import Any, Final, Literal
31
+
32
+ try:
33
+ from dapr.aio.clients import DaprClient
34
+ from dapr.clients.grpc._state import Concurrency, Consistency, StateOptions
35
+ except ImportError as e:
36
+ raise ImportError(
37
+ "DaprSession requires the 'dapr' package. Install it with: pip install dapr"
38
+ ) from e
39
+
40
+ from ...items import TResponseInputItem
41
+ from ...logger import logger
42
+ from ...memory.session import SessionABC
43
+
44
+ # Type alias for consistency levels
45
+ ConsistencyLevel = Literal["eventual", "strong"]
46
+
47
+ # Consistency level constants
48
+ DAPR_CONSISTENCY_EVENTUAL: ConsistencyLevel = "eventual"
49
+ DAPR_CONSISTENCY_STRONG: ConsistencyLevel = "strong"
50
+
51
+ _MAX_WRITE_ATTEMPTS: Final[int] = 5
52
+ _RETRY_BASE_DELAY_SECONDS: Final[float] = 0.05
53
+ _RETRY_MAX_DELAY_SECONDS: Final[float] = 1.0
54
+
55
+
56
+ class DaprSession(SessionABC):
57
+ """Dapr State Store implementation of :pyclass:`agents.memory.session.Session`."""
58
+
59
+ def __init__(
60
+ self,
61
+ session_id: str,
62
+ *,
63
+ state_store_name: str,
64
+ dapr_client: DaprClient,
65
+ ttl: int | None = None,
66
+ consistency: ConsistencyLevel = DAPR_CONSISTENCY_EVENTUAL,
67
+ ):
68
+ """Initializes a new DaprSession.
69
+
70
+ Args:
71
+ session_id (str): Unique identifier for the conversation.
72
+ state_store_name (str): Name of the Dapr state store component.
73
+ dapr_client (DaprClient): A pre-configured Dapr client.
74
+ ttl (int | None, optional): Time-to-live in seconds for session data.
75
+ If None, data persists indefinitely. Note that TTL support depends on
76
+ the underlying state store implementation. Defaults to None.
77
+ consistency (ConsistencyLevel, optional): Consistency level for state operations.
78
+ Use DAPR_CONSISTENCY_EVENTUAL or DAPR_CONSISTENCY_STRONG constants.
79
+ Defaults to DAPR_CONSISTENCY_EVENTUAL.
80
+ """
81
+ self.session_id = session_id
82
+ self._dapr_client = dapr_client
83
+ self._state_store_name = state_store_name
84
+ self._ttl = ttl
85
+ self._consistency = consistency
86
+ self._lock = asyncio.Lock()
87
+ self._owns_client = False # Track if we own the Dapr client
88
+
89
+ # State keys
90
+ self._messages_key = f"{self.session_id}:messages"
91
+ self._metadata_key = f"{self.session_id}:metadata"
92
+
93
+ @classmethod
94
+ def from_address(
95
+ cls,
96
+ session_id: str,
97
+ *,
98
+ state_store_name: str,
99
+ dapr_address: str = "localhost:50001",
100
+ **kwargs: Any,
101
+ ) -> DaprSession:
102
+ """Create a session from a Dapr sidecar address.
103
+
104
+ Args:
105
+ session_id (str): Conversation ID.
106
+ state_store_name (str): Name of the Dapr state store component.
107
+ dapr_address (str): Dapr sidecar gRPC address. Defaults to "localhost:50001".
108
+ **kwargs: Additional keyword arguments forwarded to the main constructor
109
+ (e.g., ttl, consistency).
110
+
111
+ Returns:
112
+ DaprSession: An instance of DaprSession connected to the specified Dapr sidecar.
113
+
114
+ Note:
115
+ The Dapr Python SDK performs health checks on the HTTP endpoint (default: http://localhost:3500).
116
+ Ensure the Dapr sidecar is started with --dapr-http-port 3500. Alternatively, set one of
117
+ these environment variables: DAPR_HTTP_ENDPOINT (e.g., "http://localhost:3500") or
118
+ DAPR_HTTP_PORT (e.g., "3500") to avoid connection errors.
119
+ """
120
+ dapr_client = DaprClient(address=dapr_address)
121
+ session = cls(
122
+ session_id, state_store_name=state_store_name, dapr_client=dapr_client, **kwargs
123
+ )
124
+ session._owns_client = True # We created the client, so we own it
125
+ return session
126
+
127
+ def _get_read_metadata(self) -> dict[str, str]:
128
+ """Get metadata for read operations including consistency.
129
+
130
+ The consistency level is passed through state_metadata as per Dapr's state API.
131
+ """
132
+ metadata: dict[str, str] = {}
133
+ # Add consistency level to metadata for read operations
134
+ if self._consistency:
135
+ metadata["consistency"] = self._consistency
136
+ return metadata
137
+
138
+ def _get_state_options(self, *, concurrency: Concurrency | None = None) -> StateOptions | None:
139
+ """Get StateOptions configured with consistency and optional concurrency."""
140
+ options_kwargs: dict[str, Any] = {}
141
+ if self._consistency == DAPR_CONSISTENCY_STRONG:
142
+ options_kwargs["consistency"] = Consistency.strong
143
+ elif self._consistency == DAPR_CONSISTENCY_EVENTUAL:
144
+ options_kwargs["consistency"] = Consistency.eventual
145
+ if concurrency is not None:
146
+ options_kwargs["concurrency"] = concurrency
147
+ if options_kwargs:
148
+ return StateOptions(**options_kwargs)
149
+ return None
150
+
151
+ def _get_metadata(self) -> dict[str, str]:
152
+ """Get metadata for state operations including TTL if configured."""
153
+ metadata = {}
154
+ if self._ttl is not None:
155
+ metadata["ttlInSeconds"] = str(self._ttl)
156
+ return metadata
157
+
158
+ async def _serialize_item(self, item: TResponseInputItem) -> str:
159
+ """Serialize an item to JSON string. Can be overridden by subclasses."""
160
+ return json.dumps(item, separators=(",", ":"))
161
+
162
+ async def _deserialize_item(self, item: str) -> TResponseInputItem:
163
+ """Deserialize a JSON string to an item. Can be overridden by subclasses."""
164
+ return json.loads(item) # type: ignore[no-any-return]
165
+
166
+ def _decode_messages(self, data: bytes | None) -> list[Any]:
167
+ if not data:
168
+ return []
169
+ try:
170
+ messages_json = data.decode("utf-8")
171
+ messages = json.loads(messages_json)
172
+ if isinstance(messages, list):
173
+ return list(messages)
174
+ except (json.JSONDecodeError, UnicodeDecodeError):
175
+ return []
176
+ return []
177
+
178
+ def _calculate_retry_delay(self, attempt: int) -> float:
179
+ base: float = _RETRY_BASE_DELAY_SECONDS * (2 ** max(0, attempt - 1))
180
+ delay: float = min(base, _RETRY_MAX_DELAY_SECONDS)
181
+ # Add jitter (10%) similar to tracing processors to avoid thundering herd.
182
+ return delay + random.uniform(0, 0.1 * delay)
183
+
184
+ def _is_concurrency_conflict(self, error: Exception) -> bool:
185
+ code_attr = getattr(error, "code", None)
186
+ if callable(code_attr):
187
+ try:
188
+ status_code = code_attr()
189
+ except Exception:
190
+ status_code = None
191
+ if status_code is not None:
192
+ status_name = getattr(status_code, "name", str(status_code))
193
+ if status_name in {"ABORTED", "FAILED_PRECONDITION"}:
194
+ return True
195
+ message = str(error).lower()
196
+ conflict_markers = (
197
+ "etag mismatch",
198
+ "etag does not match",
199
+ "precondition failed",
200
+ "concurrency conflict",
201
+ "invalid etag",
202
+ "failed to set key", # Redis state store Lua script error during conditional write
203
+ "user_script", # Redis script failure hint
204
+ )
205
+ return any(marker in message for marker in conflict_markers)
206
+
207
+ async def _handle_concurrency_conflict(self, error: Exception, attempt: int) -> bool:
208
+ if not self._is_concurrency_conflict(error):
209
+ return False
210
+ if attempt >= _MAX_WRITE_ATTEMPTS:
211
+ return False
212
+ delay = self._calculate_retry_delay(attempt)
213
+ if delay > 0:
214
+ await asyncio.sleep(delay)
215
+ return True
216
+
217
+ # ------------------------------------------------------------------
218
+ # Session protocol implementation
219
+ # ------------------------------------------------------------------
220
+
221
+ async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
222
+ """Retrieve the conversation history for this session.
223
+
224
+ Args:
225
+ limit: Maximum number of items to retrieve. If None, retrieves all items.
226
+ When specified, returns the latest N items in chronological order.
227
+
228
+ Returns:
229
+ List of input items representing the conversation history
230
+ """
231
+ async with self._lock:
232
+ # Get messages from state store with consistency level
233
+ response = await self._dapr_client.get_state(
234
+ store_name=self._state_store_name,
235
+ key=self._messages_key,
236
+ state_metadata=self._get_read_metadata(),
237
+ )
238
+
239
+ messages = self._decode_messages(response.data)
240
+ if not messages:
241
+ return []
242
+ if limit is not None:
243
+ if limit <= 0:
244
+ return []
245
+ messages = messages[-limit:]
246
+ items: list[TResponseInputItem] = []
247
+ for msg in messages:
248
+ try:
249
+ if isinstance(msg, str):
250
+ item = await self._deserialize_item(msg)
251
+ else:
252
+ item = msg
253
+ items.append(item)
254
+ except (json.JSONDecodeError, TypeError):
255
+ continue
256
+ return items
257
+
258
+ async def add_items(self, items: list[TResponseInputItem]) -> None:
259
+ """Add new items to the conversation history.
260
+
261
+ Args:
262
+ items: List of input items to add to the history
263
+ """
264
+ if not items:
265
+ return
266
+
267
+ async with self._lock:
268
+ serialized_items: list[str] = [await self._serialize_item(item) for item in items]
269
+ attempt = 0
270
+ while True:
271
+ attempt += 1
272
+ response = await self._dapr_client.get_state(
273
+ store_name=self._state_store_name,
274
+ key=self._messages_key,
275
+ state_metadata=self._get_read_metadata(),
276
+ )
277
+ existing_messages = self._decode_messages(response.data)
278
+ updated_messages = existing_messages + serialized_items
279
+ messages_json = json.dumps(updated_messages, separators=(",", ":"))
280
+ etag = response.etag
281
+ try:
282
+ await self._dapr_client.save_state(
283
+ store_name=self._state_store_name,
284
+ key=self._messages_key,
285
+ value=messages_json,
286
+ etag=etag,
287
+ state_metadata=self._get_metadata(),
288
+ options=self._get_state_options(concurrency=Concurrency.first_write),
289
+ )
290
+ break
291
+ except Exception as error:
292
+ should_retry = await self._handle_concurrency_conflict(error, attempt)
293
+ if should_retry:
294
+ continue
295
+ raise
296
+
297
+ # Update metadata
298
+ metadata = {
299
+ "session_id": self.session_id,
300
+ "created_at": str(int(time.time())),
301
+ "updated_at": str(int(time.time())),
302
+ }
303
+ await self._dapr_client.save_state(
304
+ store_name=self._state_store_name,
305
+ key=self._metadata_key,
306
+ value=json.dumps(metadata),
307
+ state_metadata=self._get_metadata(),
308
+ options=self._get_state_options(),
309
+ )
310
+
311
+ async def pop_item(self) -> TResponseInputItem | None:
312
+ """Remove and return the most recent item from the session.
313
+
314
+ Returns:
315
+ The most recent item if it exists, None if the session is empty
316
+ """
317
+ async with self._lock:
318
+ attempt = 0
319
+ while True:
320
+ attempt += 1
321
+ response = await self._dapr_client.get_state(
322
+ store_name=self._state_store_name,
323
+ key=self._messages_key,
324
+ state_metadata=self._get_read_metadata(),
325
+ )
326
+ messages = self._decode_messages(response.data)
327
+ if not messages:
328
+ return None
329
+ last_item = messages.pop()
330
+ messages_json = json.dumps(messages, separators=(",", ":"))
331
+ etag = getattr(response, "etag", None) or None
332
+ etag = getattr(response, "etag", None) or None
333
+ try:
334
+ await self._dapr_client.save_state(
335
+ store_name=self._state_store_name,
336
+ key=self._messages_key,
337
+ value=messages_json,
338
+ etag=etag,
339
+ state_metadata=self._get_metadata(),
340
+ options=self._get_state_options(concurrency=Concurrency.first_write),
341
+ )
342
+ break
343
+ except Exception as error:
344
+ should_retry = await self._handle_concurrency_conflict(error, attempt)
345
+ if should_retry:
346
+ continue
347
+ raise
348
+ try:
349
+ if isinstance(last_item, str):
350
+ return await self._deserialize_item(last_item)
351
+ return last_item # type: ignore[no-any-return]
352
+ except (json.JSONDecodeError, TypeError):
353
+ return None
354
+
355
+ async def clear_session(self) -> None:
356
+ """Clear all items for this session."""
357
+ async with self._lock:
358
+ # Delete messages and metadata keys
359
+ await self._dapr_client.delete_state(
360
+ store_name=self._state_store_name,
361
+ key=self._messages_key,
362
+ options=self._get_state_options(),
363
+ )
364
+
365
+ await self._dapr_client.delete_state(
366
+ store_name=self._state_store_name,
367
+ key=self._metadata_key,
368
+ options=self._get_state_options(),
369
+ )
370
+
371
+ async def close(self) -> None:
372
+ """Close the Dapr client connection.
373
+
374
+ Only closes the connection if this session owns the Dapr client
375
+ (i.e., created via from_address). If the client was injected externally,
376
+ the caller is responsible for managing its lifecycle.
377
+ """
378
+ if self._owns_client:
379
+ await self._dapr_client.close()
380
+
381
+ async def __aenter__(self) -> DaprSession:
382
+ """Enter async context manager."""
383
+ return self
384
+
385
+ async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
386
+ """Exit async context manager and close the connection."""
387
+ await self.close()
388
+
389
+ async def ping(self) -> bool:
390
+ """Test Dapr connectivity by checking metadata.
391
+
392
+ Returns:
393
+ True if Dapr is reachable, False otherwise.
394
+ """
395
+ try:
396
+ # First attempt a read; some stores may not be initialized yet.
397
+ await self._dapr_client.get_state(
398
+ store_name=self._state_store_name,
399
+ key="__ping__",
400
+ state_metadata=self._get_read_metadata(),
401
+ )
402
+ return True
403
+ except Exception as initial_error:
404
+ # If relation/table is missing or store isn't initialized,
405
+ # attempt a write to initialize it, then read again.
406
+ try:
407
+ await self._dapr_client.save_state(
408
+ store_name=self._state_store_name,
409
+ key="__ping__",
410
+ value="ok",
411
+ state_metadata=self._get_metadata(),
412
+ options=self._get_state_options(),
413
+ )
414
+ # Read again after write.
415
+ await self._dapr_client.get_state(
416
+ store_name=self._state_store_name,
417
+ key="__ping__",
418
+ state_metadata=self._get_read_metadata(),
419
+ )
420
+ return True
421
+ except Exception:
422
+ logger.error("Dapr connection failed: %s", initial_error)
423
+ return False
agents/lifecycle.py CHANGED
@@ -62,7 +62,7 @@ class RunHooksBase(Generic[TContext, TAgent]):
62
62
  agent: TAgent,
63
63
  tool: Tool,
64
64
  ) -> None:
65
- """Called concurrently with tool invocation."""
65
+ """Called immediately before a local tool is invoked."""
66
66
  pass
67
67
 
68
68
  async def on_tool_end(
@@ -72,7 +72,7 @@ class RunHooksBase(Generic[TContext, TAgent]):
72
72
  tool: Tool,
73
73
  result: str,
74
74
  ) -> None:
75
- """Called after a tool is invoked."""
75
+ """Called immediately after a local tool is invoked."""
76
76
  pass
77
77
 
78
78
 
@@ -113,7 +113,7 @@ class AgentHooksBase(Generic[TContext, TAgent]):
113
113
  agent: TAgent,
114
114
  tool: Tool,
115
115
  ) -> None:
116
- """Called concurrently with tool invocation."""
116
+ """Called immediately before a local tool is invoked."""
117
117
  pass
118
118
 
119
119
  async def on_tool_end(
@@ -123,7 +123,7 @@ class AgentHooksBase(Generic[TContext, TAgent]):
123
123
  tool: Tool,
124
124
  result: str,
125
125
  ) -> None:
126
- """Called after a tool is invoked."""
126
+ """Called immediately after a local tool is invoked."""
127
127
  pass
128
128
 
129
129
  async def on_llm_start(
@@ -51,6 +51,8 @@ from ..model_settings import MCPToolChoice
51
51
  from ..tool import FunctionTool, Tool
52
52
  from .fake_id import FAKE_RESPONSES_ID
53
53
 
54
+ ResponseInputContentWithAudioParam = Union[ResponseInputContentParam, ResponseInputAudioParam]
55
+
54
56
 
55
57
  class Converter:
56
58
  @classmethod
@@ -136,7 +138,9 @@ class Converter:
136
138
  )
137
139
  if message.content:
138
140
  message_item.content.append(
139
- ResponseOutputText(text=message.content, type="output_text", annotations=[])
141
+ ResponseOutputText(
142
+ text=message.content, type="output_text", annotations=[], logprobs=[]
143
+ )
140
144
  )
141
145
  if message.refusal:
142
146
  message_item.content.append(
@@ -246,7 +250,7 @@ class Converter:
246
250
 
247
251
  @classmethod
248
252
  def extract_text_content(
249
- cls, content: str | Iterable[ResponseInputContentParam]
253
+ cls, content: str | Iterable[ResponseInputContentWithAudioParam]
250
254
  ) -> str | list[ChatCompletionContentPartTextParam]:
251
255
  all_content = cls.extract_all_content(content)
252
256
  if isinstance(all_content, str):
@@ -259,7 +263,7 @@ class Converter:
259
263
 
260
264
  @classmethod
261
265
  def extract_all_content(
262
- cls, content: str | Iterable[ResponseInputContentParam]
266
+ cls, content: str | Iterable[ResponseInputContentWithAudioParam]
263
267
  ) -> str | list[ChatCompletionContentPartParam]:
264
268
  if isinstance(content, str):
265
269
  return content
@@ -535,7 +539,7 @@ class Converter:
535
539
  elif func_output := cls.maybe_function_tool_call_output(item):
536
540
  flush_assistant_message()
537
541
  output_content = cast(
538
- Union[str, Iterable[ResponseInputContentParam]], func_output["output"]
542
+ Union[str, Iterable[ResponseInputContentWithAudioParam]], func_output["output"]
539
543
  )
540
544
  msg: ChatCompletionToolMessageParam = {
541
545
  "role": "tool",
@@ -231,6 +231,7 @@ class ChatCmplStreamHandler:
231
231
  text="",
232
232
  type="output_text",
233
233
  annotations=[],
234
+ logprobs=[],
234
235
  ),
235
236
  )
236
237
  # Start a new assistant message stream
@@ -258,6 +259,7 @@ class ChatCmplStreamHandler:
258
259
  text="",
259
260
  type="output_text",
260
261
  annotations=[],
262
+ logprobs=[],
261
263
  ),
262
264
  type="response.content_part.added",
263
265
  sequence_number=sequence_number.get_and_increment(),
@@ -309,12 +311,10 @@ class ChatCmplStreamHandler:
309
311
  yield ResponseContentPartAddedEvent(
310
312
  content_index=state.refusal_content_index_and_output[0],
311
313
  item_id=FAKE_RESPONSES_ID,
312
- output_index=state.reasoning_content_index_and_output
313
- is not None, # fixed 0 -> 0 or 1
314
- part=ResponseOutputText(
315
- text="",
316
- type="output_text",
317
- annotations=[],
314
+ output_index=(1 if state.reasoning_content_index_and_output else 0),
315
+ part=ResponseOutputRefusal(
316
+ refusal="",
317
+ type="refusal",
318
318
  ),
319
319
  type="response.content_part.added",
320
320
  sequence_number=sequence_number.get_and_increment(),
agents/realtime/events.py CHANGED
@@ -69,6 +69,10 @@ class RealtimeToolStart:
69
69
  """The agent that updated."""
70
70
 
71
71
  tool: Tool
72
+ """The tool being called."""
73
+
74
+ arguments: str
75
+ """The arguments passed to the tool as a JSON string."""
72
76
 
73
77
  info: RealtimeEventInfo
74
78
  """Common info for all events, such as the context."""
@@ -86,6 +90,9 @@ class RealtimeToolEnd:
86
90
  tool: Tool
87
91
  """The tool that was called."""
88
92
 
93
+ arguments: str
94
+ """The arguments passed to the tool as a JSON string."""
95
+
89
96
  output: Any
90
97
  """The output of the tool call."""
91
98
 
agents/realtime/model.py CHANGED
@@ -139,6 +139,13 @@ class RealtimeModelConfig(TypedDict):
139
139
  is played to the user.
140
140
  """
141
141
 
142
+ call_id: NotRequired[str]
143
+ """Attach to an existing realtime call instead of creating a new session.
144
+
145
+ When provided, the transport connects using the `call_id` query string parameter rather than a
146
+ model name. This is used for SIP-originated calls that are accepted via the Realtime Calls API.
147
+ """
148
+
142
149
 
143
150
  class RealtimeModel(abc.ABC):
144
151
  """Interface for connecting to a realtime model and sending/receiving events."""
@@ -208,7 +208,18 @@ class OpenAIRealtimeWebSocketModel(RealtimeModel):
208
208
 
209
209
  self._playback_tracker = options.get("playback_tracker", None)
210
210
 
211
- self.model = model_settings.get("model_name", self.model)
211
+ call_id = options.get("call_id")
212
+ model_name = model_settings.get("model_name")
213
+ if call_id and model_name:
214
+ error_message = (
215
+ "Cannot specify both `call_id` and `model_name` "
216
+ "when attaching to an existing realtime call."
217
+ )
218
+ raise UserError(error_message)
219
+
220
+ if model_name:
221
+ self.model = model_name
222
+
212
223
  api_key = await get_api_key(options.get("api_key"))
213
224
 
214
225
  if "tracing" in model_settings:
@@ -216,7 +227,10 @@ class OpenAIRealtimeWebSocketModel(RealtimeModel):
216
227
  else:
217
228
  self._tracing_config = "auto"
218
229
 
219
- url = options.get("url", f"wss://api.openai.com/v1/realtime?model={self.model}")
230
+ if call_id:
231
+ url = options.get("url", f"wss://api.openai.com/v1/realtime?call_id={call_id}")
232
+ else:
233
+ url = options.get("url", f"wss://api.openai.com/v1/realtime?model={self.model}")
220
234
 
221
235
  headers: dict[str, str] = {}
222
236
  if options.get("headers") is not None:
@@ -629,8 +643,9 @@ class OpenAIRealtimeWebSocketModel(RealtimeModel):
629
643
  )
630
644
  if not automatic_response_cancellation_enabled:
631
645
  await self._cancel_response()
632
- # Avoid sending conversation.item.truncate here; when GA is set to
633
- # interrupt on VAD start, the server will handle truncation.
646
+ # Avoid sending conversation.item.truncate here. When the session's
647
+ # turn_detection.interrupt_response is enabled (GA default), the server emits
648
+ # conversation.item.truncated after the VAD start and takes care of history updates.
634
649
  elif parsed.type == "response.created":
635
650
  self._ongoing_response = True
636
651
  await self._emit_event(RealtimeModelTurnStartedEvent())
@@ -929,6 +944,18 @@ class OpenAIRealtimeWebSocketModel(RealtimeModel):
929
944
  return converted_tools
930
945
 
931
946
 
947
+ class OpenAIRealtimeSIPModel(OpenAIRealtimeWebSocketModel):
948
+ """Realtime model that attaches to SIP-originated calls using a call ID."""
949
+
950
+ async def connect(self, options: RealtimeModelConfig) -> None:
951
+ call_id = options.get("call_id")
952
+ if not call_id:
953
+ raise UserError("OpenAIRealtimeSIPModel requires `call_id` in the model configuration.")
954
+
955
+ sip_options = options.copy()
956
+ await super().connect(sip_options)
957
+
958
+
932
959
  class _ConversionHelper:
933
960
  @classmethod
934
961
  def conversation_item_to_realtime_message_item(
@@ -411,6 +411,7 @@ class RealtimeSession(RealtimeModelListener):
411
411
  info=self._event_info,
412
412
  tool=function_map[event.name],
413
413
  agent=agent,
414
+ arguments=event.arguments,
414
415
  )
415
416
  )
416
417
 
@@ -436,6 +437,7 @@ class RealtimeSession(RealtimeModelListener):
436
437
  tool=func_tool,
437
438
  output=result,
438
439
  agent=agent,
440
+ arguments=event.arguments,
439
441
  )
440
442
  )
441
443
  elif event.name in handoff_map:
agents/run.py CHANGED
@@ -1,8 +1,10 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import asyncio
4
+ import contextlib
4
5
  import inspect
5
6
  import os
7
+ import warnings
6
8
  from dataclasses import dataclass, field
7
9
  from typing import Any, Callable, Generic, cast, get_args
8
10
 
@@ -720,7 +722,40 @@ class AgentRunner:
720
722
  conversation_id = kwargs.get("conversation_id")
721
723
  session = kwargs.get("session")
722
724
 
723
- return asyncio.get_event_loop().run_until_complete(
725
+ # Python 3.14 stopped implicitly wiring up a default event loop
726
+ # when synchronous code touches asyncio APIs for the first time.
727
+ # Several of our synchronous entry points (for example the Redis/SQLAlchemy session helpers)
728
+ # construct asyncio primitives like asyncio.Lock during __init__,
729
+ # which binds them to whatever loop happens to be the thread's default at that moment.
730
+ # To keep those locks usable we must ensure that run_sync reuses that same default loop
731
+ # instead of hopping over to a brand-new asyncio.run() loop.
732
+ try:
733
+ already_running_loop = asyncio.get_running_loop()
734
+ except RuntimeError:
735
+ already_running_loop = None
736
+
737
+ if already_running_loop is not None:
738
+ # This method is only expected to run when no loop is already active.
739
+ # (Each thread has its own default loop; concurrent sync runs should happen on
740
+ # different threads. In a single thread use the async API to interleave work.)
741
+ raise RuntimeError(
742
+ "AgentRunner.run_sync() cannot be called when an event loop is already running."
743
+ )
744
+
745
+ policy = asyncio.get_event_loop_policy()
746
+ with warnings.catch_warnings():
747
+ warnings.simplefilter("ignore", DeprecationWarning)
748
+ try:
749
+ default_loop = policy.get_event_loop()
750
+ except RuntimeError:
751
+ default_loop = policy.new_event_loop()
752
+ policy.set_event_loop(default_loop)
753
+
754
+ # We intentionally leave the default loop open even if we had to create one above. Session
755
+ # instances and other helpers stash loop-bound primitives between calls and expect to find
756
+ # the same default loop every time run_sync is invoked on this thread.
757
+ # Schedule the async run on the default loop so that we can manage cancellation explicitly.
758
+ task = default_loop.create_task(
724
759
  self.run(
725
760
  starting_agent,
726
761
  input,
@@ -734,6 +769,24 @@ class AgentRunner:
734
769
  )
735
770
  )
736
771
 
772
+ try:
773
+ # Drive the coroutine to completion, harvesting the final RunResult.
774
+ return default_loop.run_until_complete(task)
775
+ except BaseException:
776
+ # If the sync caller aborts (KeyboardInterrupt, etc.), make sure the scheduled task
777
+ # does not linger on the shared loop by cancelling it and waiting for completion.
778
+ if not task.done():
779
+ task.cancel()
780
+ with contextlib.suppress(asyncio.CancelledError):
781
+ default_loop.run_until_complete(task)
782
+ raise
783
+ finally:
784
+ if not default_loop.is_closed():
785
+ # The loop stays open for subsequent runs, but we still need to flush any pending
786
+ # async generators so their cleanup code executes promptly.
787
+ with contextlib.suppress(RuntimeError):
788
+ default_loop.run_until_complete(default_loop.shutdown_asyncgens())
789
+
737
790
  def run_streamed(
738
791
  self,
739
792
  starting_agent: Agent[TContext],
agents/usage.py CHANGED
@@ -4,6 +4,26 @@ from openai.types.responses.response_usage import InputTokensDetails, OutputToke
4
4
  from pydantic.dataclasses import dataclass
5
5
 
6
6
 
7
+ @dataclass
8
+ class RequestUsage:
9
+ """Usage details for a single API request."""
10
+
11
+ input_tokens: int
12
+ """Input tokens for this individual request."""
13
+
14
+ output_tokens: int
15
+ """Output tokens for this individual request."""
16
+
17
+ total_tokens: int
18
+ """Total tokens (input + output) for this individual request."""
19
+
20
+ input_tokens_details: InputTokensDetails
21
+ """Details about the input tokens for this individual request."""
22
+
23
+ output_tokens_details: OutputTokensDetails
24
+ """Details about the output tokens for this individual request."""
25
+
26
+
7
27
  @dataclass
8
28
  class Usage:
9
29
  requests: int = 0
@@ -27,7 +47,37 @@ class Usage:
27
47
  total_tokens: int = 0
28
48
  """Total tokens sent and received, across all requests."""
29
49
 
50
+ request_usage_entries: list[RequestUsage] = field(default_factory=list)
51
+ """List of RequestUsage entries for accurate per-request cost calculation.
52
+
53
+ Each call to `add()` automatically creates an entry in this list if the added usage
54
+ represents a new request (i.e., has non-zero tokens).
55
+
56
+ Example:
57
+ For a run that makes 3 API calls with 100K, 150K, and 80K input tokens each,
58
+ the aggregated `input_tokens` would be 330K, but `request_usage_entries` would
59
+ preserve the [100K, 150K, 80K] breakdown, which could be helpful for detailed
60
+ cost calculation or context window management.
61
+ """
62
+
63
+ def __post_init__(self) -> None:
64
+ # Some providers don't populate optional token detail fields
65
+ # (cached_tokens, reasoning_tokens), and the OpenAI SDK's generated
66
+ # code can bypass Pydantic validation (e.g., via model_construct),
67
+ # allowing None values. We normalize these to 0 to prevent TypeErrors.
68
+ if self.input_tokens_details.cached_tokens is None:
69
+ self.input_tokens_details = InputTokensDetails(cached_tokens=0)
70
+ if self.output_tokens_details.reasoning_tokens is None:
71
+ self.output_tokens_details = OutputTokensDetails(reasoning_tokens=0)
72
+
30
73
  def add(self, other: "Usage") -> None:
74
+ """Add another Usage object to this one, aggregating all fields.
75
+
76
+ This method automatically preserves request_usage_entries.
77
+
78
+ Args:
79
+ other: The Usage object to add to this one.
80
+ """
31
81
  self.requests += other.requests if other.requests else 0
32
82
  self.input_tokens += other.input_tokens if other.input_tokens else 0
33
83
  self.output_tokens += other.output_tokens if other.output_tokens else 0
@@ -41,3 +91,18 @@ class Usage:
41
91
  reasoning_tokens=self.output_tokens_details.reasoning_tokens
42
92
  + other.output_tokens_details.reasoning_tokens
43
93
  )
94
+
95
+ # Automatically preserve request_usage_entries.
96
+ # If the other Usage represents a single request with tokens, record it.
97
+ if other.requests == 1 and other.total_tokens > 0:
98
+ request_usage = RequestUsage(
99
+ input_tokens=other.input_tokens,
100
+ output_tokens=other.output_tokens,
101
+ total_tokens=other.total_tokens,
102
+ input_tokens_details=other.input_tokens_details,
103
+ output_tokens_details=other.output_tokens_details,
104
+ )
105
+ self.request_usage_entries.append(request_usage)
106
+ elif other.request_usage_entries:
107
+ # If the other Usage already has individual request breakdowns, merge them.
108
+ self.request_usage_entries.extend(other.request_usage_entries)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: openai-agents
3
- Version: 0.4.2
3
+ Version: 0.5.0
4
4
  Summary: OpenAI Agents SDK
5
5
  Project-URL: Homepage, https://openai.github.io/openai-agents-python/
6
6
  Project-URL: Repository, https://github.com/openai/openai-agents-python
@@ -16,16 +16,20 @@ Classifier: Programming Language :: Python :: 3.10
16
16
  Classifier: Programming Language :: Python :: 3.11
17
17
  Classifier: Programming Language :: Python :: 3.12
18
18
  Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3.14
19
20
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
20
21
  Classifier: Typing :: Typed
21
22
  Requires-Python: >=3.9
22
23
  Requires-Dist: griffe<2,>=1.5.6
23
24
  Requires-Dist: mcp<2,>=1.11.0; python_version >= '3.10'
24
- Requires-Dist: openai<3,>=2.2
25
- Requires-Dist: pydantic<3,>=2.10
25
+ Requires-Dist: openai<3,>=2.7.1
26
+ Requires-Dist: pydantic<3,>=2.12.3
26
27
  Requires-Dist: requests<3,>=2.0
27
28
  Requires-Dist: types-requests<3,>=2.0
28
29
  Requires-Dist: typing-extensions<5,>=4.12.2
30
+ Provides-Extra: dapr
31
+ Requires-Dist: dapr>=1.16.0; extra == 'dapr'
32
+ Requires-Dist: grpcio>=1.60.0; extra == 'dapr'
29
33
  Provides-Extra: encrypt
30
34
  Requires-Dist: cryptography<46,>=45.0; extra == 'encrypt'
31
35
  Provides-Extra: litellm
@@ -352,6 +356,12 @@ make lint # run linter
352
356
  make format-check # run style checker
353
357
  ```
354
358
 
359
+ Format code if `make format-check` fails above by running:
360
+
361
+ ```
362
+ make format
363
+ ```
364
+
355
365
  ## Acknowledgements
356
366
 
357
367
  We'd like to acknowledge the excellent work of the open-source community, especially:
@@ -10,28 +10,29 @@ agents/function_schema.py,sha256=njtbLt44DOkIU0a0U8TeDNEx-iQZU8oohwy3k7-k4A8,148
10
10
  agents/guardrail.py,sha256=7P-kd9rKPhgB8rtI31MCV5ho4ZrEaNCQxHvE8IK3EOk,9582
11
11
  agents/handoffs.py,sha256=kDTM3nj3E_0khiJPMJAIN00gektMTRNbaYSbc5ZCnBM,11411
12
12
  agents/items.py,sha256=YoAhxwb2PSgClGGWrkTIWufAxg0F1cS1KohLYonwz6I,14370
13
- agents/lifecycle.py,sha256=hGsqzumOSaal6oAjTqTfvBXl-ShAOkC42sthJigB5Fg,4308
13
+ agents/lifecycle.py,sha256=2dhFi8CBH-EHDiAnj-_h1UmZ2ayC8Tmv5K61P-Kem9w,4362
14
14
  agents/logger.py,sha256=p_ef7vWKpBev5FFybPJjhrCCQizK08Yy1A2EDO1SNNg,60
15
15
  agents/model_settings.py,sha256=7Ul-Xg-aNVXIbK6V4Rm2t5EEfNR0tsy_A9ac_wFqLLk,6828
16
16
  agents/prompts.py,sha256=Ss5y_7s2HFcRAOAKu4WTxQszs5ybI8TfbxgEYdnj9sg,2231
17
17
  agents/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
18
18
  agents/repl.py,sha256=NX0BE5YDnmGQ2rdQsmLm3CKkQZ5m4GC95xXmUsAXJVs,2539
19
19
  agents/result.py,sha256=FW3-fsYOIJrn7pjiDjWPHN58pPpYfNoFNTympFV_96k,13963
20
- agents/run.py,sha256=I3KwURXqqQwDYF3gCN9KmpIpxakJhwIj6NnYvxvvd5I,74925
20
+ agents/run.py,sha256=g4Z8I28pKaefbMxB0rNVqT4JBbLqHNYSH8_W1WHHmmM,77816
21
21
  agents/run_context.py,sha256=vuSUQM8O4CLensQY27-22fOqECnw7yvwL9U3WO8b_bk,851
22
22
  agents/stream_events.py,sha256=vW7O5T6iwFuRFvds1Bq3zMB60fRCz7lWryMkHSL-bAo,1733
23
23
  agents/strict_schema.py,sha256=HFm4j753-UKDfJ0zSiQYf5V1qGuHY6TRm2zzwI0f0E0,6382
24
24
  agents/tool.py,sha256=yNCqv4gu1g0Gxrt9YPYdSkZaWlO3d84vlEwxOegVRng,20304
25
25
  agents/tool_context.py,sha256=g53mgaeX7kCwPaIReiwuUejD8qC7QejMS-F3Wnkuhhg,1866
26
26
  agents/tool_guardrails.py,sha256=2uXEr_R5AWy9NHtBjd7G7upc3uZSuoP86Hfsc-qTadM,8344
27
- agents/usage.py,sha256=Tb5udGd3DPgD0JBdRD8fDctTE4M-zKML5uRn8ZG1yBc,1675
27
+ agents/usage.py,sha256=azQmy1Oyx0vk6dpq-gSAFPvP5L54zIM2CwbJbozTOR0,4517
28
28
  agents/version.py,sha256=_1knUwzSK-HUeZTpRUkk6Z-CIcurqXuEplbV5TLJ08E,230
29
29
  agents/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
30
  agents/extensions/handoff_filters.py,sha256=CS-k7TGCtT8TW3GeXb04OoFBXKdjg8-85QXswWAYBmI,2095
31
31
  agents/extensions/handoff_prompt.py,sha256=oGWN0uNh3Z1L7E-Ev2up8W084fFrDNOsLDy7P6bcmic,1006
32
32
  agents/extensions/visualization.py,sha256=sf9D_C-HMwkbWdZccTZvvMPRy_NSiwbm48tRJlESQBI,5144
33
- agents/extensions/memory/__init__.py,sha256=jhx0pa1TVRFooiI0ERRhklkaeiZ-NDGA_b9tJXp3VfM,2201
33
+ agents/extensions/memory/__init__.py,sha256=ckEgIcbSh7ptmJzoHtVI7YeC6TjI3IkxrYKG-NQApBA,3498
34
34
  agents/extensions/memory/advanced_sqlite_session.py,sha256=rCrXM878foAuBN-rN2fibP2GHs-1hTtRx-TQcDKIfGI,52883
35
+ agents/extensions/memory/dapr_session.py,sha256=AZ2fOsHLqDIcS8Ash9jRYRsqB7lpcV1zJLeIMNVe41I,16663
35
36
  agents/extensions/memory/encrypt_session.py,sha256=PVnZIEj50bjUq16OLnMKrbZiinLkrVpamPPEw8RnUCA,6485
36
37
  agents/extensions/memory/redis_session.py,sha256=JwXY6zUTMgq9bRezlyFZ4Tze7DO7T0hioTc23qjSHjU,9838
37
38
  agents/extensions/memory/sqlalchemy_session.py,sha256=fnlZkNF_XZekP44uhiR4rjlCkwG7JJEiFm35TJfiCtc,12325
@@ -48,9 +49,9 @@ agents/memory/sqlite_session.py,sha256=6HGzSL70mQgutITIPZUC2x2Qtj6U4hXiZTceu3Da7
48
49
  agents/memory/util.py,sha256=ZAHOrNVA36xICFzuNgHgEA1_s_oEMO6Wsu6-EecY8JU,586
49
50
  agents/models/__init__.py,sha256=E0XVqWayVAsFqxucDLBW30siaqfNQsVrAnfidG_C3ok,287
50
51
  agents/models/_openai_shared.py,sha256=4Ngwo2Fv2RXY61Pqck1cYPkSln2tDnb8Ai-ao4QG-iE,836
51
- agents/models/chatcmpl_converter.py,sha256=qEobLnIJjrK6WRi_tsVkrDrGq78EGro3MZXlVMpMK2c,26011
52
+ agents/models/chatcmpl_converter.py,sha256=cwc2JSQpsYzzc_Pk-j_wcQWArWnHWOHglE5N_vdPv5Y,26185
52
53
  agents/models/chatcmpl_helpers.py,sha256=YC2krp_-uBgRCrCEImLjNvONTWRWfwLlPKHI4kBmNXE,1483
53
- agents/models/chatcmpl_stream_handler.py,sha256=1h0esxmnlBk9NwDjjwSlWYzjzuMgIpMLtRU9kaszfyg,29212
54
+ agents/models/chatcmpl_stream_handler.py,sha256=_ZlFgwoiBOHMHbN-WN3hN5lTIEy04M3uoWctDb4H4VM,29209
54
55
  agents/models/default_models.py,sha256=mlvBePn8H4UkHo7lN-wh7A3k2ciLgBUFKpROQxzdTfs,2098
55
56
  agents/models/fake_id.py,sha256=lbXjUUSMeAQ8eFx4V5QLUnBClHE6adJlYYav55RlG5w,268
56
57
  agents/models/interface.py,sha256=-AFUHC8iRuGZmtQwguDw4s-M4OPL2y2mct4TAmWvVrU,4057
@@ -65,15 +66,15 @@ agents/realtime/_util.py,sha256=ehBzUN1RTD2m2TXq73Jm4WohQzJ6y_MfnF5MaK8uu14,341
65
66
  agents/realtime/agent.py,sha256=bkegBJ_lc3z3NtnlIyEkVZFxZWBJwVjsQVzpQZAu7PM,4283
66
67
  agents/realtime/audio_formats.py,sha256=DBUWVVff4XY5BT6Mol86tF4PFMp5OIS3LmAbqUmQn_k,1019
67
68
  agents/realtime/config.py,sha256=vnjgkeZXcOSLFopoAiGj4Vki_75pEJIKTagJtQpCWmg,7072
68
- agents/realtime/events.py,sha256=eANiNNyYlp_1Ybdl-MOwXRVTDtrK9hfgn6iw0xNxnaY,5889
69
+ agents/realtime/events.py,sha256=BkktfS4cCpz53Fn6Di-8kgRXlxzE9NvzqJFevDVE3uc,6084
69
70
  agents/realtime/handoffs.py,sha256=iJ4lr5RVdDkw5W3_AOGB_Az-hlRt1CoFFFNFDfd3ues,6698
70
71
  agents/realtime/items.py,sha256=5EG768FkKpbk-dhe4b_7BfFpdUEFWtxoiVUtNI9KXsc,5517
71
- agents/realtime/model.py,sha256=Lnb9pEcvnlIdXJUcldVyioaX5lpmrBou5FZoNJe4XfA,6457
72
+ agents/realtime/model.py,sha256=jVZBhPRc2yDQfAFn2pqnnVNtkgsguKS8qO-KbQCtuEs,6774
72
73
  agents/realtime/model_events.py,sha256=2NKofzLszKHwtlcsogsNnH6hdeFfO7S96yWDB4AlxB8,4340
73
74
  agents/realtime/model_inputs.py,sha256=-pl8Oj0WVrA5Gt-dqP5Va3ZHqXyIXpsjMsf9UL-suEY,2789
74
- agents/realtime/openai_realtime.py,sha256=jN3OvcEQt9X-59t6InllkOOEd8Tdw69K5vuKfXBeObg,44763
75
+ agents/realtime/openai_realtime.py,sha256=5tLF_gglC1GAKcJNjleSPdVZbM7656SxtqnLzVynrpk,45806
75
76
  agents/realtime/runner.py,sha256=KfU7utmc9QFH2htIKN2IN9H-5EnB0qN9ezmvlRTnOm4,2511
76
- agents/realtime/session.py,sha256=79WqKWwGOsutQRLs7fDsijE-OxEJjGm-aOpjL5F7Fn8,36983
77
+ agents/realtime/session.py,sha256=TmMZdw-1qpQh22Oa_XDy1T6NrnPnGXXRAZ-yKKKFj_w,37077
77
78
  agents/tracing/__init__.py,sha256=5HO_6na5S6EwICgwl50OMtxiIIosUrqalhvldlYvSVc,2991
78
79
  agents/tracing/create.py,sha256=xpJ4ZRnGyUDPKoVVkA_8hmdhtwOKGhSkwRco2AQIhAo,18003
79
80
  agents/tracing/logger.py,sha256=J4KUDRSGa7x5UVfUwWe-gbKwoaq8AeETRqkPt3QvtGg,68
@@ -108,7 +109,7 @@ agents/voice/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
108
109
  agents/voice/models/openai_model_provider.py,sha256=Khn0uT-VhsEbe7_OhBMGFQzXNwL80gcWZyTHl3CaBII,3587
109
110
  agents/voice/models/openai_stt.py,sha256=Lb_F9160VNKDHXZ9zylSzeig7sB8lBjiYhQLDZsp6NQ,17257
110
111
  agents/voice/models/openai_tts.py,sha256=4KoLQuFDHKu5a1VTJlu9Nj3MHwMlrn9wfT_liJDJ2dw,1477
111
- openai_agents-0.4.2.dist-info/METADATA,sha256=UUyVoFXNYwTLrBnkpo7MFwT73-kJH0rQX53xwF3pFXw,13046
112
- openai_agents-0.4.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
113
- openai_agents-0.4.2.dist-info/licenses/LICENSE,sha256=E994EspT7Krhy0qGiES7WYNzBHrh1YDk3r--8d1baRU,1063
114
- openai_agents-0.4.2.dist-info/RECORD,,
112
+ openai_agents-0.5.0.dist-info/METADATA,sha256=GcCRv8byD4gVjMUNanTpvOaCeAbNxQLN1gPncRMHbBI,13295
113
+ openai_agents-0.5.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
114
+ openai_agents-0.5.0.dist-info/licenses/LICENSE,sha256=E994EspT7Krhy0qGiES7WYNzBHrh1YDk3r--8d1baRU,1063
115
+ openai_agents-0.5.0.dist-info/RECORD,,