openai-agents 0.0.14__py3-none-any.whl → 0.0.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of openai-agents might be problematic. Click here for more details.

@@ -269,6 +269,8 @@ class LitellmModel(Model):
269
269
  extra_kwargs["extra_query"] = model_settings.extra_query
270
270
  if model_settings.metadata:
271
271
  extra_kwargs["metadata"] = model_settings.metadata
272
+ if model_settings.extra_body and isinstance(model_settings.extra_body, dict):
273
+ extra_kwargs.update(model_settings.extra_body)
272
274
 
273
275
  ret = await litellm.acompletion(
274
276
  model=self.model,
agents/mcp/__init__.py CHANGED
@@ -5,6 +5,8 @@ try:
5
5
  MCPServerSseParams,
6
6
  MCPServerStdio,
7
7
  MCPServerStdioParams,
8
+ MCPServerStreamableHttp,
9
+ MCPServerStreamableHttpParams,
8
10
  )
9
11
  except ImportError:
10
12
  pass
@@ -17,5 +19,7 @@ __all__ = [
17
19
  "MCPServerSseParams",
18
20
  "MCPServerStdio",
19
21
  "MCPServerStdioParams",
22
+ "MCPServerStreamableHttp",
23
+ "MCPServerStreamableHttpParams",
20
24
  "MCPUtil",
21
25
  ]
agents/mcp/server.py CHANGED
@@ -10,7 +10,9 @@ from typing import Any, Literal
10
10
  from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
11
11
  from mcp import ClientSession, StdioServerParameters, Tool as MCPTool, stdio_client
12
12
  from mcp.client.sse import sse_client
13
- from mcp.types import CallToolResult, JSONRPCMessage
13
+ from mcp.client.streamable_http import GetSessionIdCallback, streamablehttp_client
14
+ from mcp.shared.message import SessionMessage
15
+ from mcp.types import CallToolResult
14
16
  from typing_extensions import NotRequired, TypedDict
15
17
 
16
18
  from ..exceptions import UserError
@@ -83,8 +85,9 @@ class _MCPServerWithClientSession(MCPServer, abc.ABC):
83
85
  self,
84
86
  ) -> AbstractAsyncContextManager[
85
87
  tuple[
86
- MemoryObjectReceiveStream[JSONRPCMessage | Exception],
87
- MemoryObjectSendStream[JSONRPCMessage],
88
+ MemoryObjectReceiveStream[SessionMessage | Exception],
89
+ MemoryObjectSendStream[SessionMessage],
90
+ GetSessionIdCallback | None
88
91
  ]
89
92
  ]:
90
93
  """Create the streams for the server."""
@@ -105,7 +108,11 @@ class _MCPServerWithClientSession(MCPServer, abc.ABC):
105
108
  """Connect to the server."""
106
109
  try:
107
110
  transport = await self.exit_stack.enter_async_context(self.create_streams())
108
- read, write = transport
111
+ # streamablehttp_client returns (read, write, get_session_id)
112
+ # sse_client returns (read, write)
113
+
114
+ read, write, *_ = transport
115
+
109
116
  session = await self.exit_stack.enter_async_context(
110
117
  ClientSession(
111
118
  read,
@@ -232,8 +239,9 @@ class MCPServerStdio(_MCPServerWithClientSession):
232
239
  self,
233
240
  ) -> AbstractAsyncContextManager[
234
241
  tuple[
235
- MemoryObjectReceiveStream[JSONRPCMessage | Exception],
236
- MemoryObjectSendStream[JSONRPCMessage],
242
+ MemoryObjectReceiveStream[SessionMessage | Exception],
243
+ MemoryObjectSendStream[SessionMessage],
244
+ GetSessionIdCallback | None
237
245
  ]
238
246
  ]:
239
247
  """Create the streams for the server."""
@@ -302,8 +310,9 @@ class MCPServerSse(_MCPServerWithClientSession):
302
310
  self,
303
311
  ) -> AbstractAsyncContextManager[
304
312
  tuple[
305
- MemoryObjectReceiveStream[JSONRPCMessage | Exception],
306
- MemoryObjectSendStream[JSONRPCMessage],
313
+ MemoryObjectReceiveStream[SessionMessage | Exception],
314
+ MemoryObjectSendStream[SessionMessage],
315
+ GetSessionIdCallback | None
307
316
  ]
308
317
  ]:
309
318
  """Create the streams for the server."""
@@ -318,3 +327,84 @@ class MCPServerSse(_MCPServerWithClientSession):
318
327
  def name(self) -> str:
319
328
  """A readable name for the server."""
320
329
  return self._name
330
+
331
+
332
+ class MCPServerStreamableHttpParams(TypedDict):
333
+ """Mirrors the params in `mcp.client.streamable_http.streamablehttp_client`."""
334
+
335
+ url: str
336
+ """The URL of the server."""
337
+
338
+ headers: NotRequired[dict[str, str]]
339
+ """The headers to send to the server."""
340
+
341
+ timeout: NotRequired[timedelta]
342
+ """The timeout for the HTTP request. Defaults to 30 seconds."""
343
+
344
+ sse_read_timeout: NotRequired[timedelta]
345
+ """The timeout for the SSE connection. Defaults to 5 minutes."""
346
+
347
+ terminate_on_close: NotRequired[bool]
348
+ """Terminate on close"""
349
+
350
+
351
+ class MCPServerStreamableHttp(_MCPServerWithClientSession):
352
+ """MCP server implementation that uses the Streamable HTTP transport. See the [spec]
353
+ (https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http)
354
+ for details.
355
+ """
356
+
357
+ def __init__(
358
+ self,
359
+ params: MCPServerStreamableHttpParams,
360
+ cache_tools_list: bool = False,
361
+ name: str | None = None,
362
+ client_session_timeout_seconds: float | None = 5,
363
+ ):
364
+ """Create a new MCP server based on the Streamable HTTP transport.
365
+
366
+ Args:
367
+ params: The params that configure the server. This includes the URL of the server,
368
+ the headers to send to the server, the timeout for the HTTP request, and the
369
+ timeout for the Streamable HTTP connection and whether we need to
370
+ terminate on close.
371
+
372
+ cache_tools_list: Whether to cache the tools list. If `True`, the tools list will be
373
+ cached and only fetched from the server once. If `False`, the tools list will be
374
+ fetched from the server on each call to `list_tools()`. The cache can be
375
+ invalidated by calling `invalidate_tools_cache()`. You should set this to `True`
376
+ if you know the server will not change its tools list, because it can drastically
377
+ improve latency (by avoiding a round-trip to the server every time).
378
+
379
+ name: A readable name for the server. If not provided, we'll create one from the
380
+ URL.
381
+
382
+ client_session_timeout_seconds: the read timeout passed to the MCP ClientSession.
383
+ """
384
+ super().__init__(cache_tools_list, client_session_timeout_seconds)
385
+
386
+ self.params = params
387
+ self._name = name or f"streamable_http: {self.params['url']}"
388
+
389
+ def create_streams(
390
+ self,
391
+ ) -> AbstractAsyncContextManager[
392
+ tuple[
393
+ MemoryObjectReceiveStream[SessionMessage | Exception],
394
+ MemoryObjectSendStream[SessionMessage],
395
+ GetSessionIdCallback | None
396
+ ]
397
+ ]:
398
+ """Create the streams for the server."""
399
+ return streamablehttp_client(
400
+ url=self.params["url"],
401
+ headers=self.params.get("headers", None),
402
+ timeout=self.params.get("timeout", timedelta(seconds=30)),
403
+ sse_read_timeout=self.params.get("sse_read_timeout", timedelta(seconds=60 * 5)),
404
+ terminate_on_close=self.params.get("terminate_on_close", True)
405
+ )
406
+
407
+ @property
408
+ def name(self) -> str:
409
+ """A readable name for the server."""
410
+ return self._name
@@ -234,7 +234,7 @@ class Converter:
234
234
  type="image_url",
235
235
  image_url={
236
236
  "url": casted_image_param["image_url"],
237
- "detail": casted_image_param["detail"],
237
+ "detail": casted_image_param.get("detail", "auto"),
238
238
  },
239
239
  )
240
240
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: openai-agents
3
- Version: 0.0.14
3
+ Version: 0.0.15
4
4
  Summary: OpenAI Agents SDK
5
5
  Project-URL: Homepage, https://github.com/openai/openai-agents-python
6
6
  Project-URL: Repository, https://github.com/openai/openai-agents-python
@@ -19,7 +19,7 @@ Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
19
  Classifier: Typing :: Typed
20
20
  Requires-Python: >=3.9
21
21
  Requires-Dist: griffe<2,>=1.5.6
22
- Requires-Dist: mcp<2,>=1.6.0; python_version >= '3.10'
22
+ Requires-Dist: mcp<2,>=1.8.0; python_version >= '3.10'
23
23
  Requires-Dist: openai>=1.76.0
24
24
  Requires-Dist: pydantic<3,>=2.10
25
25
  Requires-Dist: requests<3,>=2.0
@@ -27,14 +27,14 @@ agents/extensions/handoff_filters.py,sha256=2cXxu1JROez96CpTiGuT9PIuaIrIE8ksP01f
27
27
  agents/extensions/handoff_prompt.py,sha256=oGWN0uNh3Z1L7E-Ev2up8W084fFrDNOsLDy7P6bcmic,1006
28
28
  agents/extensions/visualization.py,sha256=AQFC7kQlZqTI6QVkyDHrF_DodCytrrhcLg35nfRd_JA,4256
29
29
  agents/extensions/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
- agents/extensions/models/litellm_model.py,sha256=sqlUA4uS6jJExf05oY-ZdhlX_97ZX47Eg5zHmR0UkyI,13631
30
+ agents/extensions/models/litellm_model.py,sha256=JV9DfS6gj9QR0NMFyX4o3Dq5QqJNQ3BksBh7ijrgWoA,13776
31
31
  agents/extensions/models/litellm_provider.py,sha256=wTm00Anq8YoNb9AnyT0JOunDG-HCDm_98ORNy7aNJdw,928
32
- agents/mcp/__init__.py,sha256=x-4ZFiXNyJPn9Nbwcai6neKgonyRJ7by67HxnOLPgrw,359
33
- agents/mcp/server.py,sha256=vB3K2GREyrQv2ikRz2m4EtEY0nynQ5X-7sLbOH3s29E,12163
32
+ agents/mcp/__init__.py,sha256=_aDpMTvYCe1IezOEasZ0vmombBM8r7BD8lpXiKi-UlM,499
33
+ agents/mcp/server.py,sha256=f4F5DSH3TfMJ1SvruRqJT_wYbWhKQbRzVhdYdAjnOnk,15748
34
34
  agents/mcp/util.py,sha256=dIEdYDMc7Sjp-DFQnvoc4VWU-B7Heyx0I41bcW7RlEg,5232
35
35
  agents/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
36
  agents/models/_openai_shared.py,sha256=4Ngwo2Fv2RXY61Pqck1cYPkSln2tDnb8Ai-ao4QG-iE,836
37
- agents/models/chatcmpl_converter.py,sha256=C4EOZQDffCRDDRGT8XEobOHJoX7ONaiETDz2MeTviHg,18114
37
+ agents/models/chatcmpl_converter.py,sha256=Sae-ITlhQz8_SiFiSat7Z-lavqIuczduOXR_PF_f6cs,18126
38
38
  agents/models/chatcmpl_helpers.py,sha256=eIWySobaH7I0AQijAz5i-_rtsXrSvmEHD567s_8Zw1o,1318
39
39
  agents/models/chatcmpl_stream_handler.py,sha256=VjskdeGnepn0iJbxsqNZrexcuAYAV1zd5hwt0lU8E7I,12452
40
40
  agents/models/fake_id.py,sha256=lbXjUUSMeAQ8eFx4V5QLUnBClHE6adJlYYav55RlG5w,268
@@ -76,7 +76,7 @@ agents/voice/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
76
76
  agents/voice/models/openai_model_provider.py,sha256=Khn0uT-VhsEbe7_OhBMGFQzXNwL80gcWZyTHl3CaBII,3587
77
77
  agents/voice/models/openai_stt.py,sha256=rRsldkvkPhH4T0waX1dhccEqIwmPYh-teK_LRvBgiNI,16882
78
78
  agents/voice/models/openai_tts.py,sha256=4KoLQuFDHKu5a1VTJlu9Nj3MHwMlrn9wfT_liJDJ2dw,1477
79
- openai_agents-0.0.14.dist-info/METADATA,sha256=OPr5u89L0F1s4_2FnlB2yBGLKZzgJ9oZ15TXItVedp4,8163
80
- openai_agents-0.0.14.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
81
- openai_agents-0.0.14.dist-info/licenses/LICENSE,sha256=E994EspT7Krhy0qGiES7WYNzBHrh1YDk3r--8d1baRU,1063
82
- openai_agents-0.0.14.dist-info/RECORD,,
79
+ openai_agents-0.0.15.dist-info/METADATA,sha256=aWuj1znysjWdTLEA_Qtou1aOsPBNRRVFXiPcIoJ4o9c,8163
80
+ openai_agents-0.0.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
81
+ openai_agents-0.0.15.dist-info/licenses/LICENSE,sha256=E994EspT7Krhy0qGiES7WYNzBHrh1YDk3r--8d1baRU,1063
82
+ openai_agents-0.0.15.dist-info/RECORD,,