openai-agents 0.0.14__tar.gz → 0.0.15__tar.gz
This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Potentially problematic release: this version of openai-agents might be problematic.
- {openai_agents-0.0.14 → openai_agents-0.0.15}/PKG-INFO +2 -2
- openai_agents-0.0.15/examples/mcp/streamablehttp_example/README.md +13 -0
- openai_agents-0.0.15/examples/mcp/streamablehttp_example/main.py +83 -0
- openai_agents-0.0.15/examples/mcp/streamablehttp_example/server.py +33 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/agents/search_agent.py +1 -1
- {openai_agents-0.0.14 → openai_agents-0.0.15}/pyproject.toml +2 -2
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/models/litellm_model.py +2 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/mcp/__init__.py +4 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/mcp/server.py +98 -8
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/chatcmpl_converter.py +1 -1
- openai_agents-0.0.15/tests/models/test_litellm_extra_body.py +45 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/uv.lock +1698 -1688
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/ISSUE_TEMPLATE/model_provider.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/ISSUE_TEMPLATE/question.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/workflows/docs.yml +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/workflows/issues.yml +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/workflows/publish.yml +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.github/workflows/tests.yml +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.gitignore +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.prettierrc +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/.vscode/settings.json +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/LICENSE +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/Makefile +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/agents.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/assets/images/favicon-platform.svg +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/assets/images/graph.png +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/assets/images/mcp-tracing.jpg +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/assets/images/orchestration.png +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/assets/logo.svg +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/config.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/context.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/examples.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/guardrails.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/handoffs.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/agents.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/config.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/context.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/examples.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/guardrails.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/handoffs.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/mcp.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/models/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/models/litellm.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/models.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/multi_agent.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/quickstart.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/results.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/running_agents.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/streaming.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/tools.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/tracing.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/visualization.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/voice/pipeline.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/voice/quickstart.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ja/voice/tracing.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/mcp.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/models/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/models/litellm.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/multi_agent.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/quickstart.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/agent.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/agent_output.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/exceptions.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/extensions/handoff_filters.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/extensions/handoff_prompt.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/extensions/litellm.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/function_schema.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/guardrail.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/handoffs.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/items.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/lifecycle.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/mcp/server.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/mcp/util.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/model_settings.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/models/interface.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/models/openai_chatcompletions.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/models/openai_responses.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/result.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/run.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/run_context.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/stream_events.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tool.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/create.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/index.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/processor_interface.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/processors.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/scope.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/setup.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/span_data.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/spans.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/traces.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/tracing/util.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/usage.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/events.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/exceptions.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/input.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/model.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/models/openai_provider.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/models/openai_stt.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/models/openai_tts.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/pipeline.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/pipeline_config.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/result.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/utils.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/ref/voice/workflow.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/results.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/running_agents.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/scripts/translate_docs.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/streaming.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/stylesheets/extra.css +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/tools.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/tracing.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/visualization.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/voice/pipeline.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/voice/quickstart.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/docs/voice/tracing.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/agents_as_tools.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/deterministic.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/forcing_tool_use.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/input_guardrails.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/llm_as_a_judge.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/output_guardrails.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/parallelization.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/routing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/agent_patterns/streaming_guardrails.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/agent_lifecycle_example.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/dynamic_system_prompt.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/hello_world.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/hello_world_jupyter.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/lifecycle_example.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/local_image.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/media/image_bison.jpg +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/non_strict_output_type.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/previous_response_id.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/remote_image.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/stream_items.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/stream_text.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/basic/tools.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/customer_service/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/financials_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/planner_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/risk_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/search_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/verifier_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/agents/writer_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/manager.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/financial_research_agent/printer.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/handoffs/message_filter.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/handoffs/message_filter_streaming.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/filesystem_example/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/filesystem_example/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/filesystem_example/sample_files/favorite_books.txt +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/filesystem_example/sample_files/favorite_cities.txt +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/filesystem_example/sample_files/favorite_songs.txt +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/git_example/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/git_example/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/sse_example/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/sse_example/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/mcp/sse_example/server.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/custom_example_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/custom_example_global.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/custom_example_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/litellm_auto.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/model_providers/litellm_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/agents/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/agents/planner_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/agents/writer_agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/manager.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/printer.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/sample_outputs/product_recs.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/sample_outputs/product_recs.txt +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/sample_outputs/vacation.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/sample_outputs/vacation.txt +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/tools/computer_use.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/tools/file_search.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/tools/web_search.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/static/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/static/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/static/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/static/util.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/streamed/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/streamed/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/streamed/main.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/examples/voice/streamed/my_workflow.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/mkdocs.yml +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/_config.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/_debug.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/_run_impl.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/agent.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/agent_output.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/computer.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/exceptions.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/handoff_filters.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/handoff_prompt.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/models/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/models/litellm_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/visualization.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/function_schema.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/guardrail.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/handoffs.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/items.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/lifecycle.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/logger.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/mcp/util.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/model_settings.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/_openai_shared.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/chatcmpl_helpers.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/chatcmpl_stream_handler.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/fake_id.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/interface.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/multi_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/openai_chatcompletions.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/openai_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/models/openai_responses.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/py.typed +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/result.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/run.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/run_context.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/stream_events.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/strict_schema.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tool.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/create.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/logger.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/processor_interface.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/processors.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/scope.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/setup.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/span_data.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/spans.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/traces.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/tracing/util.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/usage.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_coro.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_error_tracing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_json.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_pretty_print.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_transforms.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/util/_types.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/version.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/events.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/exceptions.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/imports.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/input.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/model.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/models/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/models/openai_model_provider.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/models/openai_stt.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/models/openai_tts.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/pipeline.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/pipeline_config.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/result.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/utils.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/voice/workflow.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/README.md +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/conftest.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/fake_model.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/fastapi/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/fastapi/streaming_app.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/fastapi/test_streaming_context.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/conftest.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/helpers.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_caching.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_connect_disconnect.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_mcp_tracing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_mcp_util.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_runner_calls_mcp.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/mcp/test_server_errors.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/model_settings/test_serialization.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/models/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/models/conftest.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/models/test_litellm_chatcompletions_stream.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/models/test_map.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_agent_config.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_agent_hooks.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_agent_runner.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_agent_runner_streamed.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_agent_tracing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_cancel_streaming.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_computer_action.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_config.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_doc_parsing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_extension_filters.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_extra_headers.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_function_schema.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_function_tool.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_function_tool_decorator.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_global_hooks.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_guardrails.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_handoff_tool.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_items_helpers.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_max_turns.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_openai_chatcompletions.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_openai_chatcompletions_converter.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_openai_chatcompletions_stream.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_openai_responses_converter.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_output_tool.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_pretty_print.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_responses.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_responses_tracing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_result_cast.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_run_config.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_run_step_execution.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_run_step_processing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_strict_schema.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tool_choice_reset.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tool_converter.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tool_use_behavior.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_trace_processor.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tracing.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tracing_errors.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_tracing_errors_streamed.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/test_visualization.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/testing_processor.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/tracing/test_processor_api_key.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/__init__.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/conftest.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/fake_models.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/helpers.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/test_input.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/test_openai_stt.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/test_openai_tts.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/test_pipeline.py +0 -0
- {openai_agents-0.0.14 → openai_agents-0.0.15}/tests/voice/test_workflow.py +0 -0
{openai_agents-0.0.14 → openai_agents-0.0.15}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openai-agents
-Version: 0.0.14
+Version: 0.0.15
 Summary: OpenAI Agents SDK
 Project-URL: Homepage, https://github.com/openai/openai-agents-python
 Project-URL: Repository, https://github.com/openai/openai-agents-python
@@ -19,7 +19,7 @@ Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Typing :: Typed
 Requires-Python: >=3.9
 Requires-Dist: griffe<2,>=1.5.6
-Requires-Dist: mcp<2,>=1.
+Requires-Dist: mcp<2,>=1.8.0; python_version >= '3.10'
 Requires-Dist: openai>=1.76.0
 Requires-Dist: pydantic<3,>=2.10
 Requires-Dist: requests<3,>=2.0
```
openai_agents-0.0.15/examples/mcp/streamablehttp_example/README.md (new file)

````diff
@@ -0,0 +1,13 @@
+# MCP Streamable HTTP Example
+
+This example uses a local Streamable HTTP server in [server.py](server.py).
+
+Run the example via:
+
+```
+uv run python examples/mcp/streamablehttp_example/main.py
+```
+
+## Details
+
+The example uses the `MCPServerStreamableHttp` class from `agents.mcp`. The server runs in a sub-process at `https://localhost:8000/mcp`.
````
openai_agents-0.0.15/examples/mcp/streamablehttp_example/main.py (new file)

```diff
@@ -0,0 +1,83 @@
+import asyncio
+import os
+import shutil
+import subprocess
+import time
+from typing import Any
+
+from agents import Agent, Runner, gen_trace_id, trace
+from agents.mcp import MCPServer, MCPServerStreamableHttp
+from agents.model_settings import ModelSettings
+
+
+async def run(mcp_server: MCPServer):
+    agent = Agent(
+        name="Assistant",
+        instructions="Use the tools to answer the questions.",
+        mcp_servers=[mcp_server],
+        model_settings=ModelSettings(tool_choice="required"),
+    )
+
+    # Use the `add` tool to add two numbers
+    message = "Add these numbers: 7 and 22."
+    print(f"Running: {message}")
+    result = await Runner.run(starting_agent=agent, input=message)
+    print(result.final_output)
+
+    # Run the `get_weather` tool
+    message = "What's the weather in Tokyo?"
+    print(f"\n\nRunning: {message}")
+    result = await Runner.run(starting_agent=agent, input=message)
+    print(result.final_output)
+
+    # Run the `get_secret_word` tool
+    message = "What's the secret word?"
+    print(f"\n\nRunning: {message}")
+    result = await Runner.run(starting_agent=agent, input=message)
+    print(result.final_output)
+
+
+async def main():
+    async with MCPServerStreamableHttp(
+        name="Streamable HTTP Python Server",
+        params={
+            "url": "http://localhost:8000/mcp",
+        },
+    ) as server:
+        trace_id = gen_trace_id()
+        with trace(workflow_name="Streamable HTTP Example", trace_id=trace_id):
+            print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}\n")
+            await run(server)
+
+
+if __name__ == "__main__":
+    # Let's make sure the user has uv installed
+    if not shutil.which("uv"):
+        raise RuntimeError(
+            "uv is not installed. Please install it: https://docs.astral.sh/uv/getting-started/installation/"
+        )
+
+    # We'll run the Streamable HTTP server in a subprocess. Usually this would be a remote server, but for this
+    # demo, we'll run it locally at http://localhost:8000/mcp
+    process: subprocess.Popen[Any] | None = None
+    try:
+        this_dir = os.path.dirname(os.path.abspath(__file__))
+        server_file = os.path.join(this_dir, "server.py")
+
+        print("Starting Streamable HTTP server at http://localhost:8000/mcp ...")
+
+        # Run `uv run server.py` to start the Streamable HTTP server
+        process = subprocess.Popen(["uv", "run", server_file])
+        # Give it 3 seconds to start
+        time.sleep(3)
+
+        print("Streamable HTTP server started. Running example...\n\n")
+    except Exception as e:
+        print(f"Error starting Streamable HTTP server: {e}")
+        exit(1)
+
+    try:
+        asyncio.run(main())
+    finally:
+        if process:
+            process.terminate()
```
openai_agents-0.0.15/examples/mcp/streamablehttp_example/server.py (new file)

```diff
@@ -0,0 +1,33 @@
+import random
+
+import requests
+from mcp.server.fastmcp import FastMCP
+
+# Create server
+mcp = FastMCP("Echo Server")
+
+
+@mcp.tool()
+def add(a: int, b: int) -> int:
+    """Add two numbers"""
+    print(f"[debug-server] add({a}, {b})")
+    return a + b
+
+
+@mcp.tool()
+def get_secret_word() -> str:
+    print("[debug-server] get_secret_word()")
+    return random.choice(["apple", "banana", "cherry"])
+
+
+@mcp.tool()
+def get_current_weather(city: str) -> str:
+    print(f"[debug-server] get_current_weather({city})")
+
+    endpoint = "https://wttr.in"
+    response = requests.get(f"{endpoint}/{city}")
+    return response.text
+
+
+if __name__ == "__main__":
+    mcp.run(transport="streamable-http")
```
{openai_agents-0.0.14 → openai_agents-0.0.15}/examples/research_bot/agents/search_agent.py

```diff
@@ -3,7 +3,7 @@ from agents.model_settings import ModelSettings
 
 INSTRUCTIONS = (
     "You are a research assistant. Given a search term, you search the web for that term and "
-    "produce a concise summary of the results. The summary must 2-3 paragraphs and less than 300 "
+    "produce a concise summary of the results. The summary must be 2-3 paragraphs and less than 300 "
     "words. Capture the main points. Write succinctly, no need to have complete sentences or good "
     "grammar. This will be consumed by someone synthesizing a report, so its vital you capture the "
     "essence and ignore any fluff. Do not include any additional commentary other than the summary "
```
{openai_agents-0.0.14 → openai_agents-0.0.15}/pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [project]
 name = "openai-agents"
-version = "0.0.14"
+version = "0.0.15"
 description = "OpenAI Agents SDK"
 readme = "README.md"
 requires-python = ">=3.9"
@@ -13,7 +13,7 @@ dependencies = [
     "typing-extensions>=4.12.2, <5",
     "requests>=2.0, <3",
     "types-requests>=2.0, <3",
-    "mcp>=1.
+    "mcp>=1.8.0, <2; python_version >= '3.10'",
 ]
 classifiers = [
     "Typing :: Typed",
```
{openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/extensions/models/litellm_model.py

```diff
@@ -269,6 +269,8 @@ class LitellmModel(Model):
             extra_kwargs["extra_query"] = model_settings.extra_query
         if model_settings.metadata:
             extra_kwargs["metadata"] = model_settings.metadata
+        if model_settings.extra_body and isinstance(model_settings.extra_body, dict):
+            extra_kwargs.update(model_settings.extra_body)
 
         ret = await litellm.acompletion(
             model=self.model,
```
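With this change, any entries in `ModelSettings.extra_body` are merged into the keyword arguments passed to `litellm.acompletion`. A minimal usage sketch follows; the model name, API key, and `cached_content` value are illustrative placeholders, not part of this diff, and it assumes the litellm extra is installed (`pip install "openai-agents[litellm]"`).

```python
# Sketch: forwarding provider-specific parameters through ModelSettings.extra_body.
# The model name, api_key, and cached_content value below are placeholders.
from agents import Agent, Runner
from agents.extensions.models.litellm_model import LitellmModel
from agents.model_settings import ModelSettings

agent = Agent(
    name="Assistant",
    instructions="Answer briefly.",
    model=LitellmModel(model="gemini/gemini-2.0-flash", api_key="YOUR_API_KEY"),
    # Every key in extra_body is passed straight through to litellm.acompletion().
    model_settings=ModelSettings(extra_body={"cached_content": "your-cache-id"}),
)

result = Runner.run_sync(agent, "Say hello.")
print(result.final_output)
```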
{openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/mcp/__init__.py

```diff
@@ -5,6 +5,8 @@ try:
         MCPServerSseParams,
         MCPServerStdio,
         MCPServerStdioParams,
+        MCPServerStreamableHttp,
+        MCPServerStreamableHttpParams,
     )
 except ImportError:
     pass
@@ -17,5 +19,7 @@ __all__ = [
     "MCPServerSseParams",
     "MCPServerStdio",
     "MCPServerStdioParams",
+    "MCPServerStreamableHttp",
+    "MCPServerStreamableHttpParams",
     "MCPUtil",
 ]
```
{openai_agents-0.0.14 → openai_agents-0.0.15}/src/agents/mcp/server.py

```diff
@@ -10,7 +10,9 @@ from typing import Any, Literal
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
 from mcp import ClientSession, StdioServerParameters, Tool as MCPTool, stdio_client
 from mcp.client.sse import sse_client
-from mcp.
+from mcp.client.streamable_http import GetSessionIdCallback, streamablehttp_client
+from mcp.shared.message import SessionMessage
+from mcp.types import CallToolResult
 from typing_extensions import NotRequired, TypedDict
 
 from ..exceptions import UserError
@@ -83,8 +85,9 @@ class _MCPServerWithClientSession(MCPServer, abc.ABC):
         self,
     ) -> AbstractAsyncContextManager[
         tuple[
-            MemoryObjectReceiveStream[
-            MemoryObjectSendStream[
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
+            GetSessionIdCallback | None
         ]
     ]:
         """Create the streams for the server."""
@@ -105,7 +108,11 @@ class _MCPServerWithClientSession(MCPServer, abc.ABC):
         """Connect to the server."""
         try:
             transport = await self.exit_stack.enter_async_context(self.create_streams())
-            read, write
+            # streamablehttp_client returns (read, write, get_session_id)
+            # sse_client returns (read, write)
+
+            read, write, *_ = transport
+
             session = await self.exit_stack.enter_async_context(
                 ClientSession(
                     read,
@@ -232,8 +239,9 @@ class MCPServerStdio(_MCPServerWithClientSession):
         self,
     ) -> AbstractAsyncContextManager[
         tuple[
-            MemoryObjectReceiveStream[
-            MemoryObjectSendStream[
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
+            GetSessionIdCallback | None
         ]
     ]:
         """Create the streams for the server."""
@@ -302,8 +310,9 @@ class MCPServerSse(_MCPServerWithClientSession):
         self,
     ) -> AbstractAsyncContextManager[
         tuple[
-            MemoryObjectReceiveStream[
-            MemoryObjectSendStream[
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
+            GetSessionIdCallback | None
         ]
     ]:
         """Create the streams for the server."""
@@ -318,3 +327,84 @@ class MCPServerSse(_MCPServerWithClientSession):
     def name(self) -> str:
         """A readable name for the server."""
         return self._name
+
+
+class MCPServerStreamableHttpParams(TypedDict):
+    """Mirrors the params in`mcp.client.streamable_http.streamablehttp_client`."""
+
+    url: str
+    """The URL of the server."""
+
+    headers: NotRequired[dict[str, str]]
+    """The headers to send to the server."""
+
+    timeout: NotRequired[timedelta]
+    """The timeout for the HTTP request. Defaults to 5 seconds."""
+
+    sse_read_timeout: NotRequired[timedelta]
+    """The timeout for the SSE connection, in seconds. Defaults to 5 minutes."""
+
+    terminate_on_close: NotRequired[bool]
+    """Terminate on close"""
+
+
+class MCPServerStreamableHttp(_MCPServerWithClientSession):
+    """MCP server implementation that uses the Streamable HTTP transport. See the [spec]
+    (https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http)
+    for details.
+    """
+
+    def __init__(
+        self,
+        params: MCPServerStreamableHttpParams,
+        cache_tools_list: bool = False,
+        name: str | None = None,
+        client_session_timeout_seconds: float | None = 5,
+    ):
+        """Create a new MCP server based on the Streamable HTTP transport.
+
+        Args:
+            params: The params that configure the server. This includes the URL of the server,
+                the headers to send to the server, the timeout for the HTTP request, and the
+                timeout for the Streamable HTTP connection and whether we need to
+                terminate on close.
+
+            cache_tools_list: Whether to cache the tools list. If `True`, the tools list will be
+                cached and only fetched from the server once. If `False`, the tools list will be
+                fetched from the server on each call to `list_tools()`. The cache can be
+                invalidated by calling `invalidate_tools_cache()`. You should set this to `True`
+                if you know the server will not change its tools list, because it can drastically
+                improve latency (by avoiding a round-trip to the server every time).
+
+            name: A readable name for the server. If not provided, we'll create one from the
+                URL.
+
+            client_session_timeout_seconds: the read timeout passed to the MCP ClientSession.
+        """
+        super().__init__(cache_tools_list, client_session_timeout_seconds)
+
+        self.params = params
+        self._name = name or f"streamable_http: {self.params['url']}"
+
+    def create_streams(
+        self,
+    ) -> AbstractAsyncContextManager[
+        tuple[
+            MemoryObjectReceiveStream[SessionMessage | Exception],
+            MemoryObjectSendStream[SessionMessage],
+            GetSessionIdCallback | None
+        ]
+    ]:
+        """Create the streams for the server."""
+        return streamablehttp_client(
+            url=self.params["url"],
+            headers=self.params.get("headers", None),
+            timeout=self.params.get("timeout", timedelta(seconds=30)),
+            sse_read_timeout=self.params.get("sse_read_timeout", timedelta(seconds=60 * 5)),
+            terminate_on_close=self.params.get("terminate_on_close", True)
+        )
+
+    @property
+    def name(self) -> str:
+        """A readable name for the server."""
+        return self._name
```
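Taken together, these hunks add a third transport alongside stdio and SSE. Below is a minimal sketch of wiring the new `MCPServerStreamableHttp` class into an agent; the URL, header, and timeout values are placeholders, not part of this diff, and it assumes an MCP server is already reachable at that address.

```python
# Sketch: connecting an Agent to an MCP server over the Streamable HTTP transport.
import asyncio
from datetime import timedelta

from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp


async def main() -> None:
    async with MCPServerStreamableHttp(
        name="Example Streamable HTTP server",
        params={
            "url": "http://localhost:8000/mcp",              # placeholder URL
            "headers": {"Authorization": "Bearer <token>"},  # placeholder header
            "timeout": timedelta(seconds=10),
        },
        cache_tools_list=True,  # safe when the server's tool list does not change
    ) as server:
        agent = Agent(
            name="Assistant",
            instructions="Use the MCP tools to answer questions.",
            mcp_servers=[server],
        )
        result = await Runner.run(agent, "Which tools do you have available?")
        print(result.final_output)


asyncio.run(main())
```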
openai_agents-0.0.15/tests/models/test_litellm_extra_body.py (new file)

```diff
@@ -0,0 +1,45 @@
+import litellm
+import pytest
+from litellm.types.utils import Choices, Message, ModelResponse, Usage
+
+from agents.extensions.models.litellm_model import LitellmModel
+from agents.model_settings import ModelSettings
+from agents.models.interface import ModelTracing
+
+
+@pytest.mark.allow_call_model_methods
+@pytest.mark.asyncio
+async def test_extra_body_is_forwarded(monkeypatch):
+    """
+    Forward `extra_body` entries into litellm.acompletion kwargs.
+
+    This ensures that user-provided parameters (e.g. cached_content)
+    arrive alongside default arguments.
+    """
+    captured: dict[str, object] = {}
+
+    async def fake_acompletion(model, messages=None, **kwargs):
+        captured.update(kwargs)
+        msg = Message(role="assistant", content="ok")
+        choice = Choices(index=0, message=msg)
+        return ModelResponse(choices=[choice], usage=Usage(0, 0, 0))
+
+    monkeypatch.setattr(litellm, "acompletion", fake_acompletion)
+    settings = ModelSettings(
+        temperature=0.1,
+        extra_body={"cached_content": "some_cache", "foo": 123}
+    )
+    model = LitellmModel(model="test-model")
+
+    await model.get_response(
+        system_instructions=None,
+        input=[],
+        model_settings=settings,
+        tools=[],
+        output_schema=None,
+        handoffs=[],
+        tracing=ModelTracing.DISABLED,
+        previous_response_id=None,
+    )
+
+    assert {"cached_content": "some_cache", "foo": 123}.items() <= captured.items()
```