massgen 0.0.3__py3-none-any.whl → 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of massgen has been flagged as potentially problematic. See the release details page for more information.
- massgen/__init__.py +142 -8
- massgen/adapters/__init__.py +29 -0
- massgen/adapters/ag2_adapter.py +483 -0
- massgen/adapters/base.py +183 -0
- massgen/adapters/tests/__init__.py +0 -0
- massgen/adapters/tests/test_ag2_adapter.py +439 -0
- massgen/adapters/tests/test_agent_adapter.py +128 -0
- massgen/adapters/utils/__init__.py +2 -0
- massgen/adapters/utils/ag2_utils.py +236 -0
- massgen/adapters/utils/tests/__init__.py +0 -0
- massgen/adapters/utils/tests/test_ag2_utils.py +138 -0
- massgen/agent_config.py +329 -55
- massgen/api_params_handler/__init__.py +10 -0
- massgen/api_params_handler/_api_params_handler_base.py +99 -0
- massgen/api_params_handler/_chat_completions_api_params_handler.py +176 -0
- massgen/api_params_handler/_claude_api_params_handler.py +113 -0
- massgen/api_params_handler/_response_api_params_handler.py +130 -0
- massgen/backend/__init__.py +39 -4
- massgen/backend/azure_openai.py +385 -0
- massgen/backend/base.py +341 -69
- massgen/backend/base_with_mcp.py +1102 -0
- massgen/backend/capabilities.py +386 -0
- massgen/backend/chat_completions.py +577 -130
- massgen/backend/claude.py +1033 -537
- massgen/backend/claude_code.py +1203 -0
- massgen/backend/cli_base.py +209 -0
- massgen/backend/docs/BACKEND_ARCHITECTURE.md +126 -0
- massgen/backend/{CLAUDE_API_RESEARCH.md → docs/CLAUDE_API_RESEARCH.md} +18 -18
- massgen/backend/{GEMINI_API_DOCUMENTATION.md → docs/GEMINI_API_DOCUMENTATION.md} +9 -9
- massgen/backend/docs/Gemini MCP Integration Analysis.md +1050 -0
- massgen/backend/docs/MCP_IMPLEMENTATION_CLAUDE_BACKEND.md +177 -0
- massgen/backend/docs/MCP_INTEGRATION_RESPONSE_BACKEND.md +352 -0
- massgen/backend/docs/OPENAI_GPT5_MODELS.md +211 -0
- massgen/backend/{OPENAI_RESPONSES_API_FORMAT.md → docs/OPENAI_RESPONSE_API_TOOL_CALLS.md} +3 -3
- massgen/backend/docs/OPENAI_response_streaming.md +20654 -0
- massgen/backend/docs/inference_backend.md +257 -0
- massgen/backend/docs/permissions_and_context_files.md +1085 -0
- massgen/backend/external.py +126 -0
- massgen/backend/gemini.py +1850 -241
- massgen/backend/grok.py +40 -156
- massgen/backend/inference.py +156 -0
- massgen/backend/lmstudio.py +171 -0
- massgen/backend/response.py +1095 -322
- massgen/chat_agent.py +131 -113
- massgen/cli.py +1560 -275
- massgen/config_builder.py +2396 -0
- massgen/configs/BACKEND_CONFIGURATION.md +458 -0
- massgen/configs/README.md +559 -216
- massgen/configs/ag2/ag2_case_study.yaml +27 -0
- massgen/configs/ag2/ag2_coder.yaml +34 -0
- massgen/configs/ag2/ag2_coder_case_study.yaml +36 -0
- massgen/configs/ag2/ag2_gemini.yaml +27 -0
- massgen/configs/ag2/ag2_groupchat.yaml +108 -0
- massgen/configs/ag2/ag2_groupchat_gpt.yaml +118 -0
- massgen/configs/ag2/ag2_single_agent.yaml +21 -0
- massgen/configs/basic/multi/fast_timeout_example.yaml +37 -0
- massgen/configs/basic/multi/gemini_4o_claude.yaml +31 -0
- massgen/configs/basic/multi/gemini_gpt5nano_claude.yaml +36 -0
- massgen/configs/{gemini_4o_claude.yaml → basic/multi/geminicode_4o_claude.yaml} +3 -3
- massgen/configs/basic/multi/geminicode_gpt5nano_claude.yaml +36 -0
- massgen/configs/basic/multi/glm_gemini_claude.yaml +25 -0
- massgen/configs/basic/multi/gpt4o_audio_generation.yaml +30 -0
- massgen/configs/basic/multi/gpt4o_image_generation.yaml +31 -0
- massgen/configs/basic/multi/gpt5nano_glm_qwen.yaml +26 -0
- massgen/configs/basic/multi/gpt5nano_image_understanding.yaml +26 -0
- massgen/configs/{three_agents_default.yaml → basic/multi/three_agents_default.yaml} +8 -4
- massgen/configs/basic/multi/three_agents_opensource.yaml +27 -0
- massgen/configs/basic/multi/three_agents_vllm.yaml +20 -0
- massgen/configs/basic/multi/two_agents_gemini.yaml +19 -0
- massgen/configs/{two_agents.yaml → basic/multi/two_agents_gpt5.yaml} +14 -6
- massgen/configs/basic/multi/two_agents_opensource_lmstudio.yaml +31 -0
- massgen/configs/basic/multi/two_qwen_vllm_sglang.yaml +28 -0
- massgen/configs/{single_agent.yaml → basic/single/single_agent.yaml} +1 -1
- massgen/configs/{single_flash2.5.yaml → basic/single/single_flash2.5.yaml} +1 -2
- massgen/configs/basic/single/single_gemini2.5pro.yaml +16 -0
- massgen/configs/basic/single/single_gpt4o_audio_generation.yaml +22 -0
- massgen/configs/basic/single/single_gpt4o_image_generation.yaml +22 -0
- massgen/configs/basic/single/single_gpt4o_video_generation.yaml +24 -0
- massgen/configs/basic/single/single_gpt5nano.yaml +20 -0
- massgen/configs/basic/single/single_gpt5nano_file_search.yaml +18 -0
- massgen/configs/basic/single/single_gpt5nano_image_understanding.yaml +17 -0
- massgen/configs/basic/single/single_gptoss120b.yaml +15 -0
- massgen/configs/basic/single/single_openrouter_audio_understanding.yaml +15 -0
- massgen/configs/basic/single/single_qwen_video_understanding.yaml +15 -0
- massgen/configs/debug/code_execution/command_filtering_blacklist.yaml +29 -0
- massgen/configs/debug/code_execution/command_filtering_whitelist.yaml +28 -0
- massgen/configs/debug/code_execution/docker_verification.yaml +29 -0
- massgen/configs/debug/skip_coordination_test.yaml +27 -0
- massgen/configs/debug/test_sdk_migration.yaml +17 -0
- massgen/configs/docs/DISCORD_MCP_SETUP.md +208 -0
- massgen/configs/docs/TWITTER_MCP_ENESCINAR_SETUP.md +82 -0
- massgen/configs/providers/azure/azure_openai_multi.yaml +21 -0
- massgen/configs/providers/azure/azure_openai_single.yaml +19 -0
- massgen/configs/providers/claude/claude.yaml +14 -0
- massgen/configs/providers/gemini/gemini_gpt5nano.yaml +28 -0
- massgen/configs/providers/local/lmstudio.yaml +11 -0
- massgen/configs/providers/openai/gpt5.yaml +46 -0
- massgen/configs/providers/openai/gpt5_nano.yaml +46 -0
- massgen/configs/providers/others/grok_single_agent.yaml +19 -0
- massgen/configs/providers/others/zai_coding_team.yaml +108 -0
- massgen/configs/providers/others/zai_glm45.yaml +12 -0
- massgen/configs/{creative_team.yaml → teams/creative/creative_team.yaml} +16 -6
- massgen/configs/{travel_planning.yaml → teams/creative/travel_planning.yaml} +16 -6
- massgen/configs/{news_analysis.yaml → teams/research/news_analysis.yaml} +16 -6
- massgen/configs/{research_team.yaml → teams/research/research_team.yaml} +15 -7
- massgen/configs/{technical_analysis.yaml → teams/research/technical_analysis.yaml} +16 -6
- massgen/configs/tools/code-execution/basic_command_execution.yaml +25 -0
- massgen/configs/tools/code-execution/code_execution_use_case_simple.yaml +41 -0
- massgen/configs/tools/code-execution/docker_claude_code.yaml +32 -0
- massgen/configs/tools/code-execution/docker_multi_agent.yaml +32 -0
- massgen/configs/tools/code-execution/docker_simple.yaml +29 -0
- massgen/configs/tools/code-execution/docker_with_resource_limits.yaml +32 -0
- massgen/configs/tools/code-execution/multi_agent_playwright_automation.yaml +57 -0
- massgen/configs/tools/filesystem/cc_gpt5_gemini_filesystem.yaml +34 -0
- massgen/configs/tools/filesystem/claude_code_context_sharing.yaml +68 -0
- massgen/configs/tools/filesystem/claude_code_flash2.5.yaml +43 -0
- massgen/configs/tools/filesystem/claude_code_flash2.5_gptoss.yaml +49 -0
- massgen/configs/tools/filesystem/claude_code_gpt5nano.yaml +31 -0
- massgen/configs/tools/filesystem/claude_code_single.yaml +40 -0
- massgen/configs/tools/filesystem/fs_permissions_test.yaml +87 -0
- massgen/configs/tools/filesystem/gemini_gemini_workspace_cleanup.yaml +54 -0
- massgen/configs/tools/filesystem/gemini_gpt5_filesystem_casestudy.yaml +30 -0
- massgen/configs/tools/filesystem/gemini_gpt5nano_file_context_path.yaml +43 -0
- massgen/configs/tools/filesystem/gemini_gpt5nano_protected_paths.yaml +45 -0
- massgen/configs/tools/filesystem/gpt5mini_cc_fs_context_path.yaml +31 -0
- massgen/configs/tools/filesystem/grok4_gpt5_gemini_filesystem.yaml +32 -0
- massgen/configs/tools/filesystem/multiturn/grok4_gpt5_claude_code_filesystem_multiturn.yaml +58 -0
- massgen/configs/tools/filesystem/multiturn/grok4_gpt5_gemini_filesystem_multiturn.yaml +58 -0
- massgen/configs/tools/filesystem/multiturn/two_claude_code_filesystem_multiturn.yaml +47 -0
- massgen/configs/tools/filesystem/multiturn/two_gemini_flash_filesystem_multiturn.yaml +48 -0
- massgen/configs/tools/mcp/claude_code_discord_mcp_example.yaml +27 -0
- massgen/configs/tools/mcp/claude_code_simple_mcp.yaml +35 -0
- massgen/configs/tools/mcp/claude_code_twitter_mcp_example.yaml +32 -0
- massgen/configs/tools/mcp/claude_mcp_example.yaml +24 -0
- massgen/configs/tools/mcp/claude_mcp_test.yaml +27 -0
- massgen/configs/tools/mcp/five_agents_travel_mcp_test.yaml +157 -0
- massgen/configs/tools/mcp/five_agents_weather_mcp_test.yaml +103 -0
- massgen/configs/tools/mcp/gemini_mcp_example.yaml +24 -0
- massgen/configs/tools/mcp/gemini_mcp_filesystem_test.yaml +23 -0
- massgen/configs/tools/mcp/gemini_mcp_filesystem_test_sharing.yaml +23 -0
- massgen/configs/tools/mcp/gemini_mcp_filesystem_test_single_agent.yaml +17 -0
- massgen/configs/tools/mcp/gemini_mcp_filesystem_test_with_claude_code.yaml +24 -0
- massgen/configs/tools/mcp/gemini_mcp_test.yaml +27 -0
- massgen/configs/tools/mcp/gemini_notion_mcp.yaml +52 -0
- massgen/configs/tools/mcp/gpt5_nano_mcp_example.yaml +24 -0
- massgen/configs/tools/mcp/gpt5_nano_mcp_test.yaml +27 -0
- massgen/configs/tools/mcp/gpt5mini_claude_code_discord_mcp_example.yaml +38 -0
- massgen/configs/tools/mcp/gpt_oss_mcp_example.yaml +25 -0
- massgen/configs/tools/mcp/gpt_oss_mcp_test.yaml +28 -0
- massgen/configs/tools/mcp/grok3_mini_mcp_example.yaml +24 -0
- massgen/configs/tools/mcp/grok3_mini_mcp_test.yaml +27 -0
- massgen/configs/tools/mcp/multimcp_gemini.yaml +111 -0
- massgen/configs/tools/mcp/qwen_api_mcp_example.yaml +25 -0
- massgen/configs/tools/mcp/qwen_api_mcp_test.yaml +28 -0
- massgen/configs/tools/mcp/qwen_local_mcp_example.yaml +24 -0
- massgen/configs/tools/mcp/qwen_local_mcp_test.yaml +27 -0
- massgen/configs/tools/planning/five_agents_discord_mcp_planning_mode.yaml +140 -0
- massgen/configs/tools/planning/five_agents_filesystem_mcp_planning_mode.yaml +151 -0
- massgen/configs/tools/planning/five_agents_notion_mcp_planning_mode.yaml +151 -0
- massgen/configs/tools/planning/five_agents_twitter_mcp_planning_mode.yaml +155 -0
- massgen/configs/tools/planning/gpt5_mini_case_study_mcp_planning_mode.yaml +73 -0
- massgen/configs/tools/web-search/claude_streamable_http_test.yaml +43 -0
- massgen/configs/tools/web-search/gemini_streamable_http_test.yaml +43 -0
- massgen/configs/tools/web-search/gpt5_mini_streamable_http_test.yaml +43 -0
- massgen/configs/tools/web-search/gpt_oss_streamable_http_test.yaml +44 -0
- massgen/configs/tools/web-search/grok3_mini_streamable_http_test.yaml +43 -0
- massgen/configs/tools/web-search/qwen_api_streamable_http_test.yaml +44 -0
- massgen/configs/tools/web-search/qwen_local_streamable_http_test.yaml +43 -0
- massgen/coordination_tracker.py +708 -0
- massgen/docker/README.md +462 -0
- massgen/filesystem_manager/__init__.py +21 -0
- massgen/filesystem_manager/_base.py +9 -0
- massgen/filesystem_manager/_code_execution_server.py +545 -0
- massgen/filesystem_manager/_docker_manager.py +477 -0
- massgen/filesystem_manager/_file_operation_tracker.py +248 -0
- massgen/filesystem_manager/_filesystem_manager.py +813 -0
- massgen/filesystem_manager/_path_permission_manager.py +1261 -0
- massgen/filesystem_manager/_workspace_tools_server.py +1815 -0
- massgen/formatter/__init__.py +10 -0
- massgen/formatter/_chat_completions_formatter.py +284 -0
- massgen/formatter/_claude_formatter.py +235 -0
- massgen/formatter/_formatter_base.py +156 -0
- massgen/formatter/_response_formatter.py +263 -0
- massgen/frontend/__init__.py +1 -2
- massgen/frontend/coordination_ui.py +471 -286
- massgen/frontend/displays/base_display.py +56 -11
- massgen/frontend/displays/create_coordination_table.py +1956 -0
- massgen/frontend/displays/rich_terminal_display.py +1259 -619
- massgen/frontend/displays/simple_display.py +9 -4
- massgen/frontend/displays/terminal_display.py +27 -68
- massgen/logger_config.py +681 -0
- massgen/mcp_tools/README.md +232 -0
- massgen/mcp_tools/__init__.py +105 -0
- massgen/mcp_tools/backend_utils.py +1035 -0
- massgen/mcp_tools/circuit_breaker.py +195 -0
- massgen/mcp_tools/client.py +894 -0
- massgen/mcp_tools/config_validator.py +138 -0
- massgen/mcp_tools/docs/circuit_breaker.md +646 -0
- massgen/mcp_tools/docs/client.md +950 -0
- massgen/mcp_tools/docs/config_validator.md +478 -0
- massgen/mcp_tools/docs/exceptions.md +1165 -0
- massgen/mcp_tools/docs/security.md +854 -0
- massgen/mcp_tools/exceptions.py +338 -0
- massgen/mcp_tools/hooks.py +212 -0
- massgen/mcp_tools/security.py +780 -0
- massgen/message_templates.py +342 -64
- massgen/orchestrator.py +1515 -241
- massgen/stream_chunk/__init__.py +35 -0
- massgen/stream_chunk/base.py +92 -0
- massgen/stream_chunk/multimodal.py +237 -0
- massgen/stream_chunk/text.py +162 -0
- massgen/tests/mcp_test_server.py +150 -0
- massgen/tests/multi_turn_conversation_design.md +0 -8
- massgen/tests/test_azure_openai_backend.py +156 -0
- massgen/tests/test_backend_capabilities.py +262 -0
- massgen/tests/test_backend_event_loop_all.py +179 -0
- massgen/tests/test_chat_completions_refactor.py +142 -0
- massgen/tests/test_claude_backend.py +15 -28
- massgen/tests/test_claude_code.py +268 -0
- massgen/tests/test_claude_code_context_sharing.py +233 -0
- massgen/tests/test_claude_code_orchestrator.py +175 -0
- massgen/tests/test_cli_backends.py +180 -0
- massgen/tests/test_code_execution.py +679 -0
- massgen/tests/test_external_agent_backend.py +134 -0
- massgen/tests/test_final_presentation_fallback.py +237 -0
- massgen/tests/test_gemini_planning_mode.py +351 -0
- massgen/tests/test_grok_backend.py +7 -10
- massgen/tests/test_http_mcp_server.py +42 -0
- massgen/tests/test_integration_simple.py +198 -0
- massgen/tests/test_mcp_blocking.py +125 -0
- massgen/tests/test_message_context_building.py +29 -47
- massgen/tests/test_orchestrator_final_presentation.py +48 -0
- massgen/tests/test_path_permission_manager.py +2087 -0
- massgen/tests/test_rich_terminal_display.py +14 -13
- massgen/tests/test_timeout.py +133 -0
- massgen/tests/test_v3_3agents.py +11 -12
- massgen/tests/test_v3_simple.py +8 -13
- massgen/tests/test_v3_three_agents.py +11 -18
- massgen/tests/test_v3_two_agents.py +8 -13
- massgen/token_manager/__init__.py +7 -0
- massgen/token_manager/token_manager.py +400 -0
- massgen/utils.py +52 -16
- massgen/v1/agent.py +45 -91
- massgen/v1/agents.py +18 -53
- massgen/v1/backends/gemini.py +50 -153
- massgen/v1/backends/grok.py +21 -54
- massgen/v1/backends/oai.py +39 -111
- massgen/v1/cli.py +36 -93
- massgen/v1/config.py +8 -12
- massgen/v1/logging.py +43 -127
- massgen/v1/main.py +18 -32
- massgen/v1/orchestrator.py +68 -209
- massgen/v1/streaming_display.py +62 -163
- massgen/v1/tools.py +8 -12
- massgen/v1/types.py +9 -23
- massgen/v1/utils.py +5 -23
- massgen-0.1.0.dist-info/METADATA +1245 -0
- massgen-0.1.0.dist-info/RECORD +273 -0
- massgen-0.1.0.dist-info/entry_points.txt +2 -0
- massgen/frontend/logging/__init__.py +0 -9
- massgen/frontend/logging/realtime_logger.py +0 -197
- massgen-0.0.3.dist-info/METADATA +0 -568
- massgen-0.0.3.dist-info/RECORD +0 -76
- massgen-0.0.3.dist-info/entry_points.txt +0 -2
- /massgen/backend/{Function calling openai responses.md → docs/Function calling openai responses.md} +0 -0
- {massgen-0.0.3.dist-info → massgen-0.1.0.dist-info}/WHEEL +0 -0
- {massgen-0.0.3.dist-info → massgen-0.1.0.dist-info}/licenses/LICENSE +0 -0
- {massgen-0.0.3.dist-info → massgen-0.1.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1102 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""
|
|
3
|
+
Base class with MCP (Model Context Protocol) support.
|
|
4
|
+
Provides common MCP functionality for backends that support MCP integration.
|
|
5
|
+
Inherits from LLMBackend and adds MCP-specific features.
|
|
6
|
+
"""
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import asyncio
|
|
10
|
+
import base64
|
|
11
|
+
import json
|
|
12
|
+
import mimetypes
|
|
13
|
+
from abc import abstractmethod
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple
|
|
16
|
+
|
|
17
|
+
import httpx
|
|
18
|
+
|
|
19
|
+
from ..logger_config import log_backend_activity, logger
|
|
20
|
+
from .base import LLMBackend, StreamChunk
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class UploadFileError(Exception):
    """Raised when an upload specified in configuration fails to process.

    Base error for the `upload_files` preprocessing pipeline; more specific
    upload failures (e.g. unsupported sources) subclass this.
    """
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class UnsupportedUploadSourceError(UploadFileError):
    """Raised when a provided upload source cannot be processed (e.g., URL without fetch support)."""
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# MCP integration imports
# The mcp_tools package is optional at runtime. On ImportError every imported
# name is aliased: manager/client classes become None (so feature-detection
# checks like `if MCPSetupManager:` fail cleanly), and MCP exception types
# become ImportError (so `except MCPError:` clauses elsewhere stay valid).
try:
    from ..mcp_tools import (
        Function,
        MCPCircuitBreaker,
        MCPCircuitBreakerManager,
        MCPClient,
        MCPConfigHelper,
        MCPConnectionError,
        MCPError,
        MCPErrorHandler,
        MCPExecutionManager,
        MCPMessageManager,
        MCPResourceManager,
        MCPServerError,
        MCPSetupManager,
        MCPTimeoutError,
    )
except ImportError as e:
    logger.warning(f"MCP import failed: {e}")
    # Create fallback assignments for all MCP imports
    MCPClient = None
    MCPCircuitBreaker = None
    Function = None
    MCPErrorHandler = None
    MCPSetupManager = None
    MCPResourceManager = None
    MCPExecutionManager = None
    MCPMessageManager = None
    MCPConfigHelper = None
    MCPCircuitBreakerManager = None
    # Exception aliases: map to ImportError so except-clauses remain usable.
    MCPError = ImportError
    MCPConnectionError = ImportError
    MCPTimeoutError = ImportError
    MCPServerError = ImportError
|
|
66
|
+
|
|
67
|
+
# Supported file types for OpenAI File Search
# NOTE: These are the extensions supported by OpenAI's File Search API.
# Claude Files API has different restrictions (only .pdf and .txt) - see claude.py for Claude-specific validation.
FILE_SEARCH_SUPPORTED_EXTENSIONS = {
    ".c",
    ".cpp",
    ".cs",
    ".css",
    ".doc",
    ".docx",
    ".html",
    ".java",
    ".js",
    ".json",
    ".md",
    ".pdf",
    ".php",
    ".pptx",
    ".py",
    ".rb",
    ".sh",
    ".tex",
    ".ts",
    ".txt",
}

# Hard ceiling for File Search uploads, in bytes.
FILE_SEARCH_MAX_FILE_SIZE = 512 * 1024 * 1024  # 512 MB

# Max size for media uploads (audio/video). Configurable via `media_max_file_size_mb` in config/all_params.
MEDIA_MAX_FILE_SIZE_MB = 64

# Supported audio formats for OpenAI audio models (starting with wav and mp3)
SUPPORTED_AUDIO_FORMATS = {"mp3", "wav"}

# Supported audio MIME types (for validation consistency)
SUPPORTED_AUDIO_MIME_TYPES = {
    "audio/wav",
    "audio/wave",
    "audio/x-wav",
    "audio/mpeg",
    "audio/mp3",
}
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class MCPBackend(LLMBackend):
|
|
111
|
+
"""Base backend class with MCP (Model Context Protocol) support."""
|
|
112
|
+
|
|
113
|
+
def __init__(self, api_key: Optional[str] = None, **kwargs):
    """Initialize backend with MCP support.

    Args:
        api_key: Provider API key, forwarded unchanged to ``LLMBackend``.
        **kwargs: Backend options. Keys consumed here: ``allowed_tools`` and
            ``exclude_tools`` (MCP tool-name filters, popped),
            ``max_mcp_message_history`` (history cap, popped, default 200),
            and ``agent_id`` (read but not removed).
    """
    # NOTE(review): kwargs is forwarded to the base class *before* the pops
    # below, so LLMBackend also receives allowed_tools / exclude_tools /
    # max_mcp_message_history — confirm this double delivery is intended.
    super().__init__(api_key, **kwargs)

    # MCP integration (filesystem MCP server may have been injected by base class)
    self.mcp_servers = self.config.get("mcp_servers", [])
    self.allowed_tools = kwargs.pop("allowed_tools", None)
    self.exclude_tools = kwargs.pop("exclude_tools", None)
    self._mcp_client: Optional[MCPClient] = None
    self._mcp_initialized = False

    # MCP tool execution monitoring
    self._mcp_tool_calls_count = 0
    self._mcp_tool_failures = 0
    self._mcp_function_names: set[str] = set()

    # Circuit breaker for MCP tools (stdio + streamable-http)
    self._mcp_tools_circuit_breaker = None
    # MCPCircuitBreaker is None when the optional mcp_tools import failed.
    self._circuit_breakers_enabled = MCPCircuitBreaker is not None

    # Initialize circuit breaker if available and MCP servers are configured
    if self._circuit_breakers_enabled and self.mcp_servers:
        # Use shared utility to build circuit breaker configuration
        mcp_tools_config = MCPConfigHelper.build_circuit_breaker_config("mcp_tools") if MCPConfigHelper else None

        if mcp_tools_config:
            self._mcp_tools_circuit_breaker = MCPCircuitBreaker(mcp_tools_config)
            logger.info("Circuit breaker initialized for MCP tools")
        else:
            logger.warning("MCP tools circuit breaker config not available, disabling circuit breaker functionality")
            self._circuit_breakers_enabled = False
    else:
        if not self.mcp_servers:
            # No MCP servers configured - skip circuit breaker initialization silently
            self._circuit_breakers_enabled = False
        else:
            logger.warning("Circuit breakers not available - proceeding without circuit breaker protection")

    # Function registry for mcp_tools-based servers (stdio + streamable-http)
    self._mcp_functions: Dict[str, Function] = {}

    # Thread safety for counters
    self._stats_lock = asyncio.Lock()

    # Limit for message history growth within MCP execution loop
    self._max_mcp_message_history = kwargs.pop("max_mcp_message_history", 200)

    # Initialize backend name and agent ID for MCP operations
    self.backend_name = self.get_provider_name()
    self.agent_id = kwargs.get("agent_id", None)
|
|
163
|
+
|
|
164
|
+
def supports_upload_files(self) -> bool:
    """Whether this backend can preprocess `upload_files` config entries.

    The base implementation opts out; subclasses that implement upload
    handling override this to return True.
    """
    return False
|
|
167
|
+
|
|
168
|
+
@abstractmethod
async def _process_stream(self, stream, all_params, agent_id: Optional[str] = None) -> AsyncGenerator[StreamChunk, None]:
    """Translate a provider-specific response stream into StreamChunk objects.

    Args:
        stream: Provider streaming response — presumably the raw object
            returned by the provider SDK; exact type varies per subclass
            (confirm in each concrete backend).
        all_params: Merged request parameters for this call.
        agent_id: Optional agent identifier, used for logging/attribution.

    Yields:
        StreamChunk: Normalized chunks for the orchestrator to consume.
    """
|
|
171
|
+
|
|
172
|
+
async def _setup_mcp_tools(self) -> None:
    """Initialize MCP client for mcp_tools-based servers (stdio + streamable-http).

    Idempotent: returns immediately when no servers are configured or setup
    already succeeded. On any failure the backend is left in a clean
    no-MCP state (client None, no registered functions).
    """
    if not self.mcp_servers or self._mcp_initialized:
        return

    try:
        # Normalize and separate MCP servers by transport type using mcp_tools utilities
        normalized_servers = (
            MCPSetupManager.normalize_mcp_servers(
                self.mcp_servers,
                backend_name=self.backend_name,
                agent_id=self.agent_id,
            )
            if MCPSetupManager
            else []
        )

        # Guard (after the conditional above already tolerated a missing
        # manager by producing []): without MCPSetupManager we cannot proceed.
        if not MCPSetupManager:
            logger.warning("MCPSetupManager not available")
            return

        mcp_tools_servers = MCPSetupManager.separate_stdio_streamable_servers(
            normalized_servers,
            backend_name=self.backend_name,
            agent_id=self.agent_id,
        )

        if not mcp_tools_servers:
            logger.info("No stdio/streamable-http servers configured")
            return

        # Apply circuit breaker filtering before connection attempts
        if self._circuit_breakers_enabled and self._mcp_tools_circuit_breaker and MCPCircuitBreakerManager:
            filtered_servers = MCPCircuitBreakerManager.apply_circuit_breaker_filtering(
                mcp_tools_servers,
                self._mcp_tools_circuit_breaker,
                backend_name=self.backend_name,
                agent_id=self.agent_id,
            )
            if not filtered_servers:
                logger.warning("All MCP servers blocked by circuit breaker during setup")
                return
            if len(filtered_servers) < len(mcp_tools_servers):
                logger.info(f"Circuit breaker filtered {len(mcp_tools_servers) - len(filtered_servers)} servers during setup")
            servers_to_use = filtered_servers
        else:
            servers_to_use = mcp_tools_servers

        # Setup MCP client using consolidated utilities
        if not MCPResourceManager:
            logger.warning("MCPResourceManager not available")
            return

        self._mcp_client = await MCPResourceManager.setup_mcp_client(
            servers=servers_to_use,
            allowed_tools=self.allowed_tools,
            exclude_tools=self.exclude_tools,
            circuit_breaker=self._mcp_tools_circuit_breaker,
            timeout_seconds=400,  # Increased timeout for image generation tools
            backend_name=self.backend_name,
            agent_id=self.agent_id,
        )

        # Guard after client setup
        if not self._mcp_client:
            self._mcp_initialized = False
            logger.warning("MCP client setup failed, falling back to no-MCP streaming")
            return

        # Convert tools to functions using consolidated utility
        self._mcp_functions.update(
            MCPResourceManager.convert_tools_to_functions(
                self._mcp_client,
                backend_name=self.backend_name,
                agent_id=self.agent_id,
                hook_manager=getattr(self, "function_hook_manager", None),
            ),
        )
        self._mcp_initialized = True
        logger.info(f"Successfully initialized MCP sessions with {len(self._mcp_functions)} tools converted to functions")

        # Record success for circuit breaker
        await self._record_mcp_circuit_breaker_success(servers_to_use)

    except Exception as e:
        # Record failure for circuit breaker
        # NOTE(review): unlike the success path above, this call is not
        # awaited — if _record_mcp_circuit_breaker_failure is async (its
        # definition is elsewhere in this file), the coroutine is dropped;
        # confirm against its definition.
        self._record_mcp_circuit_breaker_failure(e, self.agent_id)
        logger.warning(f"Failed to setup MCP sessions: {e}")
        # Reset to a clean no-MCP state so streaming can continue without tools.
        self._mcp_client = None
        self._mcp_initialized = False
        self._mcp_functions = {}
|
|
263
|
+
|
|
264
|
+
async def _execute_mcp_function_with_retry(
    self,
    function_name: str,
    arguments_json: str,
    max_retries: int = 3,
) -> Tuple[str, Any]:
    """Execute an MCP function via the shared retry helper (exponential backoff).

    Returns a ``(text, raw)`` pair: ``text`` is a string rendering of the
    outcome (prefixed with ``Error:`` on failure) and ``raw`` is the
    underlying result object or an error dict.
    """
    # Planning mode blocks every MCP tool call during coordination.
    if self.is_planning_mode_enabled():
        logger.info(f"[MCP] Planning mode enabled - blocking MCP tool execution: {function_name}")
        error_str = "🚫 [MCP] Planning mode active - MCP tools blocked during coordination"
        return error_str, {"error": error_str, "blocked_by": "planning_mode", "function_name": function_name}

    # Accept either a JSON-encoded string or an already-decoded payload.
    if isinstance(arguments_json, str):
        try:
            decoded_args = json.loads(arguments_json)
        except (json.JSONDecodeError, ValueError) as e:
            error_str = f"Error: Invalid JSON arguments: {e}"
            return error_str, {"error": error_str}
    else:
        decoded_args = arguments_json

    async def _track_stats(action: str) -> int:
        """Update call/failure counters under the shared lock; returns the new count."""
        async with self._stats_lock:
            if action == "increment_calls":
                self._mcp_tool_calls_count += 1
                return self._mcp_tool_calls_count
            if action == "increment_failures":
                self._mcp_tool_failures += 1
                return self._mcp_tool_failures
            return 0

    async def _notify_breaker(event: str, error_msg: str = "") -> None:
        """Forward success/failure events to the circuit breaker, if enabled."""
        breaker_ready = self._circuit_breakers_enabled and MCPCircuitBreakerManager and self._mcp_tools_circuit_breaker
        if not breaker_ready:
            return

        # Individual function calls carry no server configurations; the
        # circuit breaker manager is expected to tolerate an empty list.
        if event == "failure":
            await MCPCircuitBreakerManager.record_event(
                [],
                self._mcp_tools_circuit_breaker,
                "failure",
                error_msg,
                backend_name=self.backend_name,
                agent_id=self.agent_id,
            )
        else:
            await MCPCircuitBreakerManager.record_event(
                [],
                self._mcp_tools_circuit_breaker,
                "success",
                backend_name=self.backend_name,
                agent_id=self.agent_id,
            )

    if not MCPExecutionManager:
        return "Error: MCPExecutionManager unavailable", {"error": "MCPExecutionManager unavailable"}

    outcome = await MCPExecutionManager.execute_function_with_retry(
        function_name=function_name,
        args=decoded_args,
        functions=self._mcp_functions,
        max_retries=max_retries,
        stats_callback=_track_stats,
        circuit_breaker_callback=_notify_breaker,
        logger_instance=logger,
    )

    # Error dicts become "Error: ..." strings; anything else is str()-rendered.
    if isinstance(outcome, dict) and "error" in outcome:
        return f"Error: {outcome['error']}", outcome
    return str(outcome), outcome
|
|
337
|
+
|
|
338
|
+
async def _process_upload_files(
|
|
339
|
+
self,
|
|
340
|
+
messages: List[Dict[str, Any]],
|
|
341
|
+
all_params: Dict[str, Any],
|
|
342
|
+
) -> List[Dict[str, Any]]:
|
|
343
|
+
"""Process upload_files config entries and attach to messages.
|
|
344
|
+
|
|
345
|
+
Supports these forms:
|
|
346
|
+
|
|
347
|
+
- {"image_path": "..."}: image file path or HTTP/HTTPS URL
|
|
348
|
+
- Local paths: loads and base64-encodes the image file
|
|
349
|
+
- URLs: passed directly without encoding
|
|
350
|
+
Supported formats: PNG, JPEG, WEBP, GIF, BMP, TIFF, HEIC (provider-dependent)
|
|
351
|
+
|
|
352
|
+
- {"audio_path": "..."}: audio file path or HTTP/HTTPS URL
|
|
353
|
+
- Local paths: loads and base64-encodes the audio file
|
|
354
|
+
- URLs: fetched and base64-encoded (30s timeout, configurable size limit)
|
|
355
|
+
Supported formats: WAV, MP3 (strictly validated)
|
|
356
|
+
|
|
357
|
+
- {"video_path": "..."}: video file path or HTTP/HTTPS URL
|
|
358
|
+
- Local paths: loads and base64-encodes the video file
|
|
359
|
+
- URLs: passed directly without encoding, converted to video_url format
|
|
360
|
+
Supported formats: MP4, AVI, MOV, WEBM (provider-dependent)
|
|
361
|
+
|
|
362
|
+
- {"file_path": "..."}: document/code file for File Search (local path or URL)
|
|
363
|
+
- Local paths: validated against supported extensions and size limits
|
|
364
|
+
- URLs: queued for upload without local validation
|
|
365
|
+
Supported extensions: .c, .cpp, .cs, .css, .doc, .docx, .html, .java, .js,
|
|
366
|
+
.json, .md, .pdf, .php, .pptx, .py, .rb, .sh, .tex, .ts, .txt
|
|
367
|
+
|
|
368
|
+
Note: Format support varies by provider (OpenAI, Qwen, vLLM, etc.). The implementation
|
|
369
|
+
uses MIME type detection for automatic format handling.
|
|
370
|
+
|
|
371
|
+
Audio/Video/Image uploads are limited by `media_max_file_size_mb` (default 64MB).
|
|
372
|
+
File Search files are limited to 512MB. You can override limits via config or call parameters.
|
|
373
|
+
|
|
374
|
+
Returns updated messages list with additional content items.
|
|
375
|
+
"""
|
|
376
|
+
|
|
377
|
+
upload_entries = all_params.get("upload_files")
|
|
378
|
+
if not upload_entries:
|
|
379
|
+
return messages
|
|
380
|
+
|
|
381
|
+
if not self.supports_upload_files():
|
|
382
|
+
logger.debug(
|
|
383
|
+
"upload_files provided but backend %s does not support file uploads; ignoring",
|
|
384
|
+
self.get_provider_name(),
|
|
385
|
+
)
|
|
386
|
+
all_params.pop("upload_files", None)
|
|
387
|
+
return messages
|
|
388
|
+
|
|
389
|
+
processed_messages = list(messages)
|
|
390
|
+
extra_content: List[Dict[str, Any]] = []
|
|
391
|
+
has_file_search_files = False
|
|
392
|
+
|
|
393
|
+
for entry in upload_entries:
|
|
394
|
+
if not isinstance(entry, dict):
|
|
395
|
+
logger.warning("upload_files entry is not a dict: %s", entry)
|
|
396
|
+
raise UploadFileError("Each upload_files entry must be a mapping")
|
|
397
|
+
|
|
398
|
+
# Check for file_path (File Search documents/code)
|
|
399
|
+
file_path_value = entry.get("file_path")
|
|
400
|
+
if file_path_value:
|
|
401
|
+
# Process file_path entry for File Search
|
|
402
|
+
file_content = self._process_file_path_entry(file_path_value, all_params)
|
|
403
|
+
if file_content:
|
|
404
|
+
extra_content.append(file_content)
|
|
405
|
+
has_file_search_files = True
|
|
406
|
+
continue
|
|
407
|
+
|
|
408
|
+
# Check for image_path (supports both URLs and local paths)
|
|
409
|
+
# image_url deprecated; use image_path with http(s) URL instead
|
|
410
|
+
path_value = entry.get("image_path")
|
|
411
|
+
|
|
412
|
+
if path_value:
|
|
413
|
+
# Check if it's a URL (like file_path does)
|
|
414
|
+
if path_value.startswith(("http://", "https://")):
|
|
415
|
+
# Handle image URLs directly (no base64 encoding needed)
|
|
416
|
+
extra_content.append(
|
|
417
|
+
{
|
|
418
|
+
"type": "image",
|
|
419
|
+
"url": path_value,
|
|
420
|
+
},
|
|
421
|
+
)
|
|
422
|
+
else:
|
|
423
|
+
# Handle local file paths
|
|
424
|
+
resolved = self._resolve_local_path(path_value, all_params)
|
|
425
|
+
|
|
426
|
+
if not resolved.exists():
|
|
427
|
+
raise UploadFileError(f"File not found: {resolved}")
|
|
428
|
+
|
|
429
|
+
# Enforce configurable media size limit (in MB) for images (parity with audio/video)
|
|
430
|
+
limit_mb = all_params.get("media_max_file_size_mb") or self.config.get("media_max_file_size_mb") or MEDIA_MAX_FILE_SIZE_MB
|
|
431
|
+
self._validate_media_size(resolved, int(limit_mb))
|
|
432
|
+
|
|
433
|
+
encoded, mime_type = self._read_base64(resolved)
|
|
434
|
+
if not mime_type:
|
|
435
|
+
mime_type = "image/jpeg"
|
|
436
|
+
|
|
437
|
+
extra_content.append(
|
|
438
|
+
{
|
|
439
|
+
"type": "image",
|
|
440
|
+
"base64": encoded,
|
|
441
|
+
"mime_type": mime_type,
|
|
442
|
+
"source_path": str(resolved),
|
|
443
|
+
},
|
|
444
|
+
)
|
|
445
|
+
|
|
446
|
+
continue
|
|
447
|
+
|
|
448
|
+
audio_path_value = entry.get("audio_path")
|
|
449
|
+
|
|
450
|
+
if audio_path_value:
|
|
451
|
+
# Check if it's a URL (like file_path does)
|
|
452
|
+
if audio_path_value.startswith(("http://", "https://")):
|
|
453
|
+
# Fetch audio URL and convert to base64
|
|
454
|
+
encoded, mime_type = await self._fetch_audio_url_as_base64(
|
|
455
|
+
audio_path_value,
|
|
456
|
+
all_params,
|
|
457
|
+
)
|
|
458
|
+
extra_content.append(
|
|
459
|
+
{
|
|
460
|
+
"type": "audio",
|
|
461
|
+
"base64": encoded,
|
|
462
|
+
"mime_type": mime_type,
|
|
463
|
+
},
|
|
464
|
+
)
|
|
465
|
+
else:
|
|
466
|
+
# Handle local file paths
|
|
467
|
+
resolved = self._resolve_local_path(audio_path_value, all_params)
|
|
468
|
+
|
|
469
|
+
if not resolved.exists():
|
|
470
|
+
raise UploadFileError(f"Audio file not found: {resolved}")
|
|
471
|
+
|
|
472
|
+
# Enforce configurable media size limit (in MB)
|
|
473
|
+
limit_mb = all_params.get("media_max_file_size_mb") or self.config.get("media_max_file_size_mb") or MEDIA_MAX_FILE_SIZE_MB
|
|
474
|
+
|
|
475
|
+
self._validate_media_size(resolved, int(limit_mb))
|
|
476
|
+
|
|
477
|
+
encoded, mime_type = self._read_base64(resolved)
|
|
478
|
+
|
|
479
|
+
# Validate audio format (wav and mp3 only)
|
|
480
|
+
mime_lower = (mime_type or "").split(";")[0].strip().lower()
|
|
481
|
+
if mime_lower not in SUPPORTED_AUDIO_MIME_TYPES:
|
|
482
|
+
raise UploadFileError(
|
|
483
|
+
f"Unsupported audio format for {resolved}. " f"Supported formats: mp3, wav",
|
|
484
|
+
)
|
|
485
|
+
|
|
486
|
+
# Normalize MIME type
|
|
487
|
+
if mime_lower in {"audio/wav", "audio/wave", "audio/x-wav"}:
|
|
488
|
+
mime_type = "audio/wav"
|
|
489
|
+
else:
|
|
490
|
+
mime_type = "audio/mpeg"
|
|
491
|
+
|
|
492
|
+
extra_content.append(
|
|
493
|
+
{
|
|
494
|
+
"type": "audio",
|
|
495
|
+
"base64": encoded,
|
|
496
|
+
"mime_type": mime_type,
|
|
497
|
+
"source_path": str(resolved),
|
|
498
|
+
},
|
|
499
|
+
)
|
|
500
|
+
|
|
501
|
+
continue
|
|
502
|
+
|
|
503
|
+
# Check for video_path (supports both URLs and local paths)
|
|
504
|
+
video_path_value = entry.get("video_path")
|
|
505
|
+
|
|
506
|
+
if video_path_value:
|
|
507
|
+
# Check if it's a URL
|
|
508
|
+
if video_path_value.startswith(("http://", "https://")):
|
|
509
|
+
# Handle video URLs directly (no base64 encoding needed)
|
|
510
|
+
extra_content.append(
|
|
511
|
+
{
|
|
512
|
+
"type": "video_url",
|
|
513
|
+
"url": video_path_value,
|
|
514
|
+
},
|
|
515
|
+
)
|
|
516
|
+
else:
|
|
517
|
+
# Handle local file paths
|
|
518
|
+
resolved = self._resolve_local_path(video_path_value, all_params)
|
|
519
|
+
|
|
520
|
+
if not resolved.exists():
|
|
521
|
+
raise UploadFileError(f"Video file not found: {resolved}")
|
|
522
|
+
|
|
523
|
+
# Enforce configurable media size limit (in MB)
|
|
524
|
+
limit_mb = all_params.get("media_max_file_size_mb") or self.config.get("media_max_file_size_mb") or MEDIA_MAX_FILE_SIZE_MB
|
|
525
|
+
|
|
526
|
+
self._validate_media_size(resolved, int(limit_mb))
|
|
527
|
+
|
|
528
|
+
encoded, mime_type = self._read_base64(resolved)
|
|
529
|
+
if not mime_type:
|
|
530
|
+
mime_type = "video/mp4"
|
|
531
|
+
extra_content.append(
|
|
532
|
+
{
|
|
533
|
+
"type": "video",
|
|
534
|
+
"base64": encoded,
|
|
535
|
+
"mime_type": mime_type,
|
|
536
|
+
"source_path": str(resolved),
|
|
537
|
+
},
|
|
538
|
+
)
|
|
539
|
+
|
|
540
|
+
continue
|
|
541
|
+
|
|
542
|
+
raise UploadFileError(
|
|
543
|
+
"upload_files entry must specify either 'image_path', 'audio_path', 'video_path', or 'file_path'",
|
|
544
|
+
)
|
|
545
|
+
|
|
546
|
+
if not extra_content:
|
|
547
|
+
return processed_messages
|
|
548
|
+
|
|
549
|
+
# Track if file search files are present for API params handler
|
|
550
|
+
if has_file_search_files:
|
|
551
|
+
all_params["_has_file_search_files"] = True
|
|
552
|
+
|
|
553
|
+
if processed_messages:
|
|
554
|
+
last_message = processed_messages[-1].copy()
|
|
555
|
+
last_content = last_message.get("content", [])
|
|
556
|
+
|
|
557
|
+
if isinstance(last_content, str):
|
|
558
|
+
last_content = [{"type": "text", "text": last_content}]
|
|
559
|
+
elif isinstance(last_content, dict) and "type" in last_content:
|
|
560
|
+
last_content = [dict(last_content)]
|
|
561
|
+
elif isinstance(last_content, list):
|
|
562
|
+
if all(isinstance(item, str) for item in last_content):
|
|
563
|
+
last_content = [{"type": "text", "text": item} for item in last_content]
|
|
564
|
+
elif all(isinstance(item, dict) and "type" in item and "text" in item for item in last_content):
|
|
565
|
+
last_content = list(last_content)
|
|
566
|
+
else:
|
|
567
|
+
last_content = []
|
|
568
|
+
else:
|
|
569
|
+
last_content = []
|
|
570
|
+
|
|
571
|
+
last_content.extend(extra_content)
|
|
572
|
+
last_message["content"] = last_content
|
|
573
|
+
processed_messages[-1] = last_message
|
|
574
|
+
else:
|
|
575
|
+
processed_messages.append(
|
|
576
|
+
{
|
|
577
|
+
"role": "user",
|
|
578
|
+
"content": extra_content,
|
|
579
|
+
},
|
|
580
|
+
)
|
|
581
|
+
|
|
582
|
+
# Prevent downstream handlers from seeing upload_files
|
|
583
|
+
all_params.pop("upload_files", None)
|
|
584
|
+
|
|
585
|
+
return processed_messages
|
|
586
|
+
|
|
587
|
+
def _process_file_path_entry(
    self,
    file_path_value: str,
    all_params: Dict[str, Any],
) -> Optional[Dict[str, Any]]:
    """Validate a ``file_path`` upload entry and queue it for File Search upload.

    URLs are queued as-is without local validation; local paths are resolved
    (cwd-aware), then checked against the File Search extension whitelist and
    the size cap before being queued.

    Note: This base implementation validates against OpenAI File Search
    extensions. Backends like Claude may have additional restrictions
    (e.g. only .pdf and .txt) and should perform provider-specific validation
    in their upload methods.

    Args:
        file_path_value: Local path or HTTP/HTTPS URL of the file.
        all_params: Merged parameters; ``cwd`` is consulted for relative paths.

    Returns:
        A ``file_pending_upload`` content item describing the queued file.

    Raises:
        UploadFileError: If the local file is missing, has an unsupported
            extension, or exceeds the File Search size limit.
    """
    # URLs bypass local validation entirely.
    if file_path_value.startswith(("http://", "https://")):
        logger.info(f"Queued file URL for File Search upload: {file_path_value}")
        return {
            "type": "file_pending_upload",
            "url": file_path_value,
            "source": "url",
        }

    # Consistency fix: reuse the shared cwd-aware resolution helper instead of
    # duplicating its expanduser/cwd/resolve logic inline (behavior identical).
    resolved = self._resolve_local_path(file_path_value, all_params)

    if not resolved.exists():
        raise UploadFileError(f"File not found: {resolved}")

    # Validate file extension (OpenAI File Search extensions).
    # Note: Backends like Claude may override with stricter validation.
    file_ext = resolved.suffix.lower()
    if file_ext not in FILE_SEARCH_SUPPORTED_EXTENSIONS:
        raise UploadFileError(
            f"File type {file_ext} not supported by File Search. " f"Supported types: {', '.join(sorted(FILE_SEARCH_SUPPORTED_EXTENSIONS))}",
        )

    # Validate file size (512MB File Search cap).
    file_size = resolved.stat().st_size
    if file_size > FILE_SEARCH_MAX_FILE_SIZE:
        raise UploadFileError(
            f"File size {file_size / (1024*1024):.2f} MB exceeds " f"File Search limit of {FILE_SEARCH_MAX_FILE_SIZE / (1024*1024):.0f} MB",
        )

    # Determine MIME type, defaulting to a generic binary type.
    mime_type, _ = mimetypes.guess_type(resolved.as_posix())
    if not mime_type:
        mime_type = "application/octet-stream"

    logger.info(f"Queued local file for File Search upload: {resolved}")
    return {
        "type": "file_pending_upload",
        "path": str(resolved),
        "mime_type": mime_type,
        "source": "local",
    }
|
|
646
|
+
|
|
647
|
+
def _resolve_local_path(self, raw_path: str, all_params: Dict[str, Any]) -> Path:
|
|
648
|
+
"""Resolve a local path using cwd from all_params or config, mirroring file_path resolution."""
|
|
649
|
+
resolved = Path(raw_path).expanduser()
|
|
650
|
+
if not resolved.is_absolute():
|
|
651
|
+
cwd = all_params.get("cwd") or self.config.get("cwd")
|
|
652
|
+
if cwd:
|
|
653
|
+
resolved = Path(cwd).joinpath(resolved)
|
|
654
|
+
else:
|
|
655
|
+
resolved = resolved.resolve()
|
|
656
|
+
return resolved
|
|
657
|
+
|
|
658
|
+
def _validate_media_size(self, path: Path, limit_mb: int) -> None:
|
|
659
|
+
"""Validate media file size against MB limit; raise UploadFileError if exceeded."""
|
|
660
|
+
file_size = path.stat().st_size
|
|
661
|
+
if file_size > limit_mb * 1024 * 1024:
|
|
662
|
+
logger.warning(
|
|
663
|
+
f"Media file too large: {file_size / (1024 * 1024):.2f} MB at {path} (limit {limit_mb} MB)",
|
|
664
|
+
)
|
|
665
|
+
raise UploadFileError(
|
|
666
|
+
f"Media file size {file_size / (1024 * 1024):.2f} MB exceeds limit of {limit_mb:.0f} MB: {path}",
|
|
667
|
+
)
|
|
668
|
+
|
|
669
|
+
def _read_base64(self, path: Path) -> Tuple[str, str]:
|
|
670
|
+
"""Read file bytes and return (base64, guessed_mime_type)."""
|
|
671
|
+
mime_type, _ = mimetypes.guess_type(path.as_posix())
|
|
672
|
+
try:
|
|
673
|
+
data = path.read_bytes()
|
|
674
|
+
except OSError as exc:
|
|
675
|
+
raise UploadFileError(f"Failed to read file {path}: {exc}") from exc
|
|
676
|
+
encoded = base64.b64encode(data).decode("utf-8")
|
|
677
|
+
return encoded, (mime_type or "")
|
|
678
|
+
|
|
679
|
+
async def _fetch_audio_url_as_base64(
    self,
    url: str,
    all_params: Dict[str, Any],
) -> Tuple[str, str]:
    """Fetch audio from a URL and return ``(base64_data, mime_type)``.

    Currently supports wav and mp3. The Content-Type header is validated
    first; when it is not a supported audio type, the URL's extension is
    used as a fallback guess before rejecting the file.

    Args:
        url: HTTP/HTTPS URL to fetch audio from.
        all_params: Parameters dict containing optional media_max_file_size_mb.

    Returns:
        Tuple of (base64_encoded_string, mime_type).

    Raises:
        UploadFileError: If the fetch fails, the format is unsupported, or
            the payload exceeds the configured size limit.
    """
    # Size limit (default 64MB) from call params, then config, then constant.
    limit_mb = all_params.get("media_max_file_size_mb") or self.config.get("media_max_file_size_mb") or MEDIA_MAX_FILE_SIZE_MB
    max_size_bytes = int(limit_mb) * 1024 * 1024

    async with httpx.AsyncClient() as http_client:
        try:
            response = await http_client.get(url, timeout=30.0)
            response.raise_for_status()
        except httpx.TimeoutException as exc:
            raise UploadFileError(
                f"Timeout (30s) while fetching audio from {url}",
            ) from exc
        except httpx.HTTPError as exc:
            raise UploadFileError(
                f"Failed to fetch audio from {url}: {exc}",
            ) from exc

        # Determine the MIME type from the Content-Type header.
        header_value = response.headers.get("Content-Type", "")
        mime_type = header_value.split(";")[0].strip().lower()

        # Strict format validation (wav and mp3 only), with a fallback guess
        # from the URL's file extension when the header is unhelpful.
        if mime_type not in SUPPORTED_AUDIO_MIME_TYPES:
            guessed_mime, _ = mimetypes.guess_type(url)
            if guessed_mime and guessed_mime.lower() in SUPPORTED_AUDIO_MIME_TYPES:
                mime_type = guessed_mime.lower()
            else:
                raise UploadFileError(
                    f"Unsupported audio format for {url}. " f"Supported formats: {', '.join(sorted(SUPPORTED_AUDIO_FORMATS))}",
                )

        # Collapse MIME aliases to canonical values.
        if mime_type in {"audio/wav", "audio/wave", "audio/x-wav"}:
            mime_type = "audio/wav"
        elif mime_type in {"audio/mpeg", "audio/mp3"}:
            mime_type = "audio/mpeg"

        audio_bytes = response.content

        # Enforce the configured size limit on the downloaded payload.
        if len(audio_bytes) > max_size_bytes:
            raise UploadFileError(
                f"Audio file size {len(audio_bytes) / (1024 * 1024):.2f} MB exceeds limit of {limit_mb} MB: {url}",
            )

        encoded = base64.b64encode(audio_bytes).decode("utf-8")

        logger.info(
            f"Fetched and encoded audio from URL: {url} " f"({len(audio_bytes) / (1024 * 1024):.2f} MB, {mime_type})",
        )

        return encoded, mime_type
|
|
754
|
+
|
|
755
|
+
async def stream_with_tools(
    self,
    messages: List[Dict[str, Any]],
    tools: List[Dict[str, Any]],
    **kwargs,
) -> AsyncGenerator[StreamChunk, None]:
    """Stream response using OpenAI Response API with unified MCP/non-MCP processing.

    Chooses recursive MCP execution when MCP functions are registered,
    otherwise plain passthrough streaming. MCP setup/teardown failures fall
    back to non-MCP streaming with user-facing status chunks rather than
    aborting the stream.

    Args:
        messages: Conversation messages.
        tools: Tool schemas to expose to the model.
        **kwargs: Backend call parameters; ``agent_id`` is used for logging.

    Yields:
        StreamChunk items (content, mcp_status, error, ...).
    """
    agent_id = kwargs.get("agent_id", None)

    log_backend_activity(
        self.get_provider_name(),
        "Starting stream_with_tools",
        {"num_messages": len(messages), "num_tools": len(tools) if tools else 0},
        agent_id=agent_id,
    )

    # Catch setup errors by wrapping the context manager itself
    try:
        # Use async context manager for proper MCP resource management
        async with self:
            client = self._create_client(**kwargs)

            try:
                # Determine if MCP processing is needed
                use_mcp = bool(self._mcp_functions)

                # Use parent class method to yield MCP status chunks
                async for chunk in self.yield_mcp_status_chunks(use_mcp):
                    yield chunk

                if use_mcp:
                    # MCP MODE: Recursive function call detection and execution
                    logger.info("Using recursive MCP execution mode")

                    current_messages = self._trim_message_history(messages.copy())

                    # Start recursive MCP streaming
                    async for chunk in self._stream_with_mcp_tools(current_messages, tools, client, **kwargs):
                        yield chunk

                else:
                    # NON-MCP MODE: Simple passthrough streaming
                    logger.info("Using no-MCP mode")

                    # Start non-MCP streaming
                    async for chunk in self._stream_without_mcp_tools(messages, tools, client, **kwargs):
                        yield chunk

            except Exception as e:
                # Enhanced error handling for MCP-related errors during streaming
                if isinstance(e, (MCPConnectionError, MCPTimeoutError, MCPServerError, MCPError)):
                    # Record failure for circuit breaker
                    await self._record_mcp_circuit_breaker_failure(e, agent_id)

                    # Handle MCP exceptions with fallback
                    async for chunk in self._stream_handle_mcp_exceptions(e, messages, tools, client, **kwargs):
                        yield chunk
                else:
                    logger.error(f"Streaming error: {e}")
                    yield StreamChunk(type="error", error=str(e))

            finally:
                await self._cleanup_client(client)
    except Exception as e:
        # Handle exceptions that occur during MCP setup (__aenter__) or teardown.
        # Provide a clear user-facing message and fall back to non-MCP streaming.
        #
        # Bug fix: the fallback previously cleaned up `client` in its finally
        # block; when _create_client itself raised (or __aenter__ failed before
        # the main path bound `client`), that name was unbound — and when the
        # main path had already cleaned its client, it was cleaned twice. Use a
        # dedicated, sentinel-initialized fallback client instead.
        fallback_client = None
        try:
            fallback_client = self._create_client(**kwargs)

            if isinstance(e, (MCPConnectionError, MCPTimeoutError, MCPServerError, MCPError)):
                # Handle MCP exceptions with fallback
                async for chunk in self._stream_handle_mcp_exceptions(e, messages, tools, fallback_client, **kwargs):
                    yield chunk
            else:
                # Generic setup error: still notify if MCP was configured
                if self.mcp_servers:
                    yield StreamChunk(
                        type="mcp_status",
                        status="mcp_unavailable",
                        content=f"⚠️ [MCP] Setup failed; continuing without MCP ({e})",
                        source="mcp_setup",
                    )

                # Proceed with non-MCP streaming
                async for chunk in self._stream_without_mcp_tools(messages, tools, fallback_client, **kwargs):
                    yield chunk
        except Exception as inner_e:
            logger.error(f"Streaming error during MCP setup fallback: {inner_e}")
            yield StreamChunk(type="error", error=str(inner_e))
        finally:
            # Only clean up a client this fallback actually created.
            if fallback_client is not None:
                await self._cleanup_client(fallback_client)
|
|
847
|
+
|
|
848
|
+
async def _stream_without_mcp_tools(
    self,
    messages: List[Dict[str, Any]],
    tools: List[Dict[str, Any]],
    client,
    **kwargs,
) -> AsyncGenerator[StreamChunk, None]:
    """Simple passthrough streaming without MCP processing.

    Builds provider API params, strips any MCP-registered tools from the tool
    list, dispatches to the provider-appropriate client call, and forwards the
    processed stream chunks.
    """
    agent_id = kwargs.get("agent_id", None)
    all_params = {**self.config, **kwargs}
    processed_messages = await self._process_upload_files(messages, all_params)
    api_params = await self.api_params_handler.build_api_params(processed_messages, tools, all_params)

    # Filter MCP tools out of the request so the fallback stays MCP-free.
    if "tools" in api_params:
        kept_tools = []
        for candidate in api_params.get("tools", []):
            tool_kind = candidate.get("type")
            if tool_kind == "mcp":
                # Native MCP tool entries are dropped outright.
                continue
            if tool_kind == "function":
                # Both {"function": {"name": ...}} and flat {"name": ...} forms.
                if "function" in candidate:
                    fn_name = candidate.get("function", {}).get("name")
                else:
                    fn_name = candidate.get("name")
                if fn_name and fn_name in self._mcp_function_names:
                    continue
            kept_tools.append(candidate)
        api_params["tools"] = kept_tools

    # Dispatch to the provider-specific streaming endpoint.
    provider = self.get_provider_name().lower()
    if "openai" in provider:
        stream = await client.responses.create(**api_params)
    elif "claude" in provider:
        # Beta features require the beta messages endpoint.
        stream = await (client.beta.messages.create(**api_params) if "betas" in api_params else client.messages.create(**api_params))
    else:
        stream = await client.chat.completions.create(**api_params)

    async for chunk in self._process_stream(stream, all_params, agent_id):
        yield chunk
|
|
887
|
+
|
|
888
|
+
async def _stream_handle_mcp_exceptions(
    self,
    error: Exception,
    messages: List[Dict[str, Any]],
    tools: List[Dict[str, Any]],
    client,
    **kwargs,
) -> AsyncGenerator[StreamChunk, None]:
    """Handle MCP errors with specific messaging and fall back to non-MCP tools.

    Records the failure in the MCP stats, emits a detailed ``mcp_status``
    chunk plus a user-friendly content chunk, then resumes streaming without
    MCP tools.

    Note: the original had two consecutive string literals here; only the
    first was a docstring and the second was a dead statement — merged.

    Args:
        error: The MCP exception that interrupted streaming.
        messages: Conversation messages to replay without MCP.
        tools: Non-MCP tool schemas.
        client: Provider client to reuse for the fallback stream.
    """
    # Update failure stats under the lock and snapshot the call index
    # so the log/report lines are consistent.
    async with self._stats_lock:
        self._mcp_tool_failures += 1
        call_index_snapshot = self._mcp_tool_calls_count

    if MCPErrorHandler:
        log_type, user_message, _ = MCPErrorHandler.get_error_details(error)
    else:
        log_type, user_message = "mcp_error", "[MCP] Error occurred"

    logger.warning(f"MCP tool call #{call_index_snapshot} failed - {log_type}: {error}")

    # Yield detailed MCP error status as StreamChunk
    yield StreamChunk(
        type="mcp_status",
        status="mcp_tools_failed",
        content=f"MCP tool call failed (call #{call_index_snapshot}): {user_message}",
        source="mcp_error",
    )

    # Yield user-friendly error message
    yield StreamChunk(
        type="content",
        content=f"\n⚠️ {user_message} ({error}); continuing without MCP tools\n",
    )

    async for chunk in self._stream_without_mcp_tools(messages, tools, client, **kwargs):
        yield chunk
|
|
926
|
+
|
|
927
|
+
def _track_mcp_function_names(self, tools: List[Dict[str, Any]]) -> None:
|
|
928
|
+
"""Track MCP function names for fallback filtering."""
|
|
929
|
+
for tool in tools:
|
|
930
|
+
if tool.get("type") == "function":
|
|
931
|
+
name = tool.get("function", {}).get("name") if "function" in tool else tool.get("name")
|
|
932
|
+
if name:
|
|
933
|
+
self._mcp_function_names.add(name)
|
|
934
|
+
|
|
935
|
+
async def _check_circuit_breaker_before_execution(self) -> bool:
|
|
936
|
+
"""Check circuit breaker status before executing MCP functions."""
|
|
937
|
+
if not (self._circuit_breakers_enabled and self._mcp_tools_circuit_breaker and MCPSetupManager and MCPCircuitBreakerManager):
|
|
938
|
+
return True
|
|
939
|
+
|
|
940
|
+
# Get current mcp_tools servers using utility functions
|
|
941
|
+
normalized_servers = MCPSetupManager.normalize_mcp_servers(self.mcp_servers)
|
|
942
|
+
mcp_tools_servers = MCPSetupManager.separate_stdio_streamable_servers(normalized_servers)
|
|
943
|
+
|
|
944
|
+
filtered_servers = MCPCircuitBreakerManager.apply_circuit_breaker_filtering(
|
|
945
|
+
mcp_tools_servers,
|
|
946
|
+
self._mcp_tools_circuit_breaker,
|
|
947
|
+
)
|
|
948
|
+
|
|
949
|
+
if not filtered_servers:
|
|
950
|
+
logger.warning("All MCP servers blocked by circuit breaker")
|
|
951
|
+
return False
|
|
952
|
+
|
|
953
|
+
return True
|
|
954
|
+
|
|
955
|
+
async def _record_mcp_circuit_breaker_failure(
|
|
956
|
+
self,
|
|
957
|
+
error: Exception,
|
|
958
|
+
agent_id: Optional[str] = None,
|
|
959
|
+
) -> None:
|
|
960
|
+
"""Record MCP failure for circuit breaker if enabled."""
|
|
961
|
+
if self._circuit_breakers_enabled and self._mcp_tools_circuit_breaker:
|
|
962
|
+
try:
|
|
963
|
+
# Get current mcp_tools servers for circuit breaker failure recording
|
|
964
|
+
normalized_servers = MCPSetupManager.normalize_mcp_servers(self.mcp_servers)
|
|
965
|
+
mcp_tools_servers = MCPSetupManager.separate_stdio_streamable_servers(normalized_servers)
|
|
966
|
+
|
|
967
|
+
await MCPCircuitBreakerManager.record_event(
|
|
968
|
+
mcp_tools_servers,
|
|
969
|
+
self._mcp_tools_circuit_breaker,
|
|
970
|
+
"failure",
|
|
971
|
+
error_message=str(error),
|
|
972
|
+
backend_name=self.backend_name,
|
|
973
|
+
agent_id=agent_id,
|
|
974
|
+
)
|
|
975
|
+
except Exception as cb_error:
|
|
976
|
+
logger.warning(f"Failed to record circuit breaker failure: {cb_error}")
|
|
977
|
+
|
|
978
|
+
async def _record_mcp_circuit_breaker_success(self, servers_to_use: List[Dict[str, Any]]) -> None:
|
|
979
|
+
"""Record MCP success for circuit breaker if enabled."""
|
|
980
|
+
if self._circuit_breakers_enabled and self._mcp_tools_circuit_breaker and self._mcp_client and MCPCircuitBreakerManager:
|
|
981
|
+
try:
|
|
982
|
+
connected_server_names = self._mcp_client.get_server_names() if hasattr(self._mcp_client, "get_server_names") else []
|
|
983
|
+
if connected_server_names:
|
|
984
|
+
connected_server_configs = [server for server in servers_to_use if server.get("name") in connected_server_names]
|
|
985
|
+
if connected_server_configs:
|
|
986
|
+
await MCPCircuitBreakerManager.record_event(
|
|
987
|
+
connected_server_configs,
|
|
988
|
+
self._mcp_tools_circuit_breaker,
|
|
989
|
+
"success",
|
|
990
|
+
backend_name=self.backend_name,
|
|
991
|
+
agent_id=self.agent_id,
|
|
992
|
+
)
|
|
993
|
+
except Exception as cb_error:
|
|
994
|
+
logger.warning(f"Failed to record circuit breaker success: {cb_error}")
|
|
995
|
+
|
|
996
|
+
def _trim_message_history(self, messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Trim message history to prevent unbounded growth.

    Delegates to MCPMessageManager when available; otherwise returns the
    messages unchanged.
    """
    if not MCPMessageManager:
        return messages
    return MCPMessageManager.trim_message_history(messages, self._max_mcp_message_history)
|
|
1001
|
+
|
|
1002
|
+
async def cleanup_mcp(self) -> None:
    """Cleanup MCP connections and reset all MCP state.

    The state reset happens unconditionally, even when there is no client
    (or no resource manager) to tear down.
    """
    if self._mcp_client and MCPResourceManager:
        await MCPResourceManager.cleanup_mcp_client(
            self._mcp_client,
            backend_name=self.backend_name,
            agent_id=self.agent_id,
        )
    # Reset all MCP bookkeeping regardless of whether a client existed.
    self._mcp_client = None
    self._mcp_initialized = False
    self._mcp_functions.clear()
    self._mcp_function_names.clear()
|
|
1014
|
+
|
|
1015
|
+
async def __aenter__(self) -> "MCPBackend":
    """Async context manager entry: initialize MCP tools if configured."""
    # Without the resource manager there is nothing to set up.
    if not MCPResourceManager:
        return self
    await MCPResourceManager.setup_mcp_context_manager(
        self,
        backend_name=self.backend_name,
        agent_id=self.agent_id,
    )
    return self
|
|
1025
|
+
|
|
1026
|
+
async def __aexit__(
    self,
    exc_type: Optional[type],
    exc_val: Optional[BaseException],
    exc_tb: Optional[object],
) -> bool:
    """Async context manager exit with automatic resource cleanup.

    Delegates MCP teardown to MCPResourceManager when available, then
    returns False so any in-flight exception propagates unchanged.
    """
    if MCPResourceManager:
        await MCPResourceManager.cleanup_mcp_context_manager(
            self,
            logger_instance=logger,
            backend_name=self.backend_name,
            agent_id=self.agent_id,
        )
    # Don't suppress the original exception if one occurred
    # (annotation corrected from -> None: the method returns False).
    return False
|
|
1042
|
+
|
|
1043
|
+
def get_mcp_server_count(self) -> int:
    """Get count of stdio/streamable-http servers."""
    # No configured servers (or no setup manager) means a count of zero.
    if not self.mcp_servers:
        return 0
    if not MCPSetupManager:
        return 0
    normalized = MCPSetupManager.normalize_mcp_servers(self.mcp_servers)
    return len(MCPSetupManager.separate_stdio_streamable_servers(normalized))
|
|
1051
|
+
|
|
1052
|
+
def yield_mcp_status_chunks(self, use_mcp: bool) -> "AsyncGenerator[StreamChunk, None]":
    """Yield MCP status chunks for connection and availability.

    Returns an async generator; sync wrapper shape preserved so callers can
    do ``async for chunk in self.yield_mcp_status_chunks(use_mcp)``.
    """

    async def _emit():
        # Configured-but-unavailable MCP: tell the user we are falling back.
        if self.mcp_servers and not use_mcp:
            yield StreamChunk(
                type="mcp_status",
                status="mcp_unavailable",
                content="⚠️ [MCP] Setup failed or no tools available; continuing without MCP",
                source="mcp_setup",
            )

        # Connection status when MCP tools are active.
        if use_mcp and self.mcp_servers:
            server_count = self.get_mcp_server_count()
            if server_count > 0:
                yield StreamChunk(
                    type="mcp_status",
                    status="mcp_connected",
                    content=f"✅ [MCP] Connected to {server_count} servers",
                    source="mcp_setup",
                )

        # Tool availability summary.
        if use_mcp:
            yield StreamChunk(
                type="mcp_status",
                status="mcp_tools_initiated",
                content=f"🔧 [MCP] {len(self._mcp_functions)} tools available",
                source="mcp_session",
            )

    return _emit()
|
|
1085
|
+
|
|
1086
|
+
def is_mcp_tool_call(self, tool_name: str) -> bool:
    """Return True when *tool_name* is a registered MCP function."""
    registered = self._mcp_functions
    return tool_name in registered
|
|
1089
|
+
|
|
1090
|
+
def get_mcp_tools_formatted(self) -> List[Dict[str, Any]]:
    """Get MCP tools formatted for this backend's API format.

    Delegates formatting to the backend's formatter and records the
    resulting function names for fallback filtering.

    Returns:
        Formatted MCP tool schemas, or an empty list when no MCP functions
        are registered.
    """
    if not self._mcp_functions:
        return []

    # Fix: removed a dead `mcp_tools = []` assignment that was immediately
    # overwritten by the formatter call.
    mcp_tools = self.formatter.format_mcp_tools(self._mcp_functions)

    # Track function names for fallback filtering
    self._track_mcp_function_names(mcp_tools)

    return mcp_tools
|