signalpilot-ai-internal 0.7.6__py3-none-any.whl → 0.10.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- signalpilot_ai_internal/__init__.py +1 -0
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/databricks_schema_service.py +902 -0
- signalpilot_ai_internal/file_scanner_service.py +2 -1
- signalpilot_ai_internal/handlers.py +72 -2
- signalpilot_ai_internal/mcp_handlers.py +508 -0
- signalpilot_ai_internal/mcp_server_manager.py +298 -0
- signalpilot_ai_internal/mcp_service.py +1303 -0
- signalpilot_ai_internal/schema_search_config.yml +8 -8
- signalpilot_ai_internal/schema_search_service.py +62 -1
- signalpilot_ai_internal/test_dbt_mcp_server.py +180 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +5 -3
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +4 -2
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +7 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/110.224e83db03814fd03955.js +7 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.c4232851631fb2e7e59a.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/726.318e4e791edb63cc788f.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.d9914229e4f120e7e9e4.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.d80de1e4da5b520d2f3b.js +1 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b63c429ca81e743b403c.js +1 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +38 -20
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/METADATA +3 -2
- signalpilot_ai_internal-0.10.22.dist-info/RECORD +56 -0
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/WHEEL +1 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/490.b4ccb9601c8112407c5d.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.ed04fa601a43e8dd24d1.js +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/898.4e9edb7f224152c1dcb4.js +0 -2
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/898.4e9edb7f224152c1dcb4.js.LICENSE.txt +0 -1
- signalpilot_ai_internal-0.7.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.ee8951353b00c13b8070.js +0 -1
- signalpilot_ai_internal-0.7.6.dist-info/RECORD +0 -49
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
- {signalpilot_ai_internal-0.7.6.data → signalpilot_ai_internal-0.10.22.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.7.6.dist-info → signalpilot_ai_internal-0.10.22.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,1303 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MCP Connection Service - Manages connections to Model Context Protocol servers
|
|
3
|
+
Supports command-based (stdio), HTTP, and SSE connection types
|
|
4
|
+
|
|
5
|
+
IMPORTANT: On Windows, asyncio subprocess support requires ProactorEventLoop, but Jupyter/Tornado
|
|
6
|
+
uses SelectorEventLoop. Instead of using asyncio.create_subprocess_*, we use subprocess.Popen
|
|
7
|
+
with asyncio wrappers for cross-platform compatibility.
|
|
8
|
+
"""
|
|
9
|
+
import asyncio
|
|
10
|
+
import json
|
|
11
|
+
import logging
|
|
12
|
+
import uuid
|
|
13
|
+
import traceback
|
|
14
|
+
import sys
|
|
15
|
+
import platform
|
|
16
|
+
import subprocess
|
|
17
|
+
import threading
|
|
18
|
+
import os
|
|
19
|
+
from typing import Dict, List, Optional, Any
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
import aiohttp
|
|
22
|
+
from .cache_service import get_cache_service, RobustFileOperations
|
|
23
|
+
|
|
24
|
+
logger = logging.getLogger(__name__)
|
|
25
|
+
|
|
26
|
+
# Set logger to DEBUG level for comprehensive debugging
|
|
27
|
+
logger.setLevel(logging.DEBUG)
|
|
28
|
+
|
|
29
|
+
# Check current event loop (for debugging)
|
|
30
|
+
# Debug aid: log which event loop implementation is active at import time.
# On Windows, asyncio subprocesses need ProactorEventLoop; with a
# SelectorEventLoop we fall back to subprocess.Popen (see module docstring).
try:
    loop = asyncio.get_running_loop()
except RuntimeError:
    # Normal at import time: no loop is running yet.
    logger.debug(f"[MCP] No running event loop yet")
else:
    loop_type = type(loop).__name__
    logger.debug(f"[MCP] Current event loop type: {loop_type}")
    on_windows = platform.system() == 'Windows'
    if on_windows and 'ProactorEventLoop' not in loop_type:
        logger.warning(f"[MCP] Windows using {loop_type} - will use subprocess.Popen instead of asyncio subprocesses")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class MCPConnectionService:
|
|
42
|
+
"""Service for managing MCP server connections and tool calls"""
|
|
43
|
+
|
|
44
|
+
_instance = None
|
|
45
|
+
|
|
46
|
+
# Default whitelist of tools grouped by server type
|
|
47
|
+
DEFAULT_WHITELISTED_TOOLS_BY_SERVER = {
|
|
48
|
+
'Dbt': [
|
|
49
|
+
'query_metrics',
|
|
50
|
+
'get_metrics_compiled_sql',
|
|
51
|
+
'get_all_models',
|
|
52
|
+
'get_mart_models',
|
|
53
|
+
'get_model_details',
|
|
54
|
+
'get_model_parents',
|
|
55
|
+
'get_model_children',
|
|
56
|
+
'get_related_models',
|
|
57
|
+
'list_metrics',
|
|
58
|
+
'get_semantic_model_details',
|
|
59
|
+
],
|
|
60
|
+
'Google': [
|
|
61
|
+
'start_google_auth',
|
|
62
|
+
'search_docs',
|
|
63
|
+
'get_doc_content',
|
|
64
|
+
'list_docs_in_folder',
|
|
65
|
+
'inspect_doc_structure',
|
|
66
|
+
'read_document_comments',
|
|
67
|
+
'create_document_comment',
|
|
68
|
+
'reply_to_document_comment',
|
|
69
|
+
'resolve_document_comment',
|
|
70
|
+
'search_drive_files',
|
|
71
|
+
'list_drive_items',
|
|
72
|
+
'get_drive_file_content',
|
|
73
|
+
'get_drive_file_download_url',
|
|
74
|
+
'list_drive_items_in_folder',
|
|
75
|
+
],
|
|
76
|
+
'Slack': [
|
|
77
|
+
'conversations_search_messages',
|
|
78
|
+
'conversations_history',
|
|
79
|
+
'conversations_replies',
|
|
80
|
+
'channels_list',
|
|
81
|
+
],
|
|
82
|
+
'Notion': [
|
|
83
|
+
'API-post-search',
|
|
84
|
+
'API-get-block-children',
|
|
85
|
+
'API-retrieve-a-page',
|
|
86
|
+
'API-retrieve-a-database',
|
|
87
|
+
'API-post-database-query',
|
|
88
|
+
],
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
# Flattened list for backward compatibility and general checks
|
|
92
|
+
DEFAULT_WHITELISTED_TOOLS = [
|
|
93
|
+
tool for tools in DEFAULT_WHITELISTED_TOOLS_BY_SERVER.values() for tool in tools
|
|
94
|
+
]
|
|
95
|
+
|
|
96
|
+
def __init__(self):
    """Set up connection registries, the config cache, and run migration once."""
    # Live connection objects, keyed by server id.
    self.connections: Dict[str, 'MCPConnection'] = {}
    # Tool listings cached per server id, so repeated list_tools calls are cheap.
    self.tools_cache: Dict[str, List[Dict]] = {}
    # Key under which legacy cache-based storage kept MCP configs.
    self.mcp_config_key = 'mcp_servers'
    # App-level cache service (legacy storage backend, still read for migration).
    self.cache = get_cache_service()
    # Guard so the cache -> mcp.json migration runs at most once per process.
    self._migrated_from_cache = False
    self._ensure_migration()
|
|
103
|
+
|
|
104
|
+
@classmethod
def get_instance(cls):
    """Return the process-wide singleton, creating it lazily on first use.

    NOTE(review): not thread-safe; assumes first call happens on a single
    thread — confirm if used from multiple threads.
    """
    if cls._instance is None:
        cls._instance = MCPConnectionService()
    return cls._instance
|
|
110
|
+
|
|
111
|
+
def get_mcp_config_path(self) -> Path:
    """Return the path to the Cursor-format MCP config file (mcp.json).

    Prefers the shared SignalPilot cache directory so all SignalPilot files
    live together; falls back to a per-user dotfolder (created on demand)
    when no cache directory is usable.
    """
    from .cache_service import CacheDirectoryManager

    cache_dir = CacheDirectoryManager.find_usable_cache_directory()
    if cache_dir:
        return cache_dir / 'mcp.json'

    # Fallback: legacy per-user location.
    if platform.system() == 'Windows':
        home = Path(os.environ.get('USERPROFILE', Path.home()))
    else:
        home = Path.home()
    config_dir = home / '.signalpilot-ai-internal'
    config_dir.mkdir(parents=True, exist_ok=True)
    return config_dir / 'mcp.json'
|
|
126
|
+
|
|
127
|
+
def _infer_server_type(self, config: Dict) -> str:
|
|
128
|
+
"""Infer server type from config structure (Cursor format)"""
|
|
129
|
+
if 'command' in config:
|
|
130
|
+
return 'command'
|
|
131
|
+
elif 'url' in config:
|
|
132
|
+
return 'http' # HTTP/SSE both use 'url'
|
|
133
|
+
else:
|
|
134
|
+
# Fallback: try to infer from old format
|
|
135
|
+
return config.get('type', 'command')
|
|
136
|
+
|
|
137
|
+
def _normalize_config_from_storage(self, server_id: str, config: Dict) -> Dict:
|
|
138
|
+
"""Convert Cursor schema format to internal format"""
|
|
139
|
+
# Cursor format: server config may have 'command'/'args' or 'name'/'url'
|
|
140
|
+
# Internal format: always has 'id', 'name', 'type', and type-specific fields
|
|
141
|
+
|
|
142
|
+
normalized = {
|
|
143
|
+
'id': server_id,
|
|
144
|
+
'type': self._infer_server_type(config)
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
# Copy all fields
|
|
148
|
+
normalized.update(config)
|
|
149
|
+
|
|
150
|
+
# Ensure name exists (use server_id as fallback for command-based)
|
|
151
|
+
if 'name' not in normalized:
|
|
152
|
+
if normalized['type'] == 'command':
|
|
153
|
+
normalized['name'] = server_id
|
|
154
|
+
else:
|
|
155
|
+
normalized['name'] = config.get('name', server_id)
|
|
156
|
+
|
|
157
|
+
# Handle enabled field (defaults to True)
|
|
158
|
+
if 'enabled' not in normalized:
|
|
159
|
+
normalized['enabled'] = True
|
|
160
|
+
|
|
161
|
+
return normalized
|
|
162
|
+
|
|
163
|
+
def _normalize_config_for_storage(self, config: Dict) -> Dict:
|
|
164
|
+
"""Convert internal format to Cursor schema format"""
|
|
165
|
+
# Remove internal-only fields
|
|
166
|
+
storage_config = {}
|
|
167
|
+
|
|
168
|
+
# Copy relevant fields based on type
|
|
169
|
+
server_type = config.get('type', 'command')
|
|
170
|
+
|
|
171
|
+
if server_type == 'command':
|
|
172
|
+
if 'command' in config:
|
|
173
|
+
storage_config['command'] = config['command']
|
|
174
|
+
if 'args' in config:
|
|
175
|
+
storage_config['args'] = config['args']
|
|
176
|
+
if 'env' in config:
|
|
177
|
+
storage_config['env'] = config['env']
|
|
178
|
+
else: # http/sse
|
|
179
|
+
if 'name' in config:
|
|
180
|
+
storage_config['name'] = config['name']
|
|
181
|
+
if 'url' in config:
|
|
182
|
+
storage_config['url'] = config['url']
|
|
183
|
+
if 'token' in config:
|
|
184
|
+
storage_config['token'] = config['token']
|
|
185
|
+
|
|
186
|
+
# Add enabled if not default (True)
|
|
187
|
+
enabled = config.get('enabled', True)
|
|
188
|
+
if not enabled:
|
|
189
|
+
storage_config['enabled'] = False
|
|
190
|
+
|
|
191
|
+
# Add enabledTools if present
|
|
192
|
+
if 'enabledTools' in config:
|
|
193
|
+
storage_config['enabledTools'] = config['enabledTools']
|
|
194
|
+
|
|
195
|
+
return storage_config
|
|
196
|
+
|
|
197
|
+
def _ensure_migration(self):
    """One-time migration of server configs from the cache service to mcp.json.

    No-op when migration already ran this process or when mcp.json already
    exists. Any failure is logged and treated as "start fresh" — the flag is
    set regardless so we never retry within the same process.
    """
    if self._migrated_from_cache:
        return

    config_path = self.get_mcp_config_path()
    if config_path.exists():
        # The JSON file is already the source of truth; nothing to migrate.
        self._migrated_from_cache = True
        return

    try:
        cache_data = self.cache.get_app_value(self.mcp_config_key)
        old_configs = json.loads(cache_data) if cache_data else None
        if old_configs:
            logger.info(f"[MCP] Migrating {len(old_configs)} servers from cache to JSON file")

            # Re-shape each legacy entry into the Cursor schema.
            cursor_format = {'mcpServers': {}}
            for server_id, config in old_configs.items():
                config.setdefault('id', server_id)
                config.setdefault('enabled', True)
                cursor_format['mcpServers'][server_id] = self._normalize_config_for_storage(config)

            if RobustFileOperations.safe_write_json(config_path, cursor_format):
                logger.info(f"[MCP] Successfully migrated to {config_path}")
                # Old cache entry is intentionally kept as a safety net.
                # self.cache.delete_app_value(self.mcp_config_key)
            else:
                logger.error(f"[MCP] Failed to write migrated config to {config_path}")
    except Exception as e:
        logger.warning(f"[MCP] Migration from cache failed: {e}, starting fresh")

    self._migrated_from_cache = True
|
|
241
|
+
|
|
242
|
+
def save_server_config(self, server_config: Dict) -> Dict:
    """Persist one server config into mcp.json (Cursor format).

    Assigns a fresh UUID id when the config has none. Returns the
    (possibly id-augmented) input config; logs and re-raises on failure.
    """
    try:
        # Make sure the entry is addressable by id.
        if 'id' not in server_config:
            server_config['id'] = str(uuid.uuid4())
        server_id = server_config['id']

        # Read-modify-write the Cursor-format file.
        config_path = self.get_mcp_config_path()
        cursor_data = RobustFileOperations.safe_read_json(config_path, {})
        mcp_servers = cursor_data.get('mcpServers', {})
        mcp_servers[server_id] = self._normalize_config_for_storage(server_config)
        cursor_data['mcpServers'] = mcp_servers

        if not RobustFileOperations.safe_write_json(config_path, cursor_data):
            raise RuntimeError(f"Failed to write MCP config to {config_path}")

        logger.info(f"Saved MCP server config: {server_config.get('name', server_id)}")
        return server_config
    except Exception as e:
        logger.error(f"Error saving MCP server config: {e}")
        logger.error(f"Stack trace:\n{traceback.format_exc()}")
        raise
|
|
271
|
+
|
|
272
|
+
def load_all_configs(self) -> Dict[str, Dict]:
    """Load every server config from mcp.json, normalized to internal format.

    Returns an empty dict when the file is missing or on any read error.
    """
    try:
        config_path = self.get_mcp_config_path()
        if not config_path.exists():
            return {}

        cursor_data = RobustFileOperations.safe_read_json(config_path, {})
        stored = cursor_data.get('mcpServers', {})

        # Normalize each stored (Cursor-schema) entry to the internal shape.
        return {
            server_id: self._normalize_config_from_storage(server_id, entry)
            for server_id, entry in stored.items()
        }
    except Exception as e:
        logger.error(f"Error loading MCP configs: {e}")
        logger.error(f"Stack trace:\n{traceback.format_exc()}")
        return {}
|
|
296
|
+
|
|
297
|
+
def get_server_config(self, server_id: str) -> Optional[Dict]:
    """Return the normalized config for one server, or None if unknown."""
    return self.load_all_configs().get(server_id)
|
|
301
|
+
|
|
302
|
+
def delete_server_config(self, server_id: str) -> bool:
    """Remove a server entry from mcp.json, disconnecting it if connected.

    Returns True when an entry was removed, False when the file or entry
    is absent or on error.
    """
    try:
        config_path = self.get_mcp_config_path()
        if not config_path.exists():
            return False

        cursor_data = RobustFileOperations.safe_read_json(config_path, {})
        mcp_servers = cursor_data.get('mcpServers', {})
        if server_id not in mcp_servers:
            return False

        del mcp_servers[server_id]
        cursor_data['mcpServers'] = mcp_servers
        if not RobustFileOperations.safe_write_json(config_path, cursor_data):
            raise RuntimeError(f"Failed to write MCP config to {config_path}")

        # Best-effort async disconnect. Fix: asyncio.create_task() raises
        # RuntimeError when no event loop is running (this is a sync method),
        # which previously made the whole call report failure even though the
        # config had already been deleted from disk.
        if server_id in self.connections:
            try:
                asyncio.create_task(self.disconnect(server_id))
            except RuntimeError:
                logger.warning(f"[MCP] No running event loop; skipping async disconnect for {server_id}")

        logger.info(f"Deleted MCP server config: {server_id}")
        return True
    except Exception as e:
        logger.error(f"Error deleting MCP server config: {e}")
        logger.error(f"Stack trace:\n{traceback.format_exc()}")
        return False
|
|
333
|
+
|
|
334
|
+
async def connect(self, server_id: str) -> Dict:
    """Connect to a configured MCP server and cache its tool list.

    Returns the server-info payload (see _get_server_info). Raises
    ValueError for configuration problems (missing config, disabled server,
    unknown connection type) and RuntimeError (chained) for connection
    failures.
    """
    try:
        logger.debug(f"[MCP] Attempting to connect to server {server_id}")
        config = self.get_server_config(server_id)
        if not config:
            error_msg = f"Server configuration not found: {server_id}"
            logger.error(f"[MCP] {error_msg}")
            raise ValueError(error_msg)

        # Refuse to connect to a disabled server.
        if not config.get('enabled', True):
            error_msg = f"Server {server_id} is disabled"
            logger.warning(f"[MCP] {error_msg}")
            raise ValueError(error_msg)

        logger.debug(f"[MCP] Server config loaded: name={config.get('name')}, type={config.get('type')}")

        # Reuse a healthy existing connection; drop a stale one.
        if server_id in self.connections:
            connection = self.connections[server_id]
            if connection.is_connected():
                logger.info(f"[MCP] Already connected to MCP server: {config.get('name', server_id)}")
                return self._get_server_info(server_id, config)
            logger.warning(f"[MCP] Stale connection found for {server_id}, removing")
            del self.connections[server_id]

        # Determine connection type (infer from config shape if unset).
        connection_type = config.get('type')
        if not connection_type:
            connection_type = self._infer_server_type(config)
            config['type'] = connection_type

        logger.debug(f"[MCP] Connection type: {connection_type}")

        if connection_type == 'command':
            connection = MCPCommandConnection(server_id, config)
        elif connection_type in ['http', 'sse']:
            connection = MCPHTTPConnection(server_id, config)
        else:
            error_msg = f"Unknown connection type: {connection_type}"
            logger.error(f"[MCP] {error_msg}")
            raise ValueError(error_msg)

        logger.debug(f"[MCP] Starting connection to {config.get('name')}...")
        await connection.connect()
        self.connections[server_id] = connection
        logger.debug(f"[MCP] Connection established, listing tools...")

        # List and cache the server's tools.
        tools = await connection.list_tools()
        self.tools_cache[server_id] = tools

        # Auto-whitelist default tools. Fix: the old code invoked this twice
        # with identical arguments (before and after the success log); one
        # call is sufficient.
        tool_names = [tool['name'] for tool in tools]
        self._ensure_default_whitelisted_tools(server_id, config, tool_names)

        # Fix: use .get() — configs are normally normalized to carry 'name',
        # but direct indexing turned a missing key into a spurious KeyError.
        logger.info(f"[MCP] ✓ Connected to MCP server: {config.get('name', server_id)} ({len(tools)} tools)")

        return self._get_server_info(server_id, config)
    except ValueError as e:
        # Configuration errors propagate unchanged.
        logger.error(f"[MCP] Configuration error for {server_id}: {str(e)}")
        logger.error(f"[MCP] Stack trace:\n{traceback.format_exc()}")
        raise
    except Exception as e:
        error_msg = f"Failed to connect to MCP server {server_id}: {type(e).__name__}: {str(e)}"
        logger.error(f"[MCP] {error_msg}")
        logger.error(f"[MCP] Full stack trace:\n{traceback.format_exc()}")
        # Wrap with the original exception type in the message for callers.
        raise RuntimeError(error_msg) from e
|
|
410
|
+
|
|
411
|
+
async def disconnect(self, server_id: str) -> bool:
    """Tear down and forget the connection for server_id.

    Returns True when a connection existed and was closed, False when
    there was nothing to disconnect or an error occurred.
    """
    try:
        connection = self.connections.get(server_id)
        if connection is None:
            return False

        await connection.disconnect()
        del self.connections[server_id]
        # Drop the cached tool list alongside the connection.
        self.tools_cache.pop(server_id, None)

        logger.info(f"Disconnected from MCP server: {server_id}")
        return True
    except Exception as e:
        logger.error(f"Error disconnecting from MCP server {server_id}: {e}")
        return False
|
|
428
|
+
|
|
429
|
+
async def list_tools(self, server_id: str) -> List[Dict]:
    """Return the tool list for a server, preferring the local cache.

    On a cache miss the connected server is queried and the result cached.
    Both paths re-apply the default tool whitelist, since the stored config
    may have changed since the last fetch. Raises ValueError when the
    server is not connected and nothing is cached.
    """
    try:
        cached = self.tools_cache.get(server_id)
        if cached is not None:
            config = self.get_server_config(server_id)
            if config:
                self._ensure_default_whitelisted_tools(
                    server_id, config, [t['name'] for t in cached])
            return cached

        if server_id not in self.connections:
            raise ValueError(f"Not connected to server: {server_id}")

        tools = await self.connections[server_id].list_tools()
        self.tools_cache[server_id] = tools

        config = self.get_server_config(server_id)
        if config:
            self._ensure_default_whitelisted_tools(
                server_id, config, [t['name'] for t in tools])

        return tools
    except Exception as e:
        logger.error(f"Error listing tools from MCP server {server_id}: {e}")
        raise
|
|
460
|
+
|
|
461
|
+
async def get_all_tools(self) -> List[Dict]:
    """Aggregate tools across all connected servers, tagged with server info.

    Each tool dict is annotated in place with 'serverId' and 'serverName'.
    Per-server failures are logged and skipped rather than propagated.
    """
    all_tools = []
    # Snapshot the keys: list_tools/disconnect may mutate self.connections.
    for server_id in list(self.connections.keys()):
        try:
            tools = await self.list_tools(server_id)
            config = self.get_server_config(server_id)

            # Fix: config can be None when the stored entry was deleted while
            # the connection stayed open; previously config.get(...) raised
            # AttributeError and the whole server's tools were dropped via
            # the except block.
            server_name = config.get('name', server_id) if config else server_id

            for tool in tools:
                tool['serverId'] = server_id
                tool['serverName'] = server_name

            all_tools.extend(tools)
        except Exception as e:
            logger.error(f"Error getting tools from server {server_id}: {e}")

    return all_tools
|
|
479
|
+
|
|
480
|
+
async def call_tool(self, server_id: str, tool_name: str, arguments: Dict) -> Any:
    """Invoke tool_name with the given arguments on a connected server.

    Raises ValueError when the server is not connected; re-raises any
    failure from the underlying connection after logging it.
    """
    try:
        connection = self.connections.get(server_id)
        if connection is None:
            raise ValueError(f"Not connected to server: {server_id}")

        result = await connection.call_tool(tool_name, arguments)
        logger.info(f"Called tool {tool_name} on server {server_id}")
        return result
    except Exception as e:
        logger.error(f"Error calling tool {tool_name} on server {server_id}: {e}")
        raise
|
|
494
|
+
|
|
495
|
+
def get_connection_status(self, server_id: str) -> str:
|
|
496
|
+
"""Get connection status for a server"""
|
|
497
|
+
if server_id in self.connections:
|
|
498
|
+
return 'connected' if self.connections[server_id].is_connected() else 'error'
|
|
499
|
+
return 'disconnected'
|
|
500
|
+
|
|
501
|
+
def _get_server_info(self, server_id: str, config: Dict) -> Dict:
|
|
502
|
+
"""Get server information for response"""
|
|
503
|
+
tools = self.tools_cache.get(server_id, [])
|
|
504
|
+
enabled_tools = config.get('enabledTools', [])
|
|
505
|
+
return {
|
|
506
|
+
'serverId': server_id,
|
|
507
|
+
'name': config.get('name', server_id),
|
|
508
|
+
'status': self.get_connection_status(server_id),
|
|
509
|
+
'type': config.get('type', 'command'),
|
|
510
|
+
'toolCount': len(tools),
|
|
511
|
+
'tools': tools,
|
|
512
|
+
'enabled': config.get('enabled', True),
|
|
513
|
+
'enabledTools': enabled_tools
|
|
514
|
+
}
|
|
515
|
+
|
|
516
|
+
def enable_server(self, server_id: str) -> bool:
    """Mark a server enabled in the persisted config.

    Returns True on success, False when the server is unknown or the
    save fails.
    """
    try:
        config = self.get_server_config(server_id)
        if not config:
            return False

        config['enabled'] = True
        self.save_server_config(config)
        logger.info(f"Enabled MCP server: {server_id}")
        return True
    except Exception as e:
        logger.error(f"Error enabling MCP server {server_id}: {e}")
        return False
|
|
530
|
+
|
|
531
|
+
def disable_server(self, server_id: str) -> bool:
    """Mark a server disabled in the persisted config, disconnecting it.

    Returns True on success, False when the server is unknown or on error.
    """
    try:
        config = self.get_server_config(server_id)
        if not config:
            return False

        config['enabled'] = False
        self.save_server_config(config)

        # Best-effort async disconnect. Fix: asyncio.create_task() raises
        # RuntimeError outside a running event loop (this is a sync method),
        # which previously made this return False even though the server had
        # already been disabled on disk.
        if server_id in self.connections:
            try:
                asyncio.create_task(self.disconnect(server_id))
            except RuntimeError:
                logger.warning(f"[MCP] No running event loop; skipping async disconnect for {server_id}")

        logger.info(f"Disabled MCP server: {server_id}")
        return True
    except Exception as e:
        logger.error(f"Error disabling MCP server {server_id}: {e}")
        return False
|
|
550
|
+
|
|
551
|
+
    def _ensure_default_whitelisted_tools(self, server_id: str, config: Dict, available_tool_names: List[str]) -> None:
        """Ensure default whitelisted tools are always enabled for a server.

        Called with the tool names the server just reported. Two cases:
        - First connection (no 'enabledTools' stored): seeds the enabled list —
          either the server-type defaults (for selective-whitelist servers) or
          every available tool.
        - Reconnect/update: merges the default whitelist back into the existing
          enabled set, re-enabling any defaults the user disabled.

        Persists via save_server_config only when the enabled set changes.
        Errors are logged and swallowed (best-effort).
        """
        try:
            existing_enabled = set(config.get('enabledTools', []))
            available_tools = set(available_tool_names)

            # Get server name to determine which tools to whitelist
            server_name = config.get('name', server_id)

            # Check if this server should use selective whitelisting
            # Only apply selective whitelisting for servers named "Notion", "Dbt", "Slack", or "Google"
            should_use_selective_whitelist = any(
                server_name.startswith(prefix) for prefix in ['Notion', 'Dbt', 'Slack', 'Google']
            )

            if should_use_selective_whitelist:
                # Determine which server type based on name prefix
                server_type = None
                for prefix in ['Notion', 'Dbt', 'Slack', 'Google']:
                    if server_name.startswith(prefix):
                        server_type = prefix
                        break

                # Get default whitelisted tools for this server type
                if server_type and server_type in self.DEFAULT_WHITELISTED_TOOLS_BY_SERVER:
                    default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS_BY_SERVER[server_type])
                else:
                    # Fallback: use all default whitelisted tools
                    default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS)
            else:
                # For other servers, use all default whitelisted tools
                default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS)

            # Find default whitelisted tools that are available
            # (defaults the server does not actually expose are ignored)
            available_default_tools = default_whitelisted & available_tools

            if 'enabledTools' not in config or not config.get('enabledTools'):
                # First connection
                if should_use_selective_whitelist:
                    # Only enable default whitelisted tools for this server type
                    config['enabledTools'] = list(available_default_tools)
                    self.save_server_config(config)
                    logger.info(f"[MCP] Auto-whitelisted {len(available_default_tools)} default whitelisted tools for {server_id} ({server_type}) (out of {len(available_tool_names)} available): {sorted(available_default_tools)}")
                else:
                    # Enable all tools for other servers
                    config['enabledTools'] = list(available_tools)
                    self.save_server_config(config)
                    logger.info(f"[MCP] Auto-whitelisted all {len(available_tools)} tools for {server_id} (not a selective whitelist server)")
            else:
                # On reconnect or update, ensure all default whitelisted tools are enabled
                # This will re-enable any default whitelisted tools that were disabled
                final_enabled = existing_enabled | available_default_tools

                if final_enabled != existing_enabled:
                    tools_added = final_enabled - existing_enabled
                    config['enabledTools'] = list(final_enabled)
                    self.save_server_config(config)
                    logger.info(f"[MCP] Auto-enabled {len(tools_added)} default whitelisted tools for {server_id}: {sorted(tools_added)}")
        except Exception as e:
            logger.error(f"Error ensuring default whitelisted tools for {server_id}: {e}")
            logger.error(f"Stack trace:\n{traceback.format_exc()}")
|
|
612
|
+
|
|
613
|
+
    def update_tool_enabled(self, server_id: str, tool_name: str, enabled: bool) -> bool:
        """Update enabled/disabled state for a specific tool and persist it.

        If the server has no stored 'enabledTools' yet, the list is seeded
        from the cached tool inventory (or empty when nothing is cached)
        before applying the toggle. Disabling a default-whitelisted tool is
        allowed but only until the next reconnect, which re-enables defaults.

        Returns True on success; False when the server is unknown or an
        error occurs.
        """
        try:
            config = self.get_server_config(server_id)
            if not config:
                return False

            # Check if this is a default whitelisted tool
            is_default_whitelisted = tool_name in self.DEFAULT_WHITELISTED_TOOLS

            # If trying to disable a default whitelisted tool, warn but allow it
            # (it will be re-enabled on next reconnect)
            if not enabled and is_default_whitelisted:
                logger.warning(f"Attempting to disable default whitelisted tool {tool_name} for server {server_id}. It will be re-enabled on reconnect.")

            # Get current enabled tools, or initialize with all available tools if not set
            enabled_tools = config.get('enabledTools')
            if enabled_tools is None:
                # If enabledTools is not set, initialize with all available tools from cache
                if server_id in self.tools_cache:
                    tool_names = [tool['name'] for tool in self.tools_cache[server_id]]
                    enabled_tools = tool_names
                    logger.info(f"Initializing enabledTools for {server_id} with {len(tool_names)} tools")
                else:
                    # If tools not cached, start with empty list (will be populated on next connect)
                    enabled_tools = []
                    logger.warning(f"No tools cached for {server_id}, starting with empty enabledTools")

            # Set semantics: adding an already-enabled tool or removing an
            # already-disabled one is a harmless no-op.
            enabled_tools_set = set(enabled_tools)

            if enabled:
                enabled_tools_set.add(tool_name)
            else:
                # Allow disabling even default whitelisted tools (they'll be re-enabled on reconnect)
                enabled_tools_set.discard(tool_name)

            config['enabledTools'] = list(enabled_tools_set)
            self.save_server_config(config)

            logger.info(f"{'Enabled' if enabled else 'Disabled'} tool {tool_name} for server {server_id}. Enabled tools: {len(enabled_tools_set)}")
            return True
        except Exception as e:
            logger.error(f"Error updating tool enabled state for {server_id}/{tool_name}: {e}")
            logger.error(f"Stack trace:\n{traceback.format_exc()}")
            return False
|
|
658
|
+
|
|
659
|
+
async def connect_all_enabled(self) -> Dict[str, Dict]:
|
|
660
|
+
"""Connect to all enabled MCP servers"""
|
|
661
|
+
results = {}
|
|
662
|
+
configs = self.load_all_configs()
|
|
663
|
+
|
|
664
|
+
for server_id, config in configs.items():
|
|
665
|
+
if config.get('enabled', True):
|
|
666
|
+
try:
|
|
667
|
+
logger.info(f"[MCP] Auto-connecting enabled server: {server_id}")
|
|
668
|
+
server_info = await self.connect(server_id)
|
|
669
|
+
results[server_id] = {'success': True, 'server': server_info}
|
|
670
|
+
except Exception as e:
|
|
671
|
+
logger.error(f"[MCP] Failed to auto-connect {server_id}: {e}")
|
|
672
|
+
results[server_id] = {'success': False, 'error': str(e)}
|
|
673
|
+
else:
|
|
674
|
+
logger.debug(f"[MCP] Skipping disabled server: {server_id}")
|
|
675
|
+
|
|
676
|
+
return results
|
|
677
|
+
|
|
678
|
+
    def update_config_file(self, new_json_content: str) -> Dict[str, Any]:
        """Update the entire config file and apply diff-based changes.

        Parses *new_json_content* (must be valid JSON with a top-level
        'mcpServers' object), diffs it against the currently loaded configs,
        writes the new file, and schedules an async task that applies the
        connection-level consequences (connect added/enabled servers,
        disconnect removed/disabled ones, bounce modified ones).

        Returns {'success': True, 'changes': {...}} where changes lists
        added / removed / modified server ids and enabled-flag flips.
        Raises ValueError for bad input, RuntimeError if the write fails;
        other errors are logged and re-raised.

        NOTE(review): the returned 'changes' reflect the diff only — the
        actual connects/disconnects happen later in the scheduled task.
        """
        try:
            config_path = self.get_mcp_config_path()

            # Parse new JSON
            try:
                new_data = json.loads(new_json_content)
            except json.JSONDecodeError as e:
                raise ValueError(f"Invalid JSON: {e}")

            if 'mcpServers' not in new_data:
                raise ValueError("JSON must contain 'mcpServers' object")

            new_servers = new_data.get('mcpServers', {})

            # Load current configs
            old_configs = self.load_all_configs()
            old_server_ids = set(old_configs.keys())
            new_server_ids = set(new_servers.keys())

            changes = {
                'added': [],
                'removed': [],
                'modified': [],
                'enabled_changes': []
            }

            # Detect added servers
            for server_id in new_server_ids - old_server_ids:
                changes['added'].append(server_id)

            # Detect removed servers
            for server_id in old_server_ids - new_server_ids:
                changes['removed'].append(server_id)
                # Disconnect removed servers
                if server_id in self.connections:
                    asyncio.create_task(self.disconnect(server_id))

            # Detect modified servers
            for server_id in new_server_ids & old_server_ids:
                old_config = old_configs[server_id]
                new_storage_config = new_servers[server_id]
                new_config = self._normalize_config_from_storage(server_id, new_storage_config)

                # Check if enabled status changed
                old_enabled = old_config.get('enabled', True)
                new_enabled = new_config.get('enabled', True)

                if old_enabled != new_enabled:
                    changes['enabled_changes'].append({
                        'server_id': server_id,
                        'old_enabled': old_enabled,
                        'new_enabled': new_enabled
                    })

                # Check if config changed (simple comparison)
                # Both sides are compared in storage form so normalization
                # differences don't produce false positives.
                old_storage = self._normalize_config_for_storage(old_config)
                if old_storage != new_storage_config:
                    changes['modified'].append(server_id)

            # Write new config to file
            if not RobustFileOperations.safe_write_json(config_path, new_data):
                raise RuntimeError(f"Failed to write updated config to {config_path}")

            # Apply changes asynchronously
            async def apply_changes():
                # Connect newly added enabled servers
                for server_id in changes['added']:
                    new_config = self._normalize_config_from_storage(server_id, new_servers[server_id])
                    if new_config.get('enabled', True):
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to connect added server {server_id}: {e}")

                # Handle enabled/disabled changes
                for change in changes['enabled_changes']:
                    server_id = change['server_id']
                    if change['new_enabled']:
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to connect enabled server {server_id}: {e}")
                    else:
                        await self.disconnect(server_id)

                # Handle modified servers (disconnect and reconnect if enabled)
                for server_id in changes['modified']:
                    new_config = self._normalize_config_from_storage(server_id, new_servers[server_id])
                    # Disconnect old connection
                    if server_id in self.connections:
                        await self.disconnect(server_id)
                    # Reconnect if enabled
                    if new_config.get('enabled', True):
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to reconnect modified server {server_id}: {e}")

            # Schedule async changes
            asyncio.create_task(apply_changes())

            logger.info(f"[MCP] Config file updated: {len(changes['added'])} added, {len(changes['removed'])} removed, {len(changes['modified'])} modified")

            return {
                'success': True,
                'changes': changes
            }

        except Exception as e:
            logger.error(f"Error updating config file: {e}")
            logger.error(f"Stack trace:\n{traceback.format_exc()}")
            raise
|
|
792
|
+
|
|
793
|
+
def get_config_file_content(self) -> str:
|
|
794
|
+
"""Get the raw JSON file content"""
|
|
795
|
+
try:
|
|
796
|
+
config_path = self.get_mcp_config_path()
|
|
797
|
+
if not config_path.exists():
|
|
798
|
+
return json.dumps({'mcpServers': {}}, indent=2)
|
|
799
|
+
|
|
800
|
+
cursor_data = RobustFileOperations.safe_read_json(config_path, {'mcpServers': {}})
|
|
801
|
+
return json.dumps(cursor_data, indent=2)
|
|
802
|
+
except Exception as e:
|
|
803
|
+
logger.error(f"Error reading config file: {e}")
|
|
804
|
+
return json.dumps({'mcpServers': {}}, indent=2)
|
|
805
|
+
|
|
806
|
+
|
|
807
|
+
class MCPConnection:
    """Abstract base for MCP connections.

    Concrete transports (stdio command, HTTP/SSE) override connect,
    disconnect, list_tools and call_tool; the base only tracks identity,
    configuration and the connected flag.
    """

    def __init__(self, server_id: str, config: Dict):
        self.connected = False
        self.server_id = server_id
        self.config = config

    async def connect(self):
        """Establish the connection. Subclasses must override."""
        raise NotImplementedError

    async def disconnect(self):
        """Tear down the connection. Subclasses must override."""
        raise NotImplementedError

    async def list_tools(self) -> List[Dict]:
        """Return the tools the server exposes. Subclasses must override."""
        raise NotImplementedError

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Invoke a tool by name. Subclasses must override."""
        raise NotImplementedError

    def is_connected(self) -> bool:
        """Report whether the connection is currently established."""
        return self.connected
|
|
834
|
+
|
|
835
|
+
|
|
836
|
+
class MCPCommandConnection(MCPConnection):
    """Command-based MCP connection using stdio.

    Spawns the configured command with subprocess.Popen (works on every
    platform and event loop, unlike asyncio.create_subprocess_* which needs
    a ProactorEventLoop on Windows) and speaks newline-delimited JSON-RPC
    over the child's stdin/stdout. Background reader tasks pump stdout and
    stderr into asyncio queues so requests never block the event loop.
    """

    def __init__(self, server_id: str, config: Dict):
        super().__init__(server_id, config)
        self.process = None            # subprocess.Popen once connected
        self.request_id = 0            # monotonically increasing JSON-RPC id
        self._stdout_queue = asyncio.Queue()
        self._stderr_queue = asyncio.Queue()
        self._reader_tasks = []

    async def connect(self):
        """Start subprocess and establish stdio connection using subprocess.Popen (Windows-compatible).

        Raises RuntimeError (with captured stderr when available) if the
        executable cannot be found, exits immediately, or fails the MCP
        'initialize' handshake.
        """
        try:
            command = self.config.get('command')
            args = self.config.get('args', [])
            env = self.config.get('env', {})

            if not command:
                raise ValueError("Command is required for command-based MCP")

            # Merge environment variables (config env overrides inherited)
            import os
            full_env = os.environ.copy()
            full_env.update(env)

            is_windows = platform.system() == 'Windows'
            logger.debug(f"[MCP] Platform: {platform.system()} (Windows={is_windows})")
            logger.debug(f"[MCP] Python version: {sys.version}")
            logger.debug(f"[MCP] Working directory: {os.getcwd()}")

            # Build command as a list (works on all platforms, including Windows)
            cmd_list = [command] + args

            logger.info(f"[MCP] Executing command: {' '.join(cmd_list)}")
            logger.debug(f"[MCP] Command as list: {cmd_list}")

            # Log PATH for debugging
            path_var = full_env.get('PATH', '')
            if is_windows:
                # On Windows, also check Path and path (case-insensitive)
                for key in full_env.keys():
                    if key.lower() == 'path':
                        path_var = full_env[key]
                        break

            if path_var:
                path_entries = path_var.split(os.pathsep)
                logger.debug(f"[MCP] PATH has {len(path_entries)} entries:")
                for i, entry in enumerate(path_entries[:5]):  # Log first 5 entries
                    logger.debug(f"[MCP] [{i}] {entry}")
                if len(path_entries) > 5:
                    logger.debug(f"[MCP] ... and {len(path_entries) - 5} more entries")
            else:
                logger.warning(f"[MCP] PATH environment variable not found!")

            # Log custom environment variables (helpful for debugging)
            if env:
                logger.debug(f"[MCP] Custom env vars: {list(env.keys())}")
                for key, value in env.items():
                    # Log first 100 chars of each env var value
                    value_preview = str(value)[:100] + ('...' if len(str(value)) > 100 else '')
                    logger.debug(f"[MCP] {key}={value_preview}")

            logger.debug(f"[MCP] Creating subprocess using subprocess.Popen (cross-platform compatible)")

            try:
                self.process = subprocess.Popen(
                    [command] + args,
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    env=full_env,
                    # Important: on Windows, don't create a visible console window
                    creationflags=subprocess.CREATE_NO_WINDOW if is_windows else 0
                )
                logger.debug(f"[MCP] Subprocess created with PID: {self.process.pid}")

                # Start background tasks to read stdout/stderr asynchronously
                self._start_reader_tasks()

            except FileNotFoundError as e:
                error_msg = f"Command not found: {command}. Make sure the executable is in PATH or provide full path."
                logger.error(f"[MCP] {error_msg}")
                logger.error(f"[MCP] FileNotFoundError details: {e}")

                # On Windows, try with .exe, .cmd, .bat extensions
                if is_windows and not any(command.endswith(ext) for ext in ['.exe', '.cmd', '.bat']):
                    logger.debug(f"[MCP] Windows: Trying with common executable extensions...")
                    for ext in ['.exe', '.cmd', '.bat']:
                        try:
                            logger.debug(f"[MCP] Trying: {command}{ext}")
                            self.process = subprocess.Popen(
                                [command + ext] + args,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=full_env,
                                creationflags=subprocess.CREATE_NO_WINDOW if is_windows else 0
                            )
                            logger.debug(f"[MCP] Success with {command}{ext}, PID: {self.process.pid}")
                            self._start_reader_tasks()
                            break
                        except FileNotFoundError:
                            continue
                    else:
                        # None of the extensions worked
                        raise RuntimeError(error_msg) from e
                else:
                    raise RuntimeError(error_msg) from e
            except Exception as e:
                error_msg = f"Failed to start subprocess: {type(e).__name__}: {str(e)}"
                logger.error(f"[MCP] {error_msg}")
                logger.error(f"[MCP] Stack trace:\n{traceback.format_exc()}")
                raise RuntimeError(error_msg) from e

            # Give the process a moment to start
            await asyncio.sleep(0.5)

            # Check if process is still running
            poll_result = self.process.poll()
            if poll_result is not None:
                # Process already exited, try to capture stderr
                stderr_text = "No error output"
                try:
                    # With subprocess.Popen, we need to read synchronously from
                    # stderr, but do it in a non-blocking way via a thread
                    loop = asyncio.get_event_loop()
                    stderr_data = await loop.run_in_executor(None, self.process.stderr.read)
                    if stderr_data:
                        stderr_text = stderr_data.decode('utf-8', errors='replace')
                except Exception as e:
                    stderr_text = f"Could not read stderr: {e}"

                error_msg = f"MCP server exited immediately with code {poll_result}. Error output: {stderr_text}"
                logger.error(f"[MCP] {error_msg}")
                raise RuntimeError(error_msg)

            logger.debug(f"[MCP] Process started successfully, sending initialization request...")
            self.connected = True

            # Send initialization request (MCP handshake)
            try:
                await self._send_request('initialize', {
                    'protocolVersion': '2024-11-05',
                    'capabilities': {},
                    'clientInfo': {
                        'name': 'signalpilot-ai-internal',
                        'version': '0.10.1'
                    }
                })
                logger.debug(f"[MCP] Initialization successful")
            except Exception as e:
                error_msg = f"Failed to initialize MCP protocol: {type(e).__name__}: {str(e)}"
                logger.error(f"[MCP] {error_msg}")
                raise RuntimeError(error_msg) from e

        except Exception as e:
            logger.error(f"[MCP] Error starting MCP command: {type(e).__name__}: {str(e)}")
            logger.error(f"[MCP] Full stack trace:\n{traceback.format_exc()}")
            self.connected = False

            # Try to capture stderr if process exists
            stderr_text = None
            if self.process and hasattr(self.process, 'stderr') and self.process.stderr:
                try:
                    # Use run_in_executor for sync read
                    loop = asyncio.get_event_loop()
                    stderr_data = await asyncio.wait_for(
                        loop.run_in_executor(None, lambda: self.process.stderr.read(4096)),
                        timeout=1.0
                    )
                    if stderr_data:
                        stderr_text = stderr_data.decode('utf-8', errors='replace')
                        logger.error(f"[MCP] Server stderr output:\n{stderr_text}")
                except asyncio.TimeoutError:
                    logger.warning(f"[MCP] Timeout reading stderr")
                except Exception as stderr_e:
                    logger.warning(f"[MCP] Could not read stderr: {stderr_e}")

            # BUGFIX: the enriched RuntimeError must be raised OUTSIDE the
            # stderr-read try block; previously it was raised inside it and
            # immediately swallowed by the generic `except Exception` there,
            # so callers never saw the server's error output.
            if stderr_text:
                raise RuntimeError(f"{str(e)}\n\nServer error output:\n{stderr_text}") from e

            raise

    def _start_reader_tasks(self):
        """Start background tasks to read from stdout/stderr."""
        loop = asyncio.get_event_loop()

        # One pump task per stream; each feeds its asyncio.Queue
        stdout_task = loop.create_task(self._read_stream(self.process.stdout, self._stdout_queue, 'stdout'))
        stderr_task = loop.create_task(self._read_stream(self.process.stderr, self._stderr_queue, 'stderr'))

        self._reader_tasks = [stdout_task, stderr_task]

    async def _read_stream(self, stream, queue, name):
        """Read lines from a blocking stream via the executor and enqueue them.

        Terminates when the stream yields EOF (empty read) or on error.
        """
        loop = asyncio.get_event_loop()
        try:
            while True:
                # Read line in executor to avoid blocking the event loop
                line = await loop.run_in_executor(None, stream.readline)
                if not line:
                    logger.debug(f"[MCP] {name} stream closed")
                    break
                await queue.put(line)
        except Exception as e:
            logger.error(f"[MCP] Error reading {name}: {e}")

    async def disconnect(self):
        """Terminate subprocess, escalating to kill after a 5s grace period."""
        if self.process:
            try:
                # Cancel reader tasks
                for task in self._reader_tasks:
                    task.cancel()

                # Ask the process to exit
                self.process.terminate()

                # Wait for process to exit (with timeout)
                loop = asyncio.get_event_loop()
                try:
                    await asyncio.wait_for(
                        loop.run_in_executor(None, self.process.wait),
                        timeout=5.0
                    )
                except asyncio.TimeoutError:
                    logger.warning(f"[MCP] Process did not terminate, killing...")
                    self.process.kill()
                    await loop.run_in_executor(None, self.process.wait)
            except Exception as e:
                logger.error(f"[MCP] Error during disconnect: {e}")
            finally:
                self.connected = False

    async def list_tools(self) -> List[Dict]:
        """List tools via JSON-RPC; returns [] on failure."""
        try:
            response = await self._send_request('tools/list', {})
            tools = response.get('result', {}).get('tools', [])

            # Convert to standard format
            return [self._convert_tool_schema(tool) for tool in tools]
        except Exception as e:
            logger.error(f"Error listing tools: {e}")
            return []

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Call a tool via JSON-RPC and return the 'result' payload."""
        try:
            response = await self._send_request('tools/call', {
                'name': tool_name,
                'arguments': arguments
            })
            return response.get('result', {})
        except Exception as e:
            logger.error(f"Error calling tool {tool_name}: {e}")
            raise

    def _drain_stderr(self) -> str:
        """Drain buffered stderr lines from the queue for error reporting.

        Returns the concatenated text, or '' when nothing is buffered.
        """
        stderr_lines = []
        while not self._stderr_queue.empty():
            try:
                line = self._stderr_queue.get_nowait()
                stderr_lines.append(line.decode('utf-8', errors='replace'))
            except Exception:
                break
        return ''.join(stderr_lines)

    async def _send_request(self, method: str, params: Dict) -> Dict:
        """Send JSON-RPC request and get response (works with subprocess.Popen).

        Writes one newline-terminated JSON object to the child's stdin and
        waits up to 30s for a line on the stdout queue. Raises RuntimeError
        (with any buffered stderr) when disconnected, the process has died,
        the response times out, cannot be parsed, or carries an 'error'.
        """
        if not self.process or not self.connected:
            raise RuntimeError("Not connected to MCP server")

        # Check if process is still alive
        poll_result = self.process.poll()
        if poll_result is not None:
            stderr_text = self._drain_stderr() or "No error output available"
            error_msg = f"MCP server process has exited with code {poll_result}. Server output: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        self.request_id += 1
        request = {
            'jsonrpc': '2.0',
            'id': self.request_id,
            'method': method,
            'params': params
        }

        # Send request to stdin (newline-delimited JSON)
        request_data = json.dumps(request) + '\n'
        logger.debug(f"[MCP] Sending request: {request_data.strip()}")

        try:
            # Write to stdin (synchronously via executor)
            loop = asyncio.get_event_loop()
            await loop.run_in_executor(None, self.process.stdin.write, request_data.encode())
            await loop.run_in_executor(None, self.process.stdin.flush)
        except Exception as e:
            error_msg = f"Failed to send request to MCP server: {type(e).__name__}: {str(e)}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg) from e

        # Read response from stdout queue with timeout
        try:
            response_line = await asyncio.wait_for(
                self._stdout_queue.get(),
                timeout=30.0
            )
        except asyncio.TimeoutError:
            stderr_text = self._drain_stderr() or "No error output"
            error_msg = f"MCP server response timeout after 30 seconds for method '{method}'. Server stderr: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        if not response_line:
            stderr_text = self._drain_stderr() or "No error output"
            error_msg = f"MCP server closed connection. Server stderr: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        response_text = response_line.decode('utf-8', errors='replace').strip()
        logger.debug(f"[MCP] Received response: {response_text}")

        try:
            response = json.loads(response_text)
        except json.JSONDecodeError as e:
            error_msg = f"Failed to parse MCP server response as JSON: {e}. Response: {response_text[:200]}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg) from e

        if 'error' in response:
            error_details = response['error']
            error_msg = f"MCP server error for method '{method}': {json.dumps(error_details, indent=2)}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        return response

    def _convert_tool_schema(self, tool: Dict) -> Dict:
        """Convert MCP tool schema to standard format."""
        return {
            'name': tool.get('name'),
            'description': tool.get('description', ''),
            'inputSchema': tool.get('inputSchema', {
                'type': 'object',
                'properties': {}
            })
        }
|
|
1207
|
+
|
|
1208
|
+
|
|
1209
|
+
class MCPHTTPConnection(MCPConnection):
    """HTTP/SSE-based MCP connection.

    Talks to a remote MCP server over plain HTTP POSTs using an aiohttp
    session; an optional bearer token from the config is attached to
    every request.
    """

    def __init__(self, server_id: str, config: Dict):
        super().__init__(server_id, config)
        self.session: Optional[aiohttp.ClientSession] = None
        self.base_url = config.get('url', '').rstrip('/')

    async def connect(self):
        """Establish HTTP/SSE connection.

        Opens a session and verifies reachability with a tools/list probe;
        on any failure the session is closed and the error re-raised.
        """
        try:
            if not self.base_url:
                raise ValueError("URL is required for HTTP/SSE MCP")

            # 30-second overall timeout for every request on this session
            self.session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30))

            # Probe the server so a bad URL/token fails here, not later
            await self._request('POST', '/tools/list', {})

            self.connected = True
            logger.info(f"Connected to MCP HTTP server: {self.base_url}")
        except Exception as e:
            logger.error(f"Error connecting to MCP HTTP server: {e}")
            if self.session:
                await self.session.close()
            self.connected = False
            raise

    async def disconnect(self):
        """Close the HTTP session and mark the connection down."""
        if self.session:
            await self.session.close()
            self.session = None
        self.connected = False

    async def list_tools(self) -> List[Dict]:
        """List tools via HTTP; returns [] on failure."""
        try:
            payload = await self._request('POST', '/tools/list', {})
            return [self._convert_tool_schema(t) for t in payload.get('tools', [])]
        except Exception as e:
            logger.error(f"Error listing tools: {e}")
            return []

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Call a tool via HTTP and return the server's JSON response."""
        try:
            return await self._request('POST', '/tools/call', {
                'name': tool_name,
                'arguments': arguments
            })
        except Exception as e:
            logger.error(f"Error calling tool {tool_name}: {e}")
            raise

    async def _request(self, method: str, path: str, data: Dict) -> Dict:
        """Issue one HTTP request to the MCP server and return parsed JSON.

        Raises RuntimeError when not connected and aiohttp errors on
        non-2xx responses.
        """
        if not self.session or not self.connected:
            raise RuntimeError("Not connected to MCP server")

        headers = {'Content-Type': 'application/json'}

        # Add auth token if provided
        token = self.config.get('token')
        if token:
            headers['Authorization'] = f"Bearer {token}"

        target = f"{self.base_url}{path}"
        async with self.session.request(method, target, json=data, headers=headers) as response:
            response.raise_for_status()
            return await response.json()

    def _convert_tool_schema(self, tool: Dict) -> Dict:
        """Convert MCP tool schema to standard format."""
        return {
            'name': tool.get('name'),
            'description': tool.get('description', ''),
            'inputSchema': tool.get('inputSchema', {
                'type': 'object',
                'properties': {}
            })
        }
|
|
1298
|
+
|
|
1299
|
+
|
|
1300
|
+
def get_mcp_service() -> MCPConnectionService:
    """Return the process-wide singleton MCPConnectionService instance."""
    return MCPConnectionService.get_instance()
|
|
1303
|
+
|