signalpilot-ai-internal 0.10.0__py3-none-any.whl → 0.11.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. signalpilot_ai_internal/__init__.py +1 -0
  2. signalpilot_ai_internal/_version.py +1 -1
  3. signalpilot_ai_internal/cache_service.py +22 -21
  4. signalpilot_ai_internal/composio_handlers.py +224 -0
  5. signalpilot_ai_internal/composio_service.py +511 -0
  6. signalpilot_ai_internal/database_config_handlers.py +182 -0
  7. signalpilot_ai_internal/database_config_service.py +166 -0
  8. signalpilot_ai_internal/databricks_schema_service.py +907 -0
  9. signalpilot_ai_internal/file_scanner_service.py +5 -146
  10. signalpilot_ai_internal/handlers.py +388 -9
  11. signalpilot_ai_internal/integrations_config.py +256 -0
  12. signalpilot_ai_internal/log_utils.py +31 -0
  13. signalpilot_ai_internal/mcp_handlers.py +532 -0
  14. signalpilot_ai_internal/mcp_server_manager.py +298 -0
  15. signalpilot_ai_internal/mcp_service.py +1255 -0
  16. signalpilot_ai_internal/oauth_token_store.py +141 -0
  17. signalpilot_ai_internal/schema_search_config.yml +17 -11
  18. signalpilot_ai_internal/schema_search_service.py +85 -4
  19. signalpilot_ai_internal/signalpilot_home.py +961 -0
  20. signalpilot_ai_internal/snowflake_schema_service.py +2 -0
  21. signalpilot_ai_internal/test_dbt_mcp_server.py +180 -0
  22. signalpilot_ai_internal/unified_database_schema_service.py +2 -0
  23. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +15 -48
  24. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +9 -52
  25. {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +7 -1
  26. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.bab318d6caadb055e29c.js +1 -0
  27. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/129.868ca665e6fc225c20a0.js +1 -0
  28. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/179.fd45a2e75d471d0aa3b9.js +7 -0
  29. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.81105a94aa873fc51a94.js +1 -0
  30. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.a002dd4630d3b6404a90.js +1 -0
  31. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.cc6f6ecacd703bcdb468.js +1 -0
  32. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.817a883549d55a0e0576.js +1 -0
  33. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.a4daecd44f1e9364e44a.js +1 -0
  34. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.667225aab294fb5ed161.js +1 -0
  35. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8138af2522716e5a926f.js +1 -0
  36. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.925c73e32f3c07448da0.js +1 -0
  37. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/477.aaa4cc9e87801fb45f5b.js +1 -0
  38. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.370056149a59022b700c.js +1 -0
  39. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/510.868ca665e6fc225c20a0.js +1 -0
  40. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.835f97f7ccfc70ff5c93.js +1 -0
  41. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.6c13335f73de089d6b1e.js +1 -0
  42. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/574.ad2709e91ebcac5bbe68.js +1 -0
  43. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.bddbab8e464fe31f0393.js +1 -0
  44. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.fda1bcdb10497b0a6ade.js +1 -0
  45. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.d046701f475fcbf6697d.js +1 -0
  46. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.c306dffd4cfe8a613d13.js +1 -0
  47. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.e39898b6f336539f228c.js +1 -0
  48. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.77cc0ca10a1860df1b52.js +1 -0
  49. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.4e2850b2af985ed0d378.js +1 -0
  50. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js +2 -0
  51. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.055f50d20a31f3068c72.js +1 -0
  52. {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +47 -29
  53. {signalpilot_ai_internal-0.10.0.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/METADATA +14 -31
  54. signalpilot_ai_internal-0.11.24.dist-info/RECORD +66 -0
  55. signalpilot_ai_internal-0.11.24.dist-info/licenses/LICENSE +7 -0
  56. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -1
  57. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -1
  58. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -1
  59. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/330.af2e9cb5def5ae2b84d5.js +0 -1
  60. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +0 -1
  61. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -1
  62. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -1
  63. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -1
  64. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +0 -1
  65. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -1
  66. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -1
  67. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -2
  68. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.e9acd2e1f9739037f1ab.js +0 -1
  69. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -1
  70. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -1
  71. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -1
  72. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -1
  73. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +0 -1
  74. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/786.770dc7bcab77e14cc135.js +0 -7
  75. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +0 -1
  76. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.25ddd15aca09421d3765.js +0 -1
  77. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -1
  78. signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b05b2f0c9617ba28370d.js +0 -1
  79. signalpilot_ai_internal-0.10.0.dist-info/RECORD +0 -50
  80. signalpilot_ai_internal-0.10.0.dist-info/licenses/LICENSE +0 -29
  81. {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  82. {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  83. /signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt → /signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js.LICENSE.txt +0 -0
  84. {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  85. {signalpilot_ai_internal-0.10.0.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/WHEEL +0 -0
@@ -0,0 +1,1255 @@
1
+ """
2
+ MCP Connection Service - Manages connections to Model Context Protocol servers
3
+ Supports command-based (stdio), HTTP, and SSE connection types
4
+
5
+ IMPORTANT: On Windows, asyncio subprocess support requires ProactorEventLoop, but Jupyter/Tornado
6
+ uses SelectorEventLoop. Instead of using asyncio.create_subprocess_*, we use subprocess.Popen
7
+ with asyncio wrappers for cross-platform compatibility.
8
+ """
9
+ import asyncio
10
+ import json
11
+ import logging
12
+ import uuid
13
+ import traceback
14
+ import sys
15
+ import platform
16
+ import subprocess
17
+ import threading
18
+ import os
19
+ from typing import Dict, List, Optional, Any
20
+ from pathlib import Path
21
+ import aiohttp
22
+ from .signalpilot_home import get_signalpilot_home
23
+ from .oauth_token_store import get_oauth_token_store
24
+
25
logger = logging.getLogger(__name__)

# Set logger to DEBUG level for comprehensive debugging
logger.setLevel(logging.DEBUG)

# Check current event loop (for debugging). On Windows, asyncio subprocess
# support requires ProactorEventLoop, but Jupyter/Tornado typically installs
# SelectorEventLoop — warn so the subprocess.Popen fallback is explicable
# from the logs.
try:
    loop = asyncio.get_running_loop()
    loop_type = type(loop).__name__
    logger.debug(f"[MCP] Current event loop type: {loop_type}")
    if platform.system() == 'Windows' and 'ProactorEventLoop' not in loop_type:
        logger.warning(f"[MCP] Windows using {loop_type} - will use subprocess.Popen instead of asyncio subprocesses")
except RuntimeError:
    # No loop is running at import time (normal outside a coroutine).
    # Fixed: this message had a stray f-prefix with no placeholders.
    logger.debug("[MCP] No running event loop yet")
39
+
40
+
41
+
42
class MCPConnectionService:
    """Service for managing MCP server connections and tool calls"""

    # Singleton instance, lazily created by get_instance().
    _instance = None

    # Default whitelist of tools grouped by server type.
    # Keys are server-name prefixes (matched via startswith in
    # _ensure_default_whitelisted_tools); values are the tool names that are
    # auto-enabled for servers of that type on first connection.
    DEFAULT_WHITELISTED_TOOLS_BY_SERVER = {
        'Dbt': [
            'query_metrics',
            'get_metrics_compiled_sql',
            'get_all_models',
            'get_mart_models',
            'get_model_details',
            'get_model_parents',
            'get_model_children',
            'get_related_models',
            'list_metrics',
            'get_semantic_model_details',
        ],
        'Google': [
            'start_google_auth',
            'search_docs',
            'get_doc_content',
            'list_docs_in_folder',
            'inspect_doc_structure',
            'read_document_comments',
            'create_document_comment',
            'reply_to_document_comment',
            'resolve_document_comment',
            'search_drive_files',
            'list_drive_items',
            'get_drive_file_content',
            'get_drive_file_download_url',
            'list_drive_items_in_folder',
        ],
        'Slack': [
            'conversations_search_messages',
            'conversations_history',
            'conversations_replies',
            'channels_list',
        ],
        'Notion': [
            # Notion MCP tool names use the 'API-' REST-endpoint convention.
            'API-post-search',
            'API-get-block-children',
            'API-retrieve-a-page',
            'API-retrieve-a-database',
            'API-post-database-query',
        ],
    }

    # Flattened list for backward compatibility and general checks
    DEFAULT_WHITELISTED_TOOLS = [
        tool for tools in DEFAULT_WHITELISTED_TOOLS_BY_SERVER.values() for tool in tools
    ]
96
+
97
+ def __init__(self):
98
+ self.connections: Dict[str, 'MCPConnection'] = {}
99
+ self.tools_cache: Dict[str, List[Dict]] = {}
100
+ self.home_manager = get_signalpilot_home()
101
+
102
+ @classmethod
103
+ def get_instance(cls):
104
+ """Get singleton instance of MCP service"""
105
+ if cls._instance is None:
106
+ cls._instance = MCPConnectionService()
107
+ return cls._instance
108
+
109
+ def _infer_server_type(self, config: Dict) -> str:
110
+ """Infer server type from config structure (Cursor format)"""
111
+ if 'command' in config:
112
+ return 'command'
113
+ elif 'url' in config:
114
+ return 'http' # HTTP/SSE both use 'url'
115
+ else:
116
+ # Fallback: try to infer from old format
117
+ return config.get('type', 'command')
118
+
119
+ def _normalize_config_from_storage(self, server_id: str, config: Dict) -> Dict:
120
+ """Convert Cursor schema format to internal format"""
121
+ # Cursor format: server config may have 'command'/'args' or 'name'/'url'
122
+ # Internal format: always has 'id', 'name', 'type', and type-specific fields
123
+
124
+ normalized = {
125
+ 'id': server_id,
126
+ 'type': self._infer_server_type(config)
127
+ }
128
+
129
+ # Copy all fields
130
+ normalized.update(config)
131
+
132
+ # Ensure name exists (use server_id as fallback for command-based)
133
+ if 'name' not in normalized:
134
+ if normalized['type'] == 'command':
135
+ normalized['name'] = server_id
136
+ else:
137
+ normalized['name'] = config.get('name', server_id)
138
+
139
+ # Handle enabled field (defaults to True)
140
+ if 'enabled' not in normalized:
141
+ normalized['enabled'] = True
142
+
143
+ return normalized
144
+
145
+ def _normalize_config_for_storage(self, config: Dict) -> Dict:
146
+ """Convert internal format to Cursor schema format"""
147
+ # Remove internal-only fields
148
+ storage_config = {}
149
+
150
+ # Copy relevant fields based on type
151
+ server_type = config.get('type', 'command')
152
+
153
+ if server_type == 'command':
154
+ if 'command' in config:
155
+ storage_config['command'] = config['command']
156
+ if 'args' in config:
157
+ storage_config['args'] = config['args']
158
+ # Only store env if NOT an OAuth integration (OAuth tokens are stored securely elsewhere)
159
+ if 'env' in config and not config.get('isOAuthIntegration', False):
160
+ storage_config['env'] = config['env']
161
+ else: # http/sse
162
+ if 'name' in config:
163
+ storage_config['name'] = config['name']
164
+ if 'url' in config:
165
+ storage_config['url'] = config['url']
166
+ if 'token' in config:
167
+ storage_config['token'] = config['token']
168
+
169
+ # Add enabled if not default (True)
170
+ enabled = config.get('enabled', True)
171
+ if not enabled:
172
+ storage_config['enabled'] = False
173
+
174
+ # Add enabledTools if present
175
+ if 'enabledTools' in config:
176
+ storage_config['enabledTools'] = config['enabledTools']
177
+
178
+ # Mark as OAuth integration if set (tokens stored securely elsewhere)
179
+ if config.get('isOAuthIntegration', False):
180
+ storage_config['isOAuthIntegration'] = True
181
+
182
+ return storage_config
183
+
184
+ def save_server_config(self, server_config: Dict) -> Dict:
185
+ """Save MCP server configuration to JSON file (Cursor format)"""
186
+ try:
187
+ # Ensure server has an ID
188
+ if 'id' not in server_config:
189
+ server_config['id'] = str(uuid.uuid4())
190
+
191
+ server_id = server_config['id']
192
+
193
+ # Convert to storage format and save
194
+ storage_config = self._normalize_config_for_storage(server_config)
195
+
196
+ if not self.home_manager.set_mcp_server(server_id, storage_config):
197
+ raise RuntimeError(f"Failed to write MCP config")
198
+
199
+ logger.info(f"Saved MCP server config: {server_config.get('name', server_id)}")
200
+ return server_config
201
+ except Exception as e:
202
+ logger.error(f"Error saving MCP server config: {e}")
203
+ logger.error(f"Stack trace:\n{traceback.format_exc()}")
204
+ raise
205
+
206
+ def load_all_configs(self) -> Dict[str, Dict]:
207
+ """Load all MCP server configurations from JSON file (Cursor format)"""
208
+ try:
209
+ # Get all MCP servers from home manager
210
+ mcp_servers = self.home_manager.get_mcp_servers()
211
+
212
+ # Convert from Cursor format to internal format
213
+ configs = {}
214
+ for server_id, server_config in mcp_servers.items():
215
+ configs[server_id] = self._normalize_config_from_storage(server_id, server_config)
216
+
217
+ return configs
218
+ except Exception as e:
219
+ logger.error(f"Error loading MCP configs: {e}")
220
+ logger.error(f"Stack trace:\n{traceback.format_exc()}")
221
+ return {}
222
+
223
+ def get_server_config(self, server_id: str) -> Optional[Dict]:
224
+ """Get a specific server configuration"""
225
+ configs = self.load_all_configs()
226
+ return configs.get(server_id)
227
+
228
+ def delete_server_config(self, server_id: str) -> bool:
229
+ """Delete a server configuration from JSON file"""
230
+ try:
231
+ # Remove from home manager
232
+ if self.home_manager.remove_mcp_server(server_id):
233
+ # Also disconnect if connected
234
+ if server_id in self.connections:
235
+ asyncio.create_task(self.disconnect(server_id))
236
+
237
+ logger.info(f"Deleted MCP server config: {server_id}")
238
+ return True
239
+ return False
240
+ except Exception as e:
241
+ logger.error(f"Error deleting MCP server config: {e}")
242
+ logger.error(f"Stack trace:\n{traceback.format_exc()}")
243
+ return False
244
+
245
+ async def connect(self, server_id: str) -> Dict:
246
+ """Connect to an MCP server"""
247
+ try:
248
+ logger.debug(f"[MCP] Attempting to connect to server {server_id}")
249
+ config = self.get_server_config(server_id)
250
+ if not config:
251
+ error_msg = f"Server configuration not found: {server_id}"
252
+ logger.error(f"[MCP] {error_msg}")
253
+ raise ValueError(error_msg)
254
+
255
+ # Check if server is enabled
256
+ if not config.get('enabled', True):
257
+ error_msg = f"Server {server_id} is disabled"
258
+ logger.warning(f"[MCP] {error_msg}")
259
+ raise ValueError(error_msg)
260
+
261
+ logger.debug(f"[MCP] Server config loaded: name={config.get('name')}, type={config.get('type')}")
262
+
263
+ # Check if already connected
264
+ if server_id in self.connections:
265
+ connection = self.connections[server_id]
266
+ if connection.is_connected():
267
+ logger.info(f"[MCP] Already connected to MCP server: {config.get('name', server_id)}")
268
+ return self._get_server_info(server_id, config)
269
+ else:
270
+ logger.warning(f"[MCP] Stale connection found for {server_id}, removing")
271
+ del self.connections[server_id]
272
+
273
+ # Determine connection type (infer if not set)
274
+ connection_type = config.get('type')
275
+ if not connection_type:
276
+ connection_type = self._infer_server_type(config)
277
+ config['type'] = connection_type
278
+
279
+ logger.debug(f"[MCP] Connection type: {connection_type}")
280
+
281
+ if connection_type == 'command':
282
+ connection = MCPCommandConnection(server_id, config)
283
+ elif connection_type in ['http', 'sse']:
284
+ connection = MCPHTTPConnection(server_id, config)
285
+ else:
286
+ error_msg = f"Unknown connection type: {connection_type}"
287
+ logger.error(f"[MCP] {error_msg}")
288
+ raise ValueError(error_msg)
289
+
290
+ # Connect and store
291
+ logger.debug(f"[MCP] Starting connection to {config.get('name')}...")
292
+ await connection.connect()
293
+ self.connections[server_id] = connection
294
+ logger.debug(f"[MCP] Connection established, listing tools...")
295
+
296
+ # List and cache tools
297
+ tools = await connection.list_tools()
298
+ self.tools_cache[server_id] = tools
299
+
300
+ # Auto-whitelist tools on first connection or ensure default whitelisted tools are enabled
301
+ tool_names = [tool['name'] for tool in tools]
302
+ self._ensure_default_whitelisted_tools(server_id, config, tool_names)
303
+
304
+ logger.info(f"[MCP] ✓ Connected to MCP server: {config['name']} ({len(tools)} tools)")
305
+
306
+ # Ensure default whitelisted tools are enabled (final check)
307
+ self._ensure_default_whitelisted_tools(server_id, config, tool_names)
308
+
309
+ return self._get_server_info(server_id, config)
310
+ except ValueError as e:
311
+ # Re-raise ValueError with original message
312
+ logger.error(f"[MCP] Configuration error for {server_id}: {str(e)}")
313
+ logger.error(f"[MCP] Stack trace:\n{traceback.format_exc()}")
314
+ raise
315
+ except Exception as e:
316
+ error_msg = f"Failed to connect to MCP server {server_id}: {type(e).__name__}: {str(e)}"
317
+ logger.error(f"[MCP] {error_msg}")
318
+ logger.error(f"[MCP] Full stack trace:\n{traceback.format_exc()}")
319
+ # Include the original exception type in the error message
320
+ raise RuntimeError(error_msg) from e
321
+
322
+ async def disconnect(self, server_id: str) -> bool:
323
+ """Disconnect from an MCP server"""
324
+ try:
325
+ if server_id in self.connections:
326
+ connection = self.connections[server_id]
327
+ await connection.disconnect()
328
+ del self.connections[server_id]
329
+
330
+ if server_id in self.tools_cache:
331
+ del self.tools_cache[server_id]
332
+
333
+ logger.info(f"Disconnected from MCP server: {server_id}")
334
+ return True
335
+ return False
336
+ except Exception as e:
337
+ logger.error(f"Error disconnecting from MCP server {server_id}: {e}")
338
+ return False
339
+
340
+ async def list_tools(self, server_id: str) -> List[Dict]:
341
+ """List tools available from a connected MCP server"""
342
+ try:
343
+ # Check cache first
344
+ if server_id in self.tools_cache:
345
+ tools = self.tools_cache[server_id]
346
+ # Ensure default whitelisted tools are enabled even when using cache
347
+ config = self.get_server_config(server_id)
348
+ if config:
349
+ tool_names = [tool['name'] for tool in tools]
350
+ self._ensure_default_whitelisted_tools(server_id, config, tool_names)
351
+ return tools
352
+
353
+ # Otherwise fetch from connection
354
+ if server_id not in self.connections:
355
+ raise ValueError(f"Not connected to server: {server_id}")
356
+
357
+ connection = self.connections[server_id]
358
+ tools = await connection.list_tools()
359
+ self.tools_cache[server_id] = tools
360
+
361
+ # Ensure default whitelisted tools are enabled
362
+ config = self.get_server_config(server_id)
363
+ if config:
364
+ tool_names = [tool['name'] for tool in tools]
365
+ self._ensure_default_whitelisted_tools(server_id, config, tool_names)
366
+
367
+ return tools
368
+ except Exception as e:
369
+ logger.error(f"Error listing tools from MCP server {server_id}: {e}")
370
+ raise
371
+
372
+ async def get_all_tools(self) -> List[Dict]:
373
+ """Get all tools from all connected servers"""
374
+ all_tools = []
375
+ for server_id in self.connections.keys():
376
+ try:
377
+ tools = await self.list_tools(server_id)
378
+ config = self.get_server_config(server_id)
379
+
380
+ # Add server info to each tool
381
+ for tool in tools:
382
+ tool['serverId'] = server_id
383
+ tool['serverName'] = config.get('name', server_id)
384
+
385
+ all_tools.extend(tools)
386
+ except Exception as e:
387
+ logger.error(f"Error getting tools from server {server_id}: {e}")
388
+
389
+ return all_tools
390
+
391
+ async def call_tool(self, server_id: str, tool_name: str, arguments: Dict) -> Any:
392
+ """Call a tool on an MCP server"""
393
+ try:
394
+ if server_id not in self.connections:
395
+ raise ValueError(f"Not connected to server: {server_id}")
396
+
397
+ connection = self.connections[server_id]
398
+ result = await connection.call_tool(tool_name, arguments)
399
+
400
+ logger.info(f"Called tool {tool_name} on server {server_id}")
401
+ return result
402
+ except Exception as e:
403
+ logger.error(f"Error calling tool {tool_name} on server {server_id}: {e}")
404
+ raise
405
+
406
+ def get_connection_status(self, server_id: str) -> str:
407
+ """Get connection status for a server"""
408
+ if server_id in self.connections:
409
+ return 'connected' if self.connections[server_id].is_connected() else 'error'
410
+ return 'disconnected'
411
+
412
+ def _get_server_info(self, server_id: str, config: Dict) -> Dict:
413
+ """Get server information for response"""
414
+ tools = self.tools_cache.get(server_id, [])
415
+ enabled_tools = config.get('enabledTools', [])
416
+
417
+ # Check if this is an OAuth integration
418
+ is_oauth = config.get('isOAuthIntegration', False)
419
+ if not is_oauth:
420
+ # Also check token store in case the flag wasn't set
421
+ token_store = get_oauth_token_store()
422
+ is_oauth = token_store.is_oauth_server(server_id)
423
+
424
+ result = {
425
+ 'serverId': server_id,
426
+ 'name': config.get('name', server_id),
427
+ 'status': self.get_connection_status(server_id),
428
+ 'type': config.get('type', 'command'),
429
+ 'toolCount': len(tools),
430
+ 'tools': tools,
431
+ 'enabled': config.get('enabled', True),
432
+ 'enabledTools': enabled_tools
433
+ }
434
+
435
+ # Add OAuth info if it's an OAuth integration
436
+ if is_oauth:
437
+ result['isOAuthIntegration'] = True
438
+ token_store = get_oauth_token_store()
439
+ result['integrationId'] = token_store.get_integration_id(server_id)
440
+
441
+ return result
442
+
443
+ def enable_server(self, server_id: str) -> bool:
444
+ """Enable an MCP server"""
445
+ try:
446
+ config = self.get_server_config(server_id)
447
+ if not config:
448
+ return False
449
+
450
+ config['enabled'] = True
451
+ self.save_server_config(config)
452
+ logger.info(f"Enabled MCP server: {server_id}")
453
+ return True
454
+ except Exception as e:
455
+ logger.error(f"Error enabling MCP server {server_id}: {e}")
456
+ return False
457
+
458
+ def disable_server(self, server_id: str) -> bool:
459
+ """Disable an MCP server and disconnect if connected"""
460
+ try:
461
+ config = self.get_server_config(server_id)
462
+ if not config:
463
+ return False
464
+
465
+ config['enabled'] = False
466
+ self.save_server_config(config)
467
+
468
+ # Disconnect if connected
469
+ if server_id in self.connections:
470
+ asyncio.create_task(self.disconnect(server_id))
471
+
472
+ logger.info(f"Disabled MCP server: {server_id}")
473
+ return True
474
+ except Exception as e:
475
+ logger.error(f"Error disabling MCP server {server_id}: {e}")
476
+ return False
477
+
478
+ def _ensure_default_whitelisted_tools(self, server_id: str, config: Dict, available_tool_names: List[str]) -> None:
479
+ """Ensure default whitelisted tools are always enabled for a server"""
480
+ try:
481
+ existing_enabled = set(config.get('enabledTools', []))
482
+ available_tools = set(available_tool_names)
483
+
484
+ # Get server name to determine which tools to whitelist
485
+ server_name = config.get('name', server_id)
486
+
487
+ # Check if this server should use selective whitelisting
488
+ # Only apply selective whitelisting for servers named "Notion", "Dbt", "Slack", or "Google"
489
+ should_use_selective_whitelist = any(
490
+ server_name.startswith(prefix) for prefix in ['Notion', 'Dbt', 'Slack', 'Google']
491
+ )
492
+
493
+ if should_use_selective_whitelist:
494
+ # Determine which server type based on name prefix
495
+ server_type = None
496
+ for prefix in ['Notion', 'Dbt', 'Slack', 'Google']:
497
+ if server_name.startswith(prefix):
498
+ server_type = prefix
499
+ break
500
+
501
+ # Get default whitelisted tools for this server type
502
+ if server_type and server_type in self.DEFAULT_WHITELISTED_TOOLS_BY_SERVER:
503
+ default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS_BY_SERVER[server_type])
504
+ else:
505
+ # Fallback: use all default whitelisted tools
506
+ default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS)
507
+ else:
508
+ # For other servers, use all default whitelisted tools
509
+ default_whitelisted = set(self.DEFAULT_WHITELISTED_TOOLS)
510
+
511
+ # Find default whitelisted tools that are available
512
+ available_default_tools = default_whitelisted & available_tools
513
+
514
+ if 'enabledTools' not in config or not config.get('enabledTools'):
515
+ # First connection
516
+ if should_use_selective_whitelist:
517
+ # Only enable default whitelisted tools for this server type
518
+ config['enabledTools'] = list(available_default_tools)
519
+ self.save_server_config(config)
520
+ logger.info(f"[MCP] Auto-whitelisted {len(available_default_tools)} default whitelisted tools for {server_id} ({server_type}) (out of {len(available_tool_names)} available): {sorted(available_default_tools)}")
521
+ else:
522
+ # Enable all tools for other servers
523
+ config['enabledTools'] = list(available_tools)
524
+ self.save_server_config(config)
525
+ logger.info(f"[MCP] Auto-whitelisted all {len(available_tools)} tools for {server_id} (not a selective whitelist server)")
526
+ else:
527
+ # On reconnect or update, ensure all default whitelisted tools are enabled
528
+ # This will re-enable any default whitelisted tools that were disabled
529
+ final_enabled = existing_enabled | available_default_tools
530
+
531
+ if final_enabled != existing_enabled:
532
+ tools_added = final_enabled - existing_enabled
533
+ config['enabledTools'] = list(final_enabled)
534
+ self.save_server_config(config)
535
+ logger.info(f"[MCP] Auto-enabled {len(tools_added)} default whitelisted tools for {server_id}: {sorted(tools_added)}")
536
+ except Exception as e:
537
+ logger.error(f"Error ensuring default whitelisted tools for {server_id}: {e}")
538
+ logger.error(f"Stack trace:\n{traceback.format_exc()}")
539
+
540
+ def update_tool_enabled(self, server_id: str, tool_name: str, enabled: bool) -> bool:
541
+ """Update enabled/disabled state for a specific tool"""
542
+ try:
543
+ config = self.get_server_config(server_id)
544
+ if not config:
545
+ return False
546
+
547
+ # Check if this is a default whitelisted tool
548
+ is_default_whitelisted = tool_name in self.DEFAULT_WHITELISTED_TOOLS
549
+
550
+ # If trying to disable a default whitelisted tool, warn but allow it
551
+ # (it will be re-enabled on next reconnect)
552
+ if not enabled and is_default_whitelisted:
553
+ logger.warning(f"Attempting to disable default whitelisted tool {tool_name} for server {server_id}. It will be re-enabled on reconnect.")
554
+
555
+ # Get current enabled tools, or initialize with all available tools if not set
556
+ enabled_tools = config.get('enabledTools')
557
+ if enabled_tools is None:
558
+ # If enabledTools is not set, initialize with all available tools from cache
559
+ if server_id in self.tools_cache:
560
+ tool_names = [tool['name'] for tool in self.tools_cache[server_id]]
561
+ enabled_tools = tool_names
562
+ logger.info(f"Initializing enabledTools for {server_id} with {len(tool_names)} tools")
563
+ else:
564
+ # If tools not cached, start with empty list (will be populated on next connect)
565
+ enabled_tools = []
566
+ logger.warning(f"No tools cached for {server_id}, starting with empty enabledTools")
567
+
568
+ enabled_tools_set = set(enabled_tools)
569
+
570
+ if enabled:
571
+ enabled_tools_set.add(tool_name)
572
+ else:
573
+ # Allow disabling even default whitelisted tools (they'll be re-enabled on reconnect)
574
+ enabled_tools_set.discard(tool_name)
575
+
576
+ config['enabledTools'] = list(enabled_tools_set)
577
+ self.save_server_config(config)
578
+
579
+ logger.info(f"{'Enabled' if enabled else 'Disabled'} tool {tool_name} for server {server_id}. Enabled tools: {len(enabled_tools_set)}")
580
+ return True
581
+ except Exception as e:
582
+ logger.error(f"Error updating tool enabled state for {server_id}/{tool_name}: {e}")
583
+ logger.error(f"Stack trace:\n{traceback.format_exc()}")
584
+ return False
585
+
586
+ async def connect_all_enabled(self) -> Dict[str, Dict]:
587
+ """Connect to all enabled MCP servers"""
588
+ results = {}
589
+ configs = self.load_all_configs()
590
+
591
+ for server_id, config in configs.items():
592
+ if config.get('enabled', True):
593
+ try:
594
+ logger.info(f"[MCP] Auto-connecting enabled server: {server_id}")
595
+ server_info = await self.connect(server_id)
596
+ results[server_id] = {'success': True, 'server': server_info}
597
+ except Exception as e:
598
+ logger.error(f"[MCP] Failed to auto-connect {server_id}: {e}")
599
+ results[server_id] = {'success': False, 'error': str(e)}
600
+ else:
601
+ logger.debug(f"[MCP] Skipping disabled server: {server_id}")
602
+
603
+ return results
604
+
605
    def update_config_file(self, new_json_content: str) -> Dict[str, Any]:
        """Update the entire config file and apply diff-based changes.

        Parses *new_json_content*, diffs it against the currently loaded
        configs, persists the new file via the home manager, and schedules an
        async task that reconciles live connections (connect added/enabled
        servers, disconnect removed/disabled ones, bounce modified ones).

        Args:
            new_json_content: Raw JSON text; must contain an 'mcpServers' object.

        Returns:
            {'success': True, 'changes': {...}} where 'changes' lists
            'added', 'removed', 'modified' server ids and 'enabled_changes'.

        Raises:
            ValueError: if the JSON is invalid or missing 'mcpServers'.
            RuntimeError: if the config file cannot be written.
        """
        try:
            # Parse new JSON
            try:
                new_data = json.loads(new_json_content)
            except json.JSONDecodeError as e:
                raise ValueError(f"Invalid JSON: {e}")

            if 'mcpServers' not in new_data:
                raise ValueError("JSON must contain 'mcpServers' object")

            new_servers = new_data.get('mcpServers', {})

            # Load current configs
            old_configs = self.load_all_configs()
            old_server_ids = set(old_configs.keys())
            new_server_ids = set(new_servers.keys())

            changes = {
                'added': [],
                'removed': [],
                'modified': [],
                'enabled_changes': []
            }

            # Detect added servers
            for server_id in new_server_ids - old_server_ids:
                changes['added'].append(server_id)

            # Detect removed servers
            for server_id in old_server_ids - new_server_ids:
                changes['removed'].append(server_id)
                # Disconnect removed servers
                # (fire-and-forget: the task is not awaited, so removal
                # returns immediately while teardown happens in background)
                if server_id in self.connections:
                    asyncio.create_task(self.disconnect(server_id))

            # Detect modified servers
            for server_id in new_server_ids & old_server_ids:
                old_config = old_configs[server_id]
                new_storage_config = new_servers[server_id]
                new_config = self._normalize_config_from_storage(server_id, new_storage_config)

                # Check if enabled status changed
                old_enabled = old_config.get('enabled', True)
                new_enabled = new_config.get('enabled', True)

                if old_enabled != new_enabled:
                    changes['enabled_changes'].append({
                        'server_id': server_id,
                        'old_enabled': old_enabled,
                        'new_enabled': new_enabled
                    })

                # Check if config changed (simple comparison)
                # Comparison is done in storage form so formatting-only
                # differences in the normalized view don't trigger a bounce.
                old_storage = self._normalize_config_for_storage(old_config)
                if old_storage != new_storage_config:
                    changes['modified'].append(server_id)

            # Write new config to file using home manager
            if not self.home_manager.write_mcp_config(new_data):
                raise RuntimeError(f"Failed to write updated config")

            # Apply changes asynchronously
            # NOTE: this closure captures `changes` and `new_servers`; it runs
            # after this method has already returned its summary.
            async def apply_changes():
                # Connect newly added enabled servers
                for server_id in changes['added']:
                    new_config = self._normalize_config_from_storage(server_id, new_servers[server_id])
                    if new_config.get('enabled', True):
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to connect added server {server_id}: {e}")

                # Handle enabled/disabled changes
                for change in changes['enabled_changes']:
                    server_id = change['server_id']
                    if change['new_enabled']:
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to connect enabled server {server_id}: {e}")
                    else:
                        await self.disconnect(server_id)

                # Handle modified servers (disconnect and reconnect if enabled)
                for server_id in changes['modified']:
                    new_config = self._normalize_config_from_storage(server_id, new_servers[server_id])
                    # Disconnect old connection
                    if server_id in self.connections:
                        await self.disconnect(server_id)
                    # Reconnect if enabled
                    if new_config.get('enabled', True):
                        try:
                            await self.connect(server_id)
                        except Exception as e:
                            logger.error(f"Failed to reconnect modified server {server_id}: {e}")

            # Schedule async changes
            asyncio.create_task(apply_changes())

            logger.info(f"[MCP] Config file updated: {len(changes['added'])} added, {len(changes['removed'])} removed, {len(changes['modified'])} modified")

            return {
                'success': True,
                'changes': changes
            }

        except Exception as e:
            logger.error(f"Error updating config file: {e}")
            logger.error(f"Stack trace:\n{traceback.format_exc()}")
            raise
718
+ def get_config_file_content(self) -> str:
719
+ """Get the raw JSON file content"""
720
+ try:
721
+ cursor_data = self.home_manager.read_mcp_config()
722
+ return json.dumps(cursor_data, indent=2)
723
+ except Exception as e:
724
+ logger.error(f"Error reading config file: {e}")
725
+ return json.dumps({'mcpServers': {}}, indent=2)
726
+
727
+
728
class MCPConnection:
    """Abstract base for a connection to an MCP server.

    Holds the server identifier, its raw config dict, and a ``connected``
    flag. Transport-specific subclasses implement connect/disconnect,
    tool listing, and tool invocation.
    """

    def __init__(self, server_id: str, config: Dict):
        # Identity and raw configuration for this server.
        self.server_id = server_id
        self.config = config
        # Connection state flag; subclasses flip this on connect/disconnect.
        self.connected = False

    async def connect(self):
        """Establish the underlying connection (transport-specific)."""
        raise NotImplementedError

    async def disconnect(self):
        """Tear down the underlying connection (transport-specific)."""
        raise NotImplementedError

    async def list_tools(self) -> List[Dict]:
        """Return the server's advertised tools (transport-specific)."""
        raise NotImplementedError

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Invoke a named tool with the given arguments (transport-specific)."""
        raise NotImplementedError

    def is_connected(self) -> bool:
        """Report the current value of the connection flag."""
        return self.connected
757
class MCPCommandConnection(MCPConnection):
    """Command-based MCP connection using stdio.

    Spawns the configured command as a subprocess (subprocess.Popen, which
    works on all platforms and event loops) and speaks JSON-RPC 2.0 over its
    stdin/stdout, one JSON object per line. Background tasks pump stdout and
    stderr into asyncio queues so reads never block the event loop.
    """

    def __init__(self, server_id: str, config: Dict):
        super().__init__(server_id, config)
        # Popen handle; None until connect() succeeds.
        self.process = None
        # Monotonically increasing JSON-RPC request id.
        self.request_id = 0
        # Lines read from the child's stdout/stderr by the reader tasks.
        self._stdout_queue = asyncio.Queue()
        self._stderr_queue = asyncio.Queue()
        # Background reader tasks, cancelled on disconnect.
        self._reader_tasks = []

    async def connect(self):
        """Start subprocess and establish stdio connection using subprocess.Popen (Windows-compatible)"""
        try:
            command = self.config.get('command')
            args = self.config.get('args', [])
            env = self.config.get('env', {})

            if not command:
                raise ValueError("Command is required for command-based MCP")

            # Check if this is an OAuth integration and inject tokens from secure store
            is_oauth = self.config.get('isOAuthIntegration', False)
            token_store = get_oauth_token_store()

            # Also check token store directly in case flag is not set
            if not is_oauth:
                is_oauth = token_store.is_oauth_server(self.server_id)

            if is_oauth:
                oauth_env = token_store.get_tokens(self.server_id)
                if oauth_env:
                    logger.debug(f"[MCP] Injecting OAuth tokens for server {self.server_id}")
                    env = {**env, **oauth_env}  # OAuth tokens override any existing env vars
                else:
                    logger.warning(f"[MCP] Server {self.server_id} is marked as OAuth but no tokens found in store")

            # Merge environment variables
            import os
            import shlex
            full_env = os.environ.copy()
            full_env.update(env)

            is_windows = platform.system() == 'Windows'
            logger.debug(f"[MCP] Platform: {platform.system()} (Windows={is_windows})")
            logger.debug(f"[MCP] Python version: {sys.version}")
            logger.debug(f"[MCP] Working directory: {os.getcwd()}")

            # Build command as a list (works on all platforms, including Windows)
            # This is the proper way to handle subprocess on Windows
            cmd_list = [command] + args

            logger.info(f"[MCP] Executing command: {' '.join(cmd_list)}")
            logger.debug(f"[MCP] Command as list: {cmd_list}")

            # Log PATH for debugging
            path_var = full_env.get('PATH', '')
            if is_windows:
                # On Windows, also check Path and path (case-insensitive)
                for key in full_env.keys():
                    if key.lower() == 'path':
                        path_var = full_env[key]
                        break

            if path_var:
                path_entries = path_var.split(os.pathsep)
                logger.debug(f"[MCP] PATH has {len(path_entries)} entries:")
                for i, entry in enumerate(path_entries[:5]):  # Log first 5 entries
                    logger.debug(f"[MCP] [{i}] {entry}")
                if len(path_entries) > 5:
                    logger.debug(f"[MCP] ... and {len(path_entries) - 5} more entries")
            else:
                logger.warning(f"[MCP] PATH environment variable not found!")

            # Log custom environment variables (helpful for debugging)
            if env:
                logger.debug(f"[MCP] Custom env vars: {list(env.keys())}")
                for key, value in env.items():
                    # Log first 100 chars of each env var value
                    value_preview = str(value)[:100] + ('...' if len(str(value)) > 100 else '')
                    logger.debug(f"[MCP] {key}={value_preview}")

            # Create subprocess using subprocess.Popen (works on all platforms, all event loops)
            # This is more reliable than asyncio.create_subprocess_* which requires ProactorEventLoop on Windows
            logger.debug(f"[MCP] Creating subprocess using subprocess.Popen (cross-platform compatible)")

            try:
                # Use subprocess.Popen which works with any event loop
                self.process = subprocess.Popen(
                    [command] + args,
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    env=full_env,
                    # Important: on Windows, don't create a visible console window
                    creationflags=subprocess.CREATE_NO_WINDOW if is_windows else 0
                )
                logger.debug(f"[MCP] Subprocess created with PID: {self.process.pid}")

                # Start background tasks to read stdout/stderr asynchronously
                self._start_reader_tasks()

            except FileNotFoundError as e:
                error_msg = f"Command not found: {command}. Make sure the executable is in PATH or provide full path."
                logger.error(f"[MCP] {error_msg}")
                logger.error(f"[MCP] FileNotFoundError details: {e}")

                # On Windows, try with .exe, .cmd, .bat extensions
                if is_windows and not any(command.endswith(ext) for ext in ['.exe', '.cmd', '.bat']):
                    logger.debug(f"[MCP] Windows: Trying with common executable extensions...")
                    for ext in ['.exe', '.cmd', '.bat']:
                        try:
                            logger.debug(f"[MCP] Trying: {command}{ext}")
                            self.process = subprocess.Popen(
                                [command + ext] + args,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=full_env,
                                creationflags=subprocess.CREATE_NO_WINDOW if is_windows else 0
                            )
                            logger.debug(f"[MCP] Success with {command}{ext}, PID: {self.process.pid}")
                            self._start_reader_tasks()
                            break
                        except FileNotFoundError:
                            continue
                    else:
                        # None of the extensions worked
                        raise RuntimeError(error_msg) from e
                else:
                    raise RuntimeError(error_msg) from e
            except Exception as e:
                error_msg = f"Failed to start subprocess: {type(e).__name__}: {str(e)}"
                logger.error(f"[MCP] {error_msg}")
                logger.error(f"[MCP] Stack trace:\n{traceback.format_exc()}")
                raise RuntimeError(error_msg) from e

            # Give the process a moment to start
            await asyncio.sleep(0.5)

            # Check if process is still running
            poll_result = self.process.poll()
            if poll_result is not None:
                # Process already exited, try to capture stderr
                stderr_text = "No error output"
                try:
                    # With subprocess.Popen, we need to read synchronously from stderr
                    # But do it in a non-blocking way via a thread
                    loop = asyncio.get_event_loop()
                    stderr_data = await loop.run_in_executor(None, self.process.stderr.read)
                    if stderr_data:
                        stderr_text = stderr_data.decode('utf-8', errors='replace')
                except Exception as e:
                    stderr_text = f"Could not read stderr: {e}"

                error_msg = f"MCP server exited immediately with code {poll_result}. Error output: {stderr_text}"
                logger.error(f"[MCP] {error_msg}")
                raise RuntimeError(error_msg)

            logger.debug(f"[MCP] Process started successfully, sending initialization request...")
            self.connected = True

            # Send initialization request
            try:
                await self._send_request('initialize', {
                    'protocolVersion': '2024-11-05',
                    'capabilities': {},
                    'clientInfo': {
                        'name': 'signalpilot-ai-internal',
                        'version': '0.10.1'
                    }
                })
                logger.debug(f"[MCP] Initialization successful")
            except Exception as e:
                error_msg = f"Failed to initialize MCP protocol: {type(e).__name__}: {str(e)}"
                logger.error(f"[MCP] {error_msg}")
                raise RuntimeError(error_msg) from e

        except Exception as e:
            logger.error(f"[MCP] Error starting MCP command: {type(e).__name__}: {str(e)}")
            logger.error(f"[MCP] Full stack trace:\n{traceback.format_exc()}")
            self.connected = False

            # Try to capture stderr if process exists
            if self.process and hasattr(self.process, 'stderr') and self.process.stderr:
                try:
                    # Use run_in_executor for sync read
                    loop = asyncio.get_event_loop()
                    stderr_data = await asyncio.wait_for(
                        loop.run_in_executor(None, lambda: self.process.stderr.read(4096)),
                        timeout=1.0
                    )
                    if stderr_data:
                        stderr_text = stderr_data.decode('utf-8', errors='replace')
                        logger.error(f"[MCP] Server stderr output:\n{stderr_text}")
                        # Re-raise with stderr included
                        # NOTE(review): this RuntimeError is raised inside the
                        # try and is caught by the broad `except Exception as
                        # stderr_e` below, so the enriched message is only
                        # logged as a warning and the ORIGINAL exception is
                        # re-raised by the bare `raise` — confirm intent.
                        raise RuntimeError(f"{str(e)}\n\nServer error output:\n{stderr_text}") from e
                except asyncio.TimeoutError:
                    logger.warning(f"[MCP] Timeout reading stderr")
                except Exception as stderr_e:
                    logger.warning(f"[MCP] Could not read stderr: {stderr_e}")

            raise

    def _start_reader_tasks(self):
        """Start background tasks to read from stdout/stderr"""
        loop = asyncio.get_event_loop()

        # Start stdout reader
        stdout_task = loop.create_task(self._read_stream(self.process.stdout, self._stdout_queue, 'stdout'))
        stderr_task = loop.create_task(self._read_stream(self.process.stderr, self._stderr_queue, 'stderr'))

        self._reader_tasks = [stdout_task, stderr_task]

    async def _read_stream(self, stream, queue, name):
        """Read from a stream in a background thread and put lines into a queue.

        Runs until the stream closes (readline returns b''). `name` is used
        only for logging ('stdout' or 'stderr').
        """
        loop = asyncio.get_event_loop()
        try:
            while True:
                # Read line in executor to avoid blocking
                line = await loop.run_in_executor(None, stream.readline)
                if not line:
                    logger.debug(f"[MCP] {name} stream closed")
                    break
                await queue.put(line)
        except Exception as e:
            logger.error(f"[MCP] Error reading {name}: {e}")

    async def disconnect(self):
        """Terminate subprocess.

        Cancels the reader tasks, terminates the child, and escalates to
        kill() if it does not exit within 5 seconds. Always clears the
        connected flag.
        """
        if self.process:
            try:
                # Cancel reader tasks
                for task in self._reader_tasks:
                    task.cancel()

                # Terminate process
                self.process.terminate()

                # Wait for process to exit (with timeout)
                loop = asyncio.get_event_loop()
                try:
                    await asyncio.wait_for(
                        loop.run_in_executor(None, self.process.wait),
                        timeout=5.0
                    )
                except asyncio.TimeoutError:
                    logger.warning(f"[MCP] Process did not terminate, killing...")
                    self.process.kill()
                    await loop.run_in_executor(None, self.process.wait)
            except Exception as e:
                logger.error(f"[MCP] Error during disconnect: {e}")
            finally:
                self.connected = False

    def is_connected(self) -> bool:
        """Check if connected - also verify subprocess is still alive"""
        if not self.connected:
            return False
        # Check if subprocess is still running
        if self.process is None:
            return False
        poll_result = self.process.poll()
        if poll_result is not None:
            # Process has exited, update connected flag
            logger.warning(f"[MCP] Process for {self.server_id} has exited with code {poll_result}")
            self.connected = False
            return False
        return True

    async def list_tools(self) -> List[Dict]:
        """List tools via JSON-RPC.

        Returns tools in standard format, or [] on any error.
        """
        try:
            response = await self._send_request('tools/list', {})
            tools = response.get('result', {}).get('tools', [])

            # Convert to standard format
            return [self._convert_tool_schema(tool) for tool in tools]
        except Exception as e:
            logger.error(f"Error listing tools: {e}")
            return []

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Call a tool via JSON-RPC.

        Returns the JSON-RPC 'result' object; errors are logged and re-raised.
        """
        try:
            response = await self._send_request('tools/call', {
                'name': tool_name,
                'arguments': arguments
            })
            return response.get('result', {})
        except Exception as e:
            logger.error(f"Error calling tool {tool_name}: {e}")
            raise

    async def _send_request(self, method: str, params: Dict) -> Dict:
        """Send JSON-RPC request and get response (works with subprocess.Popen).

        Writes one JSON line to the child's stdin and waits (up to 30s) for
        one line from the stdout queue. On process death or timeout, any
        buffered stderr lines are drained into the error message.

        Raises:
            RuntimeError: not connected, process exited, timeout, malformed
                response, or a JSON-RPC 'error' in the response.
        """
        if not self.process or not self.connected:
            raise RuntimeError("Not connected to MCP server")

        # Check if process is still alive
        poll_result = self.process.poll()
        if poll_result is not None:
            # Try to get stderr from queue
            stderr_lines = []
            while not self._stderr_queue.empty():
                try:
                    line = self._stderr_queue.get_nowait()
                    stderr_lines.append(line.decode('utf-8', errors='replace'))
                except:
                    break

            stderr_text = ''.join(stderr_lines) if stderr_lines else "No error output available"
            error_msg = f"MCP server process has exited with code {poll_result}. Server output: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        self.request_id += 1
        request = {
            'jsonrpc': '2.0',
            'id': self.request_id,
            'method': method,
            'params': params
        }

        # Send request to stdin
        # (newline-delimited JSON: one request object per line)
        request_data = json.dumps(request) + '\n'
        logger.debug(f"[MCP] Sending request: {request_data.strip()}")

        try:
            # Write to stdin (synchronously via executor)
            loop = asyncio.get_event_loop()
            await loop.run_in_executor(None, self.process.stdin.write, request_data.encode())
            await loop.run_in_executor(None, self.process.stdin.flush)
        except Exception as e:
            error_msg = f"Failed to send request to MCP server: {type(e).__name__}: {str(e)}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg) from e

        # Read response from stdout queue with timeout
        # NOTE(review): assumes the next stdout line is the response to this
        # request (no id matching, no handling of server notifications) —
        # confirm the servers used never emit unsolicited lines.
        try:
            response_line = await asyncio.wait_for(
                self._stdout_queue.get(),
                timeout=30.0
            )
        except asyncio.TimeoutError:
            # Try to get stderr from queue
            stderr_lines = []
            while not self._stderr_queue.empty():
                try:
                    line = self._stderr_queue.get_nowait()
                    stderr_lines.append(line.decode('utf-8', errors='replace'))
                except:
                    break

            stderr_text = ''.join(stderr_lines) if stderr_lines else "No error output"
            error_msg = f"MCP server response timeout after 30 seconds for method '{method}'. Server stderr: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        if not response_line:
            # Try to get stderr from queue
            stderr_lines = []
            while not self._stderr_queue.empty():
                try:
                    line = self._stderr_queue.get_nowait()
                    stderr_lines.append(line.decode('utf-8', errors='replace'))
                except:
                    break

            stderr_text = ''.join(stderr_lines) if stderr_lines else "No error output"
            error_msg = f"MCP server closed connection. Server stderr: {stderr_text}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        response_text = response_line.decode('utf-8', errors='replace').strip()
        logger.debug(f"[MCP] Received response: {response_text}")

        try:
            response = json.loads(response_text)
        except json.JSONDecodeError as e:
            error_msg = f"Failed to parse MCP server response as JSON: {e}. Response: {response_text[:200]}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg) from e

        if 'error' in response:
            error_details = response['error']
            error_msg = f"MCP server error for method '{method}': {json.dumps(error_details, indent=2)}"
            logger.error(f"[MCP] {error_msg}")
            raise RuntimeError(error_msg)

        return response

    def _convert_tool_schema(self, tool: Dict) -> Dict:
        """Convert MCP tool schema to standard format.

        Supplies an empty object schema when the tool declares no inputSchema.
        """
        return {
            'name': tool.get('name'),
            'description': tool.get('description', ''),
            'inputSchema': tool.get('inputSchema', {
                'type': 'object',
                'properties': {}
            })
        }
1161
class MCPHTTPConnection(MCPConnection):
    """HTTP/SSE-based MCP connection.

    Talks to an MCP server over plain HTTP: tools are listed via
    POST {base_url}/tools/list and invoked via POST {base_url}/tools/call.
    An optional bearer token is read from config['token'].
    """

    def __init__(self, server_id: str, config: Dict):
        super().__init__(server_id, config)
        # aiohttp session; created in connect(), None while disconnected.
        self.session: Optional[aiohttp.ClientSession] = None
        # Strip a trailing slash so path concatenation in _request() is clean.
        self.base_url = config.get('url', '').rstrip('/')

    async def connect(self):
        """Establish HTTP/SSE connection.

        Creates the client session and probes the server with a tools/list
        request. On any failure the session is closed and cleared, the
        connected flag is reset, and the exception is re-raised.

        Raises:
            ValueError: if no URL is configured.
        """
        try:
            if not self.base_url:
                raise ValueError("URL is required for HTTP/SSE MCP")

            # Create session with timeout
            timeout = aiohttp.ClientTimeout(total=30)
            self.session = aiohttp.ClientSession(timeout=timeout)

            # BUGFIX: _request() refuses to run while self.connected is False,
            # so the flag must be set BEFORE the connectivity probe below.
            # Previously the probe always raised "Not connected to MCP server"
            # and connect() could never succeed. The except branch reverts the
            # flag on failure.
            self.connected = True

            # Test connection by listing tools
            await self._request('POST', '/tools/list', {})

            logger.info(f"Connected to MCP HTTP server: {self.base_url}")
        except Exception as e:
            logger.error(f"Error connecting to MCP HTTP server: {e}")
            if self.session:
                await self.session.close()
                # Drop the closed session so a later _request cannot reuse it.
                self.session = None
            self.connected = False
            raise

    async def disconnect(self):
        """Close HTTP session"""
        if self.session:
            await self.session.close()
            self.session = None
        self.connected = False

    async def list_tools(self) -> List[Dict]:
        """List tools via HTTP.

        Returns:
            Tools in standard format, or [] on any error.
        """
        try:
            response = await self._request('POST', '/tools/list', {})
            tools = response.get('tools', [])

            return [self._convert_tool_schema(tool) for tool in tools]
        except Exception as e:
            logger.error(f"Error listing tools: {e}")
            return []

    async def call_tool(self, tool_name: str, arguments: Dict) -> Any:
        """Call a tool via HTTP.

        Args:
            tool_name: Name of the tool to invoke.
            arguments: JSON-serializable arguments for the tool.

        Returns:
            The decoded JSON response body.

        Raises:
            Exception: any transport/HTTP error (logged, then re-raised).
        """
        try:
            response = await self._request('POST', '/tools/call', {
                'name': tool_name,
                'arguments': arguments
            })
            return response
        except Exception as e:
            logger.error(f"Error calling tool {tool_name}: {e}")
            raise

    async def _request(self, method: str, path: str, data: Dict) -> Dict:
        """Make HTTP request to MCP server.

        Raises:
            RuntimeError: if called while not connected.
            aiohttp.ClientResponseError: on non-2xx responses
                (via raise_for_status()).
        """
        if not self.session or not self.connected:
            raise RuntimeError("Not connected to MCP server")

        url = f"{self.base_url}{path}"
        headers = {
            'Content-Type': 'application/json'
        }

        # Add auth token if provided
        token = self.config.get('token')
        if token:
            headers['Authorization'] = f"Bearer {token}"

        async with self.session.request(method, url, json=data, headers=headers) as response:
            response.raise_for_status()
            return await response.json()

    def _convert_tool_schema(self, tool: Dict) -> Dict:
        """Convert MCP tool schema to standard format.

        Supplies an empty object schema when the tool declares no inputSchema.
        """
        return {
            'name': tool.get('name'),
            'description': tool.get('description', ''),
            'inputSchema': tool.get('inputSchema', {
                'type': 'object',
                'properties': {}
            })
        }
1252
def get_mcp_service() -> MCPConnectionService:
    """Return the process-wide MCPConnectionService singleton."""
    return MCPConnectionService.get_instance()