signalpilot-ai-internal 0.10.22__py3-none-any.whl → 0.11.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83) hide show
  1. signalpilot_ai_internal/_version.py +1 -1
  2. signalpilot_ai_internal/cache_service.py +22 -21
  3. signalpilot_ai_internal/composio_handlers.py +224 -0
  4. signalpilot_ai_internal/composio_service.py +511 -0
  5. signalpilot_ai_internal/database_config_handlers.py +182 -0
  6. signalpilot_ai_internal/database_config_service.py +166 -0
  7. signalpilot_ai_internal/databricks_schema_service.py +19 -14
  8. signalpilot_ai_internal/file_scanner_service.py +5 -146
  9. signalpilot_ai_internal/handlers.py +317 -8
  10. signalpilot_ai_internal/integrations_config.py +256 -0
  11. signalpilot_ai_internal/log_utils.py +31 -0
  12. signalpilot_ai_internal/mcp_handlers.py +33 -9
  13. signalpilot_ai_internal/mcp_service.py +94 -142
  14. signalpilot_ai_internal/oauth_token_store.py +141 -0
  15. signalpilot_ai_internal/schema_search_config.yml +17 -11
  16. signalpilot_ai_internal/schema_search_service.py +30 -10
  17. signalpilot_ai_internal/signalpilot_home.py +961 -0
  18. signalpilot_ai_internal/snowflake_schema_service.py +2 -0
  19. signalpilot_ai_internal/unified_database_schema_service.py +2 -0
  20. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +15 -48
  21. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +9 -52
  22. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.bab318d6caadb055e29c.js +1 -0
  23. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/129.868ca665e6fc225c20a0.js +1 -0
  24. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/179.fd45a2e75d471d0aa3b9.js +7 -0
  25. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.81105a94aa873fc51a94.js +1 -0
  26. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.a002dd4630d3b6404a90.js +1 -0
  27. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.cc6f6ecacd703bcdb468.js +1 -0
  28. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.817a883549d55a0e0576.js +1 -0
  29. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.a4daecd44f1e9364e44a.js +1 -0
  30. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.667225aab294fb5ed161.js +1 -0
  31. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8138af2522716e5a926f.js +1 -0
  32. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.925c73e32f3c07448da0.js +1 -0
  33. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/477.aaa4cc9e87801fb45f5b.js +1 -0
  34. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.370056149a59022b700c.js +1 -0
  35. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/510.868ca665e6fc225c20a0.js +1 -0
  36. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.835f97f7ccfc70ff5c93.js +1 -0
  37. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.6c13335f73de089d6b1e.js +1 -0
  38. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/574.ad2709e91ebcac5bbe68.js +1 -0
  39. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.bddbab8e464fe31f0393.js +1 -0
  40. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.fda1bcdb10497b0a6ade.js +1 -0
  41. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.d046701f475fcbf6697d.js +1 -0
  42. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.c306dffd4cfe8a613d13.js +1 -0
  43. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.e39898b6f336539f228c.js +1 -0
  44. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.77cc0ca10a1860df1b52.js +1 -0
  45. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.4e2850b2af985ed0d378.js +1 -0
  46. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js +2 -0
  47. signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.055f50d20a31f3068c72.js +1 -0
  48. {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +29 -29
  49. {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/METADATA +13 -31
  50. signalpilot_ai_internal-0.11.24.dist-info/RECORD +66 -0
  51. signalpilot_ai_internal-0.11.24.dist-info/licenses/LICENSE +7 -0
  52. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/110.224e83db03814fd03955.js +0 -7
  53. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -1
  54. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -1
  55. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -1
  56. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +0 -1
  57. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -1
  58. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -1
  59. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -1
  60. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +0 -1
  61. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -1
  62. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -1
  63. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -2
  64. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.c4232851631fb2e7e59a.js +0 -1
  65. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -1
  66. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -1
  67. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/726.318e4e791edb63cc788f.js +0 -1
  68. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -1
  69. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -1
  70. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +0 -1
  71. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +0 -1
  72. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.d9914229e4f120e7e9e4.js +0 -1
  73. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -1
  74. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.d80de1e4da5b520d2f3b.js +0 -1
  75. signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b63c429ca81e743b403c.js +0 -1
  76. signalpilot_ai_internal-0.10.22.dist-info/RECORD +0 -56
  77. signalpilot_ai_internal-0.10.22.dist-info/licenses/LICENSE +0 -29
  78. {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  79. {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  80. {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
  81. /signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt → /signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js.LICENSE.txt +0 -0
  82. {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  83. {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/WHEEL +0 -0
@@ -0,0 +1,511 @@
1
+ """
2
+ Composio Integration Service
3
+ Manages OAuth integrations via Cloudflare Worker and creates MCP server configurations.
4
+ """
5
+
6
+ import asyncio
7
+ import hashlib
8
+ import json
9
+ import logging
10
+ import os
11
+ import uuid
12
+ from datetime import datetime
13
+ from pathlib import Path
14
+ from typing import Any, Dict, List, Optional
15
+ import aiohttp
16
+
17
+ from .cache_service import CacheDirectoryManager, RobustFileOperations
18
+ from .integrations_config import (
19
+ get_integration_config,
20
+ get_all_integration_ids,
21
+ get_integration_info_for_frontend,
22
+ get_mcp_server_config,
23
+ get_mcp_server_config_for_storage,
24
+ build_env_from_credentials,
25
+ )
26
+ from .mcp_service import get_mcp_service
27
+ from .oauth_token_store import get_oauth_token_store
28
+
29
logger = logging.getLogger(__name__)

# Base URL of the OAuth backend; overridable via the COMPOSIO_WORKER_URL
# environment variable (read once at import time).
DEFAULT_WORKER_URL = os.environ.get(
    'COMPOSIO_WORKER_URL',
    'https://oauth.signalpilot.ai'  # Node.js backend with Composio SDK
)

# Interval between background token refreshes, in seconds (15 minutes).
TOKEN_REFRESH_INTERVAL_SECONDS = 15 * 60
40
+
41
+
42
class ComposioIntegrationService:
    """Service for managing Composio OAuth integrations.

    Responsibilities:
      * talk to a remote OAuth worker to initiate connections and fetch
        credentials for an installation-scoped user id;
      * persist connection state in ``composio_integrations.json`` under the
        cache directory;
      * keep OAuth tokens in the secure token store (never in ``mcp.json``)
        and keep the matching MCP server configs/connections in sync;
      * run a background asyncio task that periodically refreshes tokens for
        all connected integrations.
    """

    # Singleton instance; access via get_instance().
    _instance = None

    def __init__(self):
        self._worker_url: str = DEFAULT_WORKER_URL
        self._user_id: Optional[str] = None
        self._integrations_file: Optional[Path] = None
        self._refresh_task: Optional[asyncio.Task] = None
        self._setup_storage()

    @classmethod
    def get_instance(cls) -> 'ComposioIntegrationService':
        """Get singleton instance."""
        if cls._instance is None:
            # Use cls() so subclasses (if any) get their own type.
            cls._instance = cls()
        return cls._instance

    def _setup_storage(self):
        """Set up storage for integration state.

        Leaves ``_integrations_file`` as None when no usable cache directory
        exists; state then falls back to in-memory defaults.
        """
        cache_dir = CacheDirectoryManager.find_usable_cache_directory()
        if cache_dir:
            self._integrations_file = cache_dir / 'composio_integrations.json'
            logger.debug(f"[Composio] Using integrations file: {self._integrations_file}")
        else:
            logger.warning("[Composio] No usable cache directory found")

    def _get_user_id(self) -> str:
        """Get or create a unique user ID for this installation.

        The id is generated once (uuid4) and persisted in the state file so
        the same installation keeps talking to the worker as the same user.
        """
        if self._user_id:
            return self._user_id

        # Try to load from storage
        state = self._load_state()
        if 'user_id' in state and state['user_id']:
            self._user_id = state['user_id']
        else:
            # Generate new user ID
            self._user_id = str(uuid.uuid4())
            state['user_id'] = self._user_id
            self._save_state(state)

        return self._user_id

    def _load_state(self) -> Dict[str, Any]:
        """Load integration state from file.

        Returns a default empty state if the file is missing or unreadable.
        """
        if not self._integrations_file or not self._integrations_file.exists():
            return {'user_id': None, 'connections': {}}

        try:
            with open(self._integrations_file, 'r') as f:
                return json.load(f)
        except Exception as e:
            logger.error(f"[Composio] Error loading state: {e}")
            return {'user_id': None, 'connections': {}}

    def _save_state(self, state: Dict[str, Any]):
        """Save integration state to file (best-effort; errors are logged)."""
        if not self._integrations_file:
            logger.warning("[Composio] No integrations file configured")
            return

        try:
            RobustFileOperations.safe_write_json(self._integrations_file, state)
            logger.debug("[Composio] State saved successfully")
        except Exception as e:
            logger.error(f"[Composio] Error saving state: {e}")

    def _compute_credentials_hash(self, credentials: Dict[str, Any]) -> str:
        """Compute a short hash of credentials for change detection."""
        # Use access_token as the primary indicator of credential changes
        token = credentials.get('access_token', '')
        if not token:
            # Fallback to hashing all credential values
            token = json.dumps(credentials, sort_keys=True)
        return hashlib.sha256(token.encode()).hexdigest()[:16]

    def _has_connected_integrations(self) -> bool:
        """Check if there are any connected integrations."""
        state = self._load_state()
        connections = state.get('connections', {})
        return any(
            conn.get('status') == 'connected'
            for conn in connections.values()
        )

    def _start_refresh_task(self):
        """Start the background token refresh task if not already running.

        Must be called from within a running event loop; otherwise the task
        is skipped with a warning.
        """
        if self._refresh_task is not None and not self._refresh_task.done():
            logger.debug("[Composio] Refresh task already running")
            return

        try:
            # get_running_loop() is the non-deprecated replacement for
            # get_event_loop(); it raises RuntimeError outside a loop,
            # which the except clause below already handles.
            loop = asyncio.get_running_loop()
            self._refresh_task = loop.create_task(self._refresh_loop())
            logger.info("[Composio] Started background token refresh task")
        except RuntimeError:
            logger.warning("[Composio] No event loop available, refresh task not started")

    def _stop_refresh_task(self):
        """Stop the background token refresh task."""
        if self._refresh_task is not None and not self._refresh_task.done():
            self._refresh_task.cancel()
            logger.info("[Composio] Stopped background token refresh task")
        self._refresh_task = None

    async def _refresh_loop(self):
        """Background loop that periodically refreshes tokens for all connected integrations.

        Exits when no integrations remain connected or when cancelled;
        other errors are logged and the loop continues.
        """
        logger.info(f"[Composio] Token refresh loop started (interval: {TOKEN_REFRESH_INTERVAL_SECONDS}s)")

        while True:
            try:
                await asyncio.sleep(TOKEN_REFRESH_INTERVAL_SECONDS)

                # Check if there are still connected integrations
                if not self._has_connected_integrations():
                    logger.info("[Composio] No connected integrations, stopping refresh loop")
                    break

                # Refresh tokens for all connected integrations
                state = self._load_state()
                connections = state.get('connections', {})

                for integration_id, connection in connections.items():
                    if connection.get('status') != 'connected':
                        continue

                    try:
                        result = await self.refresh_token(integration_id)
                        if result.get('tokens_updated'):
                            logger.info(f"[Composio] Tokens updated for {integration_id}")
                        else:
                            logger.debug(f"[Composio] Tokens unchanged for {integration_id}")
                    except Exception as e:
                        logger.warning(f"[Composio] Failed to refresh {integration_id}: {e}")

            except asyncio.CancelledError:
                logger.info("[Composio] Refresh loop cancelled")
                break
            except Exception as e:
                logger.error(f"[Composio] Error in refresh loop: {e}")
                # Continue loop despite errors

    async def refresh_token(self, integration_id: str) -> Dict[str, Any]:
        """
        Refresh tokens for an integration by fetching fresh credentials from Composio.

        If the credentials changed, the secure token store is updated and the
        MCP server is reconnected so it picks up the new tokens.

        Returns:
            Dict with 'success', 'tokens_updated', and optionally error info

        Raises:
            ValueError: if *integration_id* is unknown.
        """
        config = get_integration_config(integration_id)
        if not config:
            raise ValueError(f"Unknown integration: {integration_id}")

        state = self._load_state()
        connection = state.get('connections', {}).get(integration_id, {})

        if connection.get('status') != 'connected':
            return {'success': False, 'error': 'Integration not connected'}

        user_id = self._get_user_id()

        # Fetch fresh credentials from Composio
        try:
            result = await self._make_worker_request(
                'POST',
                f'/credentials/{integration_id}/{user_id}'
            )
        except Exception as e:
            logger.error(f"[Composio] Failed to fetch credentials for {integration_id}: {e}")
            return {'success': False, 'error': str(e)}

        credentials = result.get('credentials', {})
        if not credentials:
            return {'success': False, 'error': 'No credentials received'}

        # Check if tokens have changed
        new_hash = self._compute_credentials_hash(credentials)
        old_hash = connection.get('credentials_hash', '')

        if new_hash == old_hash:
            # Update last_refresh timestamp even if tokens unchanged
            # NOTE(review): utcnow() is naive and deprecated in 3.12+;
            # timestamp is write-only here — consider timezone-aware UTC.
            connection['last_refresh'] = datetime.utcnow().isoformat()
            state['connections'][integration_id] = connection
            self._save_state(state)
            return {'success': True, 'tokens_updated': False}

        logger.info(f"[Composio] Token change detected for {integration_id}")

        # Get MCP server ID from connection
        mcp_server_id = connection.get('mcp_server_id')
        if not mcp_server_id:
            return {'success': False, 'error': 'No MCP server ID found'}

        # Update tokens in secure store (NOT in mcp.json)
        token_store = get_oauth_token_store()
        env_vars = build_env_from_credentials(integration_id, credentials)
        token_store.update_tokens(mcp_server_id, env_vars)

        # Reconnect MCP server to pick up new tokens
        mcp_service = get_mcp_service()

        try:
            # Disconnect existing server
            try:
                await mcp_service.disconnect(mcp_server_id)
                logger.debug(f"[Composio] Disconnected MCP server: {mcp_server_id}")
            except Exception as e:
                logger.warning(f"[Composio] Error disconnecting MCP server: {e}")

            # Reconnect with new tokens (tokens will be injected from store)
            await mcp_service.connect(mcp_server_id)
            logger.info(f"[Composio] Reconnected MCP server {mcp_server_id} with new tokens")

        except Exception as e:
            logger.error(f"[Composio] Failed to update MCP server: {e}")
            # Still update the hash since we have new credentials
            # The MCP server may need manual intervention

        # Update state with new hash and timestamp
        connection['credentials_hash'] = new_hash
        connection['last_refresh'] = datetime.utcnow().isoformat()
        state['connections'][integration_id] = connection
        self._save_state(state)

        return {'success': True, 'tokens_updated': True}

    def is_configured(self) -> bool:
        """Check if the worker URL is properly configured (not a placeholder)."""
        return bool(self._worker_url and 'YOUR_SUBDOMAIN' not in self._worker_url)

    def get_worker_url(self) -> str:
        """Get the worker URL for frontend to call directly."""
        return self._worker_url

    async def _make_worker_request(
        self,
        method: str,
        path: str,
        data: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """Make a JSON request to the OAuth worker and return the decoded body.

        Raises:
            ValueError: if the worker URL is not configured.
            Exception: on any non-2xx response (error text included).
        """
        if not self.is_configured():
            raise ValueError("Composio worker URL not configured. Set COMPOSIO_WORKER_URL environment variable.")

        url = f"{self._worker_url}{path}"
        headers = {
            'Content-Type': 'application/json',
        }

        async with aiohttp.ClientSession() as session:
            async with session.request(
                method,
                url,
                headers=headers,
                json=data,
            ) as response:
                if not response.ok:
                    error_text = await response.text()
                    logger.error(f"[Composio] Worker error: {response.status} - {error_text}")
                    raise Exception(f"Worker request failed: {error_text}")

                return await response.json()

    def get_integrations(self) -> List[Dict[str, Any]]:
        """
        Get list of integrations with their connection status.
        Safe to expose to frontend.
        """
        integrations = get_integration_info_for_frontend()
        state = self._load_state()
        connections = state.get('connections', {})

        # Add connection status to each integration
        for integration in integrations:
            integration_id = integration['id']
            connection = connections.get(integration_id, {})
            integration['status'] = connection.get('status', 'disconnected')
            integration['mcpServerId'] = connection.get('mcp_server_id')

        return integrations

    def get_initiate_url(self, integration_id: str) -> Dict[str, str]:
        """
        Get the worker URL for initiating OAuth connection.
        Frontend will call the worker directly.

        Marks the integration as 'connecting' in local state as a side effect.

        Returns:
            Dict with 'workerUrl' and 'userId' for the frontend to use

        Raises:
            ValueError: if the integration is unknown or the worker URL is
                not configured.
        """
        config = get_integration_config(integration_id)
        if not config:
            raise ValueError(f"Unknown integration: {integration_id}")

        if not self.is_configured():
            raise ValueError("Composio worker URL not configured. Set COMPOSIO_WORKER_URL environment variable.")

        user_id = self._get_user_id()

        # Update state to connecting
        state = self._load_state()
        if 'connections' not in state:
            state['connections'] = {}

        state['connections'][integration_id] = {
            'status': 'connecting',
        }
        self._save_state(state)

        return {
            'workerUrl': f"{self._worker_url}/initiate/{integration_id}",
            'userId': user_id,
        }

    async def complete_connection(self, integration_id: str) -> Dict[str, Any]:
        """
        Complete OAuth connection and create MCP server.
        Called after OAuth callback is received.

        Returns:
            Dict with connection details and MCP server ID

        Raises:
            ValueError: if the integration is unknown.
            Exception: if credentials or the MCP config cannot be obtained.
        """
        config = get_integration_config(integration_id)
        if not config:
            raise ValueError(f"Unknown integration: {integration_id}")

        user_id = self._get_user_id()

        # Get credentials from worker
        result = await self._make_worker_request(
            'POST',
            f'/credentials/{integration_id}/{user_id}'
        )

        credentials = result.get('credentials', {})
        if not credentials:
            raise Exception(f"No credentials received for {integration_id}")

        # Create MCP server configuration WITHOUT tokens (for storage in mcp.json)
        mcp_config = get_mcp_server_config_for_storage(integration_id)
        if not mcp_config:
            raise Exception(f"Failed to create MCP config for {integration_id}")

        # Store tokens securely (NOT in mcp.json)
        token_store = get_oauth_token_store()
        env_vars = build_env_from_credentials(integration_id, credentials)
        token_store.store_tokens(integration_id, mcp_config['id'], env_vars)

        # Save MCP server config (without tokens) to mcp.json
        mcp_service = get_mcp_service()
        mcp_service.save_server_config(mcp_config)

        # Connect to the new MCP server (tokens will be injected at runtime)
        try:
            await mcp_service.connect(mcp_config['id'])
            logger.info(f"[Composio] Connected to MCP server: {mcp_config['id']}")
        except Exception as e:
            logger.warning(f"[Composio] Failed to auto-connect MCP server: {e}")

        # Update state to connected with credentials hash for refresh detection
        state = self._load_state()
        if 'connections' not in state:
            state['connections'] = {}

        state['connections'][integration_id] = {
            'status': 'connected',
            'account_id': result.get('accountId'),
            'mcp_server_id': mcp_config['id'],
            'credentials_hash': self._compute_credentials_hash(credentials),
            'last_refresh': datetime.utcnow().isoformat(),
        }
        self._save_state(state)

        # Start background refresh task if not already running
        self._start_refresh_task()

        return {
            'status': 'connected',
            'mcpServerId': mcp_config['id'],
            'mcpServerName': mcp_config['name'],
        }

    async def check_connection_status(self, integration_id: str) -> Dict[str, Any]:
        """
        Check the connection status for an integration.

        If the worker reports the connection active but local state does not,
        completes the setup via complete_connection().

        Raises:
            ValueError: if the integration is unknown.
        """
        config = get_integration_config(integration_id)
        if not config:
            raise ValueError(f"Unknown integration: {integration_id}")

        user_id = self._get_user_id()

        # Check status with worker
        result = await self._make_worker_request(
            'GET',
            f'/status/{user_id}'
        )

        status_info = result.get('status', {}).get(integration_id, {})
        connected = status_info.get('connected', False)

        # Update local state
        state = self._load_state()
        connection = state.get('connections', {}).get(integration_id, {})

        if connected and connection.get('status') != 'connected':
            # Connection became active, need to complete setup
            return await self.complete_connection(integration_id)

        return {
            'status': 'connected' if connected else connection.get('status', 'disconnected'),
            'mcpServerId': connection.get('mcp_server_id'),
        }

    async def disconnect(self, integration_id: str) -> Dict[str, Any]:
        """
        Disconnect an integration and remove MCP server.

        Removes (best-effort, each step logged on failure): the local MCP
        server config, the stored tokens, the remote Composio connection,
        and the local state entry.

        Raises:
            ValueError: if the integration is unknown.
        """
        config = get_integration_config(integration_id)
        if not config:
            raise ValueError(f"Unknown integration: {integration_id}")

        user_id = self._get_user_id()
        state = self._load_state()
        connection = state.get('connections', {}).get(integration_id, {})

        # Remove MCP server if it exists (sync function, don't await)
        mcp_server_id = connection.get('mcp_server_id')
        if mcp_server_id:
            try:
                mcp_service = get_mcp_service()
                mcp_service.delete_server_config(mcp_server_id)
                logger.info(f"[Composio] Removed MCP server: {mcp_server_id}")
            except Exception as e:
                logger.warning(f"[Composio] Failed to remove MCP server: {e}")

            # Remove tokens from secure store.
            # Fix: previously this ran even when mcp_server_id was None,
            # calling remove_tokens(None); now guarded by the check above.
            try:
                token_store = get_oauth_token_store()
                token_store.remove_tokens(mcp_server_id)
                logger.info(f"[Composio] Removed tokens for: {mcp_server_id}")
            except Exception as e:
                logger.warning(f"[Composio] Failed to remove tokens: {e}")

        # Call worker to disconnect from Composio
        try:
            await self._make_worker_request(
                'POST',
                f'/disconnect/{integration_id}/{user_id}'
            )
        except Exception as e:
            logger.warning(f"[Composio] Failed to disconnect from Composio: {e}")

        # Update state
        if 'connections' in state and integration_id in state['connections']:
            del state['connections'][integration_id]
            self._save_state(state)

        # Stop refresh task if no more connected integrations
        if not self._has_connected_integrations():
            self._stop_refresh_task()

        return {'status': 'disconnected'}
506
+
507
+
508
# Module-level singleton accessor
def get_composio_service() -> ComposioIntegrationService:
    """Return the process-wide ComposioIntegrationService singleton."""
    service = ComposioIntegrationService.get_instance()
    return service
@@ -0,0 +1,182 @@
1
+ """
2
+ HTTP Handlers for Database Configuration API
3
+ Provides REST endpoints for managing db.toml configurations
4
+ """
5
+
6
+ import json
7
+
8
+ from jupyter_server.base.handlers import APIHandler
9
+ import tornado
10
+
11
+ from .database_config_service import get_database_config_service
12
+
13
+
14
class DatabaseConfigsHandler(APIHandler):
    """REST handler for CRUD operations on database configurations (db.toml)."""

    @tornado.web.authenticated
    def get(self, db_type=None):
        """Return configurations — all of them, or only those of *db_type*."""
        try:
            cfg_service = get_database_config_service()
            found = (
                cfg_service.get_configs_by_type(db_type)
                if db_type
                else cfg_service.get_all_configs()
            )
            self.finish(json.dumps({
                "configurations": found,
                "count": len(found)
            }))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))

    @tornado.web.authenticated
    def post(self, db_type=None):
        """Create a new configuration of type *db_type* from the JSON body."""
        try:
            # The type segment of the URL is mandatory for writes.
            if not db_type:
                self.set_status(400)
                self.finish(json.dumps({"error": "Database type required in URL path"}))
                return

            payload = json.loads(self.request.body.decode('utf-8'))
            cfg_service = get_database_config_service()

            if cfg_service.add_config(db_type, payload):
                self.finish(json.dumps({
                    "success": True,
                    "message": f"Added {db_type} configuration: {payload.get('name', 'unnamed')}"
                }))
            else:
                self.set_status(400)
                self.finish(json.dumps({"error": "Failed to add configuration. Check name uniqueness and required fields."}))
        except json.JSONDecodeError as e:
            self.set_status(400)
            self.finish(json.dumps({"error": f"Invalid JSON: {str(e)}"}))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))

    @tornado.web.authenticated
    def put(self, db_type=None):
        """Update an existing configuration; the body must carry its 'name'."""
        try:
            if not db_type:
                self.set_status(400)
                self.finish(json.dumps({"error": "Database type required in URL path"}))
                return

            payload = json.loads(self.request.body.decode('utf-8'))
            # 'name' identifies the target; the remaining keys are the update.
            cfg_name = payload.pop("name", None)

            if not cfg_name:
                self.set_status(400)
                self.finish(json.dumps({"error": "Configuration 'name' required in body"}))
                return

            cfg_service = get_database_config_service()
            if cfg_service.update_config(db_type, cfg_name, payload):
                self.finish(json.dumps({
                    "success": True,
                    "message": f"Updated {db_type} configuration: {cfg_name}"
                }))
            else:
                self.set_status(404)
                self.finish(json.dumps({"error": f"Configuration '{cfg_name}' not found"}))
        except json.JSONDecodeError as e:
            self.set_status(400)
            self.finish(json.dumps({"error": f"Invalid JSON: {str(e)}"}))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))

    @tornado.web.authenticated
    def delete(self, db_type=None):
        """Delete a configuration identified by the 'name' query parameter."""
        try:
            if not db_type:
                self.set_status(400)
                self.finish(json.dumps({"error": "Database type required in URL path"}))
                return

            cfg_name = self.get_argument("name", None)
            if not cfg_name:
                self.set_status(400)
                self.finish(json.dumps({"error": "Configuration 'name' required as query parameter"}))
                return

            cfg_service = get_database_config_service()
            if cfg_service.remove_config(db_type, cfg_name):
                self.finish(json.dumps({
                    "success": True,
                    "message": f"Deleted {db_type} configuration: {cfg_name}"
                }))
            else:
                self.set_status(404)
                self.finish(json.dumps({"error": f"Configuration '{cfg_name}' not found"}))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))
129
+
130
+
131
class DatabaseDefaultsHandler(APIHandler):
    """REST handler for reading and writing database default settings."""

    @tornado.web.authenticated
    def get(self):
        """Return the current defaults."""
        try:
            cfg_service = get_database_config_service()
            self.finish(json.dumps({"defaults": cfg_service.get_defaults()}))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))

    @tornado.web.authenticated
    def post(self):
        """Replace the defaults with the JSON body."""
        try:
            payload = json.loads(self.request.body.decode('utf-8'))
            cfg_service = get_database_config_service()

            if cfg_service.set_defaults(payload):
                self.finish(json.dumps({
                    "success": True,
                    "message": "Defaults updated"
                }))
            else:
                self.set_status(500)
                self.finish(json.dumps({"error": "Failed to set defaults"}))
        except json.JSONDecodeError as e:
            self.set_status(400)
            self.finish(json.dumps({"error": f"Invalid JSON: {str(e)}"}))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))
167
+
168
+
169
class SignalPilotHomeInfoHandler(APIHandler):
    """Read-only endpoint exposing SignalPilotHome directory information."""

    @tornado.web.authenticated
    def get(self):
        """Return the info dict from the SignalPilotHome manager as JSON."""
        try:
            # Imported lazily to avoid a module-level import cycle.
            from .signalpilot_home import get_signalpilot_home
            info_payload = get_signalpilot_home().get_info()
            self.finish(json.dumps(info_payload))
        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({"error": str(e)}))