signalpilot-ai-internal 0.10.0__py3-none-any.whl → 0.11.24__py3-none-any.whl
This diff shows the contents of publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- signalpilot_ai_internal/__init__.py +1 -0
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/cache_service.py +22 -21
- signalpilot_ai_internal/composio_handlers.py +224 -0
- signalpilot_ai_internal/composio_service.py +511 -0
- signalpilot_ai_internal/database_config_handlers.py +182 -0
- signalpilot_ai_internal/database_config_service.py +166 -0
- signalpilot_ai_internal/databricks_schema_service.py +907 -0
- signalpilot_ai_internal/file_scanner_service.py +5 -146
- signalpilot_ai_internal/handlers.py +388 -9
- signalpilot_ai_internal/integrations_config.py +256 -0
- signalpilot_ai_internal/log_utils.py +31 -0
- signalpilot_ai_internal/mcp_handlers.py +532 -0
- signalpilot_ai_internal/mcp_server_manager.py +298 -0
- signalpilot_ai_internal/mcp_service.py +1255 -0
- signalpilot_ai_internal/oauth_token_store.py +141 -0
- signalpilot_ai_internal/schema_search_config.yml +17 -11
- signalpilot_ai_internal/schema_search_service.py +85 -4
- signalpilot_ai_internal/signalpilot_home.py +961 -0
- signalpilot_ai_internal/snowflake_schema_service.py +2 -0
- signalpilot_ai_internal/test_dbt_mcp_server.py +180 -0
- signalpilot_ai_internal/unified_database_schema_service.py +2 -0
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +15 -48
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +9 -52
- {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +7 -1
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.bab318d6caadb055e29c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/129.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/179.fd45a2e75d471d0aa3b9.js +7 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.81105a94aa873fc51a94.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.a002dd4630d3b6404a90.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.cc6f6ecacd703bcdb468.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.817a883549d55a0e0576.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.a4daecd44f1e9364e44a.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.667225aab294fb5ed161.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8138af2522716e5a926f.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.925c73e32f3c07448da0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/477.aaa4cc9e87801fb45f5b.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.370056149a59022b700c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/510.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.835f97f7ccfc70ff5c93.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.6c13335f73de089d6b1e.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/574.ad2709e91ebcac5bbe68.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.bddbab8e464fe31f0393.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.fda1bcdb10497b0a6ade.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.d046701f475fcbf6697d.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.c306dffd4cfe8a613d13.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.e39898b6f336539f228c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.77cc0ca10a1860df1b52.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.4e2850b2af985ed0d378.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js +2 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.055f50d20a31f3068c72.js +1 -0
- {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +47 -29
- {signalpilot_ai_internal-0.10.0.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/METADATA +14 -31
- signalpilot_ai_internal-0.11.24.dist-info/RECORD +66 -0
- signalpilot_ai_internal-0.11.24.dist-info/licenses/LICENSE +7 -0
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/330.af2e9cb5def5ae2b84d5.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -2
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.e9acd2e1f9739037f1ab.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/786.770dc7bcab77e14cc135.js +0 -7
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.25ddd15aca09421d3765.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -1
- signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b05b2f0c9617ba28370d.js +0 -1
- signalpilot_ai_internal-0.10.0.dist-info/RECORD +0 -50
- signalpilot_ai_internal-0.10.0.dist-info/licenses/LICENSE +0 -29
- {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- /signalpilot_ai_internal-0.10.0.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt → /signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.10.0.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.10.0.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/WHEEL +0 -0
**signalpilot_ai_internal/file_scanner_service.py**

```diff
@@ -19,6 +19,7 @@ import pyarrow.dataset as ds
 from openpyxl import load_workbook
 
 from .cache_service import get_cache_service, get_file_scan_cache_manager
+from .log_utils import print
 
 
 class FileScannerService:
```
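Both modified modules now route diagnostics through `from .log_utils import print`, which shadows the builtin for the rest of the file. `log_utils.py` itself (+31 lines, new in this release) is not shown in this diff; a gated override of this general shape is the usual pattern (hypothetical sketch, not the package's actual implementation):

```python
# Hypothetical sketch of a controlled-print module in the spirit of
# signalpilot_ai_internal/log_utils.py (the real file is not shown in this diff).
import builtins
import os

# Assumption: verbosity is gated by an env var; the variable name is illustrative.
_VERBOSE = os.environ.get("SIGNALPILOT_VERBOSE", "").lower() in ("1", "true")

def print(*args, **kwargs):
    """Drop-in replacement for builtins.print that can be silenced globally."""
    if _VERBOSE:
        builtins.print(*args, **kwargs)
```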
```diff
@@ -30,7 +31,7 @@ class FileScannerService:
         self._lock = threading.RLock()
 
         # Data file extensions
-        self.DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
+        self.DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
                                 '.feather', '.hdf5', '.h5', '.sql', '.db', '.sqlite', '.tsv', '.txt', '.ipynb'}
 
         # Directories to exclude from search
```
```diff
@@ -76,100 +77,6 @@ class FileScannerService:
                 return (printable / len(chunk)) < 0.7
         except (IOError, OSError):
             return True
-
-    def _generate_pickle_data_preview(self, data: Any, max_items: int = 3, max_chars: int = 1000) -> Tuple[str, bool]:
-        """
-        Generate a content preview for non-DataFrame pickle data.
-        Returns (preview_content, is_truncated)
-        """
-        try:
-            data_type = type(data).__name__
-
-            if isinstance(data, (list, tuple)):
-                if len(data) == 0:
-                    return f"Empty {data_type}", False
-
-                preview_items = []
-                for i, item in enumerate(data[:max_items]):
-                    item_str = str(item)
-                    if len(item_str) > 200:
-                        item_str = item_str[:200] + "..."
-                    preview_items.append(f"[{i}]: {item_str}")
-
-                preview = f"{data_type} with {len(data)} items:\n" + "\n".join(preview_items)
-                is_truncated = len(data) > max_items
-
-                if len(preview) > max_chars:
-                    preview = preview[:max_chars] + "..."
-                    is_truncated = True
-
-                return preview, is_truncated
-
-            elif isinstance(data, dict):
-                if len(data) == 0:
-                    return f"Empty {data_type}", False
-
-                preview_items = []
-                for i, (key, value) in enumerate(list(data.items())[:max_items]):
-                    key_str = str(key)
-                    value_str = str(value)
-                    if len(value_str) > 150:
-                        value_str = value_str[:150] + "..."
-                    preview_items.append(f"'{key_str}': {value_str}")
-
-                preview = f"{data_type} with {len(data)} keys:\n" + "\n".join(preview_items)
-                is_truncated = len(data) > max_items
-
-                if len(preview) > max_chars:
-                    preview = preview[:max_chars] + "..."
-                    is_truncated = True
-
-                return preview, is_truncated
-
-            elif isinstance(data, np.ndarray):
-                shape_str = str(data.shape)
-                dtype_str = str(data.dtype)
-
-                if data.size == 0:
-                    return f"Empty numpy array: shape={shape_str}, dtype={dtype_str}", False
-
-                # Show first few elements
-                flat_data = data.flatten()[:max_items]
-                elements_str = ", ".join([str(x) for x in flat_data])
-
-                preview = f"numpy.ndarray: shape={shape_str}, dtype={dtype_str}\nFirst elements: [{elements_str}]"
-                is_truncated = data.size > max_items
-
-                if len(preview) > max_chars:
-                    preview = preview[:max_chars] + "..."
-                    is_truncated = True
-
-                return preview, is_truncated
-
-            elif isinstance(data, str):
-                if len(data) == 0:
-                    return "Empty string", False
-
-                preview = f"String ({len(data)} chars): {data[:max_chars]}"
-                is_truncated = len(data) > max_chars
-                return preview, is_truncated
-
-            elif isinstance(data, (int, float, bool)):
-                return f"{data_type}: {data}", False
-
-            else:
-                # For other types, try to convert to string
-                data_str = str(data)
-                if len(data_str) > max_chars:
-                    data_str = data_str[:max_chars] + "..."
-                    is_truncated = True
-                else:
-                    is_truncated = False
-
-                return f"{data_type}: {data_str}", is_truncated
-
-        except Exception as e:
-            return f"Error generating preview for {type(data).__name__}: {str(e)}", False
 
     def _parse_json_array_simple(self, filepath: str, max_items: int = 5) -> Tuple[List[Any], bool]:
         """
```
```diff
@@ -613,12 +520,11 @@ class FileScannerService:
             'is_tsv': extension == '.tsv',
             'is_json': extension == '.json',
             'is_parquet': extension == '.parquet',
-            'is_pkl': extension in ['.pkl', '.pickle'],
             'is_xlsx': extension == '.xlsx',
             'is_ipynb': extension == '.ipynb',
             'is_text': extension in ['.txt', '.md', '.py', '.js', '.ts', '.html', '.xml', '.ipynb'],
-            'is_data': extension in ['.csv', '.tsv', '.json', '.jsonl', '.parquet', '.
-            'is_binary': extension in ['.parquet', '.
+            'is_data': extension in ['.csv', '.tsv', '.json', '.jsonl', '.parquet', '.xlsx'],
+            'is_binary': extension in ['.parquet', '.xlsx'] # Will be set later based on actual binary detection
         }
 
         try:
```

(The two removed 0.10.0 lines are truncated by the diff viewer; they are kept here as rendered.)
```diff
@@ -1121,8 +1027,6 @@ class FileScannerService:
             file_type = 'tsv'
         elif extension == '.parquet':
             file_type = 'parquet'
-        elif extension in ['.pkl', '.pickle']:
-            file_type = 'pkl'
         elif extension == '.xlsx':
             file_type = 'xlsx'
         elif extension == '.json' or extension == '.jsonl':
```
```diff
@@ -1174,51 +1078,6 @@ class FileScannerService:
                 except Exception:
                     sheet_names = ['Sheet1'] # Default sheet name
                     total_sheets = 1 # Default to 1 if we can't determine
-            elif file_type == 'pkl':
-                print(f"Reading pickle file: {abs_path}")
-                data = pd.read_pickle(abs_path)
-                print(f"Data: {data}")
-                if isinstance(data, pd.DataFrame):
-                    print(f"Data is a DataFrame: {data.head(5)}")
-                    df = data.head(5) # Limit to first 5 rows
-                else:
-                    # Handle non-DataFrame pickle data
-                    print(f"Data is not a DataFrame: {type(data).__name__}")
-
-                    # Get file info
-                    file_info = self._get_file_type_info(str(item), extension)
-                    entry['file_info'] = file_info
-
-                    # Check if file is binary (pickle files are always binary)
-                    is_binary = True
-                    file_info['is_binary'] = True
-
-                    # Generate content preview for the pickle data
-                    content_preview, is_truncated = self._generate_pickle_data_preview(data)
-                    entry['content_preview'] = content_preview
-                    entry['is_truncated'] = is_truncated
-
-                    # Create schema for non-DataFrame pickle data
-                    schema = {
-                        'success': True,
-                        'fileId': abs_path,
-                        'fileName': item.name,
-                        'filePath': abs_path,
-                        'fileType': file_type,
-                        'extractedAt': datetime.now().isoformat(),
-                        'summary': f'Pickle file containing {type(data).__name__}',
-                        'columns': [],
-                        'totalRows': 1 if not hasattr(data, '__len__') else len(data) if hasattr(data, '__len__') else 1,
-                        'totalColumns': 0,
-                        'fileMtime': current_mtime
-                    }
-
-                    # Cache the entry
-                    if entry:
-                        entry['schema'] = schema
-                        self.file_scan_cache.set_file_entry(abs_path, entry)
-
-                    return schema
             elif file_type == 'json':
                 # Read and analyze JSON file
                 json_data, file_format, is_truncated = self._read_json_file(abs_path)
```
```diff
@@ -1273,7 +1132,7 @@ class FileScannerService:
 
             return schema
 
-        # Get file info for
+        # Get file info for other file types
         file_info = self._get_file_type_info(str(item), extension)
         entry['file_info'] = file_info
 
```
**signalpilot_ai_internal/handlers.py**

```diff
@@ -8,12 +8,42 @@ from jupyter_server.base.handlers import APIHandler
 from jupyter_server.utils import url_path_join
 import tornado
 
+# Import controlled print function
+from .log_utils import print
 from .cache_service import get_cache_service
 from .cache_handlers import ChatHistoriesHandler, AppValuesHandler, CacheInfoHandler
 from .unified_database_schema_service import UnifiedDatabaseSchemaHandler, UnifiedDatabaseQueryHandler
 from .snowflake_schema_service import SnowflakeSchemaHandler, SnowflakeQueryHandler
+from .databricks_schema_service import DatabricksSchemaHandler, DatabricksQueryHandler, DatabricksTestHandler
 from .file_scanner_service import get_file_scanner_service
 from .schema_search_service import SchemaSearchHandler
+from .mcp_handlers import (
+    MCPServersHandler,
+    MCPServerHandler,
+    MCPConnectHandler,
+    MCPDisconnectHandler,
+    MCPToolsHandler,
+    MCPAllToolsHandler,
+    MCPToolCallHandler,
+    MCPServerEnableHandler,
+    MCPServerDisableHandler,
+    MCPToolEnableHandler,
+    MCPConfigFileHandler
+)
+from .composio_handlers import (
+    IntegrationsHandler,
+    IntegrationConnectHandler,
+    IntegrationCompleteHandler,
+    IntegrationStatusHandler,
+    IntegrationDisconnectHandler,
+    IntegrationRefreshHandler,
+)
+from .database_config_handlers import (
+    DatabaseConfigsHandler,
+    DatabaseDefaultsHandler,
+    SignalPilotHomeInfoHandler,
+)
+from .signalpilot_home import get_user_rules_manager
 
 
 class HelloWorldHandler(APIHandler):
```
```diff
@@ -28,11 +58,205 @@ class HelloWorldHandler(APIHandler):
         }))
 
 
+class UserRulesHandler(APIHandler):
+    """Handler for user rules (snippets) stored as markdown files."""
+
+    @tornado.web.authenticated
+    def get(self, rule_id=None):
+        """Get all rules or a specific rule by ID."""
+        try:
+            rules_manager = get_user_rules_manager()
+
+            if not rules_manager.is_available():
+                self.set_status(503)
+                self.finish(json.dumps({
+                    "error": "User rules service not available",
+                    "message": "Rules directory is not accessible"
+                }))
+                return
+
+            if rule_id:
+                # Get specific rule
+                rule = rules_manager.get_rule(rule_id)
+                if rule is None:
+                    self.set_status(404)
+                    self.finish(json.dumps({
+                        "error": "Rule not found",
+                        "rule_id": rule_id
+                    }))
+                else:
+                    self.finish(json.dumps(rule))
+            else:
+                # Get all rules
+                rules = rules_manager.list_rules()
+                self.finish(json.dumps({
+                    "rules": rules,
+                    "count": len(rules)
+                }))
+
+        except Exception as e:
+            self.set_status(500)
+            self.finish(json.dumps({
+                "error": "Internal server error",
+                "message": str(e)
+            }))
+
+    @tornado.web.authenticated
+    def post(self, rule_id=None):
+        """Create a new rule or update an existing one."""
+        try:
+            rules_manager = get_user_rules_manager()
+
+            if not rules_manager.is_available():
+                self.set_status(503)
+                self.finish(json.dumps({
+                    "error": "User rules service not available",
+                    "message": "Rules directory is not accessible"
+                }))
+                return
+
+            # Parse request body
+            try:
+                body = json.loads(self.request.body.decode('utf-8'))
+            except json.JSONDecodeError:
+                self.set_status(400)
+                self.finish(json.dumps({
+                    "error": "Invalid JSON in request body"
+                }))
+                return
+
+            if rule_id:
+                # Update existing rule
+                title = body.get('title')
+                content = body.get('content')
+                description = body.get('description')
+
+                result = rules_manager.update_rule(
+                    rule_id=rule_id,
+                    title=title,
+                    content=content,
+                    description=description
+                )
+
+                if result:
+                    self.finish(json.dumps({
+                        "success": True,
+                        "rule": result,
+                        "message": "Rule updated successfully"
+                    }))
+                else:
+                    self.set_status(404)
+                    self.finish(json.dumps({
+                        "error": "Rule not found",
+                        "rule_id": rule_id
+                    }))
+            else:
+                # Create new rule
+                title = body.get('title')
+                content = body.get('content', '')
+                description = body.get('description', '')
+                provided_id = body.get('id')
+
+                if not title:
+                    self.set_status(400)
+                    self.finish(json.dumps({
+                        "error": "Missing required field: title"
+                    }))
+                    return
+
+                result = rules_manager.create_rule(
+                    title=title,
+                    content=content,
+                    description=description,
+                    rule_id=provided_id
+                )
+
+                if result:
+                    self.set_status(201)
+                    self.finish(json.dumps({
+                        "success": True,
+                        "rule": result,
+                        "message": "Rule created successfully"
+                    }))
+                else:
+                    self.set_status(500)
+                    self.finish(json.dumps({
+                        "error": "Failed to create rule"
+                    }))
+
+        except Exception as e:
+            self.set_status(500)
+            self.finish(json.dumps({
+                "error": "Internal server error",
+                "message": str(e)
+            }))
+
+    @tornado.web.authenticated
+    def delete(self, rule_id=None):
+        """Delete a rule by ID."""
+        try:
+            rules_manager = get_user_rules_manager()
+
+            if not rules_manager.is_available():
+                self.set_status(503)
+                self.finish(json.dumps({
+                    "error": "User rules service not available",
+                    "message": "Rules directory is not accessible"
+                }))
+                return
+
+            if not rule_id:
+                self.set_status(400)
+                self.finish(json.dumps({
+                    "error": "Rule ID is required for deletion"
+                }))
+                return
+
+            success = rules_manager.delete_rule(rule_id)
+
+            if success:
+                self.finish(json.dumps({
+                    "success": True,
+                    "message": f"Rule '{rule_id}' deleted successfully"
+                }))
+            else:
+                self.set_status(404)
+                self.finish(json.dumps({
+                    "error": "Rule not found",
+                    "rule_id": rule_id
+                }))
+
+        except Exception as e:
+            self.set_status(500)
+            self.finish(json.dumps({
+                "error": "Internal server error",
+                "message": str(e)
+            }))
+
+
+class UserRulesInfoHandler(APIHandler):
+    """Handler for user rules service information."""
+
+    @tornado.web.authenticated
+    def get(self):
+        """Get user rules service information."""
+        try:
+            rules_manager = get_user_rules_manager()
+            info = rules_manager.get_info()
+            self.finish(json.dumps(info))
+        except Exception as e:
+            self.set_status(500)
+            self.finish(json.dumps({
+                "error": "Internal server error",
+                "message": str(e)
+            }))
+
+
 class ReadAllFilesHandler(APIHandler):
     """Handler for reading all notebook and data files in the workspace"""
 
     # Common data file extensions
-    DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
+    DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
                        '.feather', '.hdf5', '.h5', '.sql', '.db', '.sqlite', '.tsv', '.txt'}
 
     # Directories to exclude from search
```
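The `UserRulesHandler` added above, together with the `rules` routes registered at the end of this diff, exposes a small CRUD API under `<base_url>/signalpilot-ai-internal/rules`. A hedged sketch of driving it from a client (server URL and token are placeholders, and the `id` field in the create response is an assumption about what `create_rule()` returns):

```python
# Illustrative client for the new user-rules endpoints; assumes a local
# Jupyter server and its auth token. The response field 'id' is a
# hypothetical name, not confirmed by this diff.
import requests

BASE = "http://localhost:8888/signalpilot-ai-internal"
HEADERS = {"Authorization": "token <your-jupyter-token>"}

created = requests.post(f"{BASE}/rules", headers=HEADERS, json={
    "title": "Prefer type hints",
    "content": "# Always annotate public functions",
    "description": "Style rule for generated code",
}).json()
rule_id = created["rule"]["id"]  # hypothetical field name

requests.get(f"{BASE}/rules/{rule_id}", headers=HEADERS)       # read one rule
requests.post(f"{BASE}/rules/{rule_id}", headers=HEADERS,
              json={"content": "# Prefer dataclasses"})        # update it
requests.delete(f"{BASE}/rules/{rule_id}", headers=HEADERS)    # delete it
```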
```diff
@@ -178,9 +402,9 @@ class ReadAllFilesHandler(APIHandler):
         # Search for data file references
         # Common patterns: pd.read_csv('file.csv'), open('file.csv'), 'path/to/file.csv'
         patterns = [
-            r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|
-            r'read_(?:csv|json|excel|parquet|
-            r'to_(?:csv|json|excel|parquet|
+            r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|feather|hdf5|h5|sql|db|sqlite|tsv|txt))["\']',
+            r'read_(?:csv|json|excel|parquet|feather|hdf|sql|table)\(["\']([^"\']+)["\']',
+            r'to_(?:csv|json|excel|parquet|feather|hdf|sql)\(["\']([^"\']+)["\']',
         ]
 
         found_references = set()
```
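The 0.10.0 pattern lines are truncated by the diff viewer, but the 0.11.24 replacements are complete; a quick check of what each one captures:

```python
# What the three rebuilt reference-detection patterns match.
import re

patterns = [
    r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|feather|hdf5|h5|sql|db|sqlite|tsv|txt))["\']',
    r'read_(?:csv|json|excel|parquet|feather|hdf|sql|table)\(["\']([^"\']+)["\']',
    r'to_(?:csv|json|excel|parquet|feather|hdf|sql)\(["\']([^"\']+)["\']',
]

source = """df = pd.read_csv('data/sales.csv'); df.to_parquet("out/sales.parquet")"""
for pattern in patterns:
    print(re.findall(pattern, source))
# ['data/sales.csv', 'out/sales.parquet']  <- any quoted path with a known extension
# ['data/sales.csv']                       <- argument of a read_* call
# ['out/sales.parquet']                    <- argument of a to_* call
```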
```diff
@@ -437,18 +661,72 @@ class ScannedDirectoriesHandler(APIHandler):
 
 
 class WorkDirHandler(APIHandler):
-    """Handler for returning current working directory"""
+    """Handler for returning current working directory and setup manager type"""
 
     @tornado.web.authenticated
     def get(self):
         try:
-
+            # Detect the setup manager type
+            setup_manager = self._detect_setup_manager()
+
+            self.finish(json.dumps({
+                "workdir": os.getcwd(),
+                "setupManager": setup_manager
+            }))
         except Exception as e:
             self.set_status(500)
             self.finish(json.dumps({
                 "error": str(e)
             }))
 
+
+    def _detect_setup_manager(self) -> str:
+        """
+        Detect the Python environment manager being used.
+        Returns: 'conda', 'venv', 'uv', or 'system'
+        """
+        env_type, _ = self._detect_kernel_env()
+        return env_type
+
+    def _detect_kernel_env(self):
+        """
+        Detect the actual environment running this kernel,
+        not what shell env vars claim.
+        """
+        import sys
+
+        prefix = Path(sys.prefix)
+        executable = Path(sys.executable)
+
+        # Check pyvenv.cfg first — exists for venv/uv, not conda
+        pyvenv_cfg = prefix / 'pyvenv.cfg'
+        if pyvenv_cfg.exists():
+            try:
+                content = pyvenv_cfg.read_text().lower()
+                if 'uv' in content:
+                    return 'uv', str(prefix)
+                return 'venv', str(prefix)
+            except (IOError, OSError):
+                return 'venv', str(prefix)
+
+        # No pyvenv.cfg — check if we're in a conda env
+        # Conda envs live under <conda_root>/envs/<name>/ or are the base env
+        # Key marker: conda-meta directory exists
+        conda_meta = prefix / 'conda-meta'
+        if conda_meta.exists():
+            # Extract env name from path
+            if 'envs' in prefix.parts:
+                idx = prefix.parts.index('envs')
+                env_name = prefix.parts[idx + 1] if len(prefix.parts) > idx + 1 else 'base'
+            else:
+                env_name = 'base'
+            return 'conda', env_name
+
+        # Fallback: check if prefix differs from base (generic venv)
+        if sys.prefix != sys.base_prefix:
+            return 'venv', str(prefix)
+
+        return 'system', str(prefix)
 
 class TerminalExecuteHandler(APIHandler):
     """Handler for executing terminal commands"""
```
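The probe order is what makes the result trustworthy: `pyvenv.cfg` exists for venv- and uv-created environments but never for conda, `conda-meta/` exists only for conda, and the `sys.prefix != sys.base_prefix` comparison is the last-resort virtualenv signal. A standalone sketch of the same logic, runnable outside the handler:

```python
# Minimal standalone version of the probe in WorkDirHandler._detect_kernel_env.
import sys
from pathlib import Path

def detect_env() -> str:
    prefix = Path(sys.prefix)
    pyvenv_cfg = prefix / "pyvenv.cfg"
    if pyvenv_cfg.exists():
        # uv stamps the pyvenv.cfg it writes, which the substring test picks up.
        return "uv" if "uv" in pyvenv_cfg.read_text().lower() else "venv"
    if (prefix / "conda-meta").exists():   # conda's marker directory
        return "conda"
    if sys.prefix != sys.base_prefix:      # generic virtual env fallback
        return "venv"
    return "system"

print(detect_env())
```

Note that the substring test is permissive: any `pyvenv.cfg` whose text happens to contain `uv` (for example inside a home-directory path) will be classified as uv.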
```diff
@@ -476,9 +754,15 @@ class TerminalExecuteHandler(APIHandler):
                 timeout=300
             )
 
-            def truncate_output(output: str, max_lines: int = 50) -> str:
+            def truncate_output(output: str, max_lines: int = 50, max_chars: int = 20000) -> str:
                 if not output:
                     return output
+                # First, truncate by character limit
+                if len(output) > max_chars:
+                    half = max_chars // 2
+                    truncated_chars = len(output) - max_chars
+                    output = output[:half] + f'\n... {truncated_chars} characters truncated ...\n' + output[-half:]
+                # Then, truncate by line count
                 lines = output.splitlines()
                 if len(lines) <= max_lines * 2:
                     return output
```
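The new character-level pass runs before the existing line-count pass: it keeps the first and last `max_chars // 2` characters and splices in a marker reporting how much was dropped from the middle. For example:

```python
# Behavior of the character-limit pass added to truncate_output.
output = "x" * 25_000
max_chars = 20_000
half = max_chars // 2                       # 10,000
truncated_chars = len(output) - max_chars   # 5,000
result = output[:half] + f"\n... {truncated_chars} characters truncated ...\n" + output[-half:]
assert result.count("x") == 20_000          # head and tail preserved, middle dropped
```

(The result slightly exceeds `max_chars` by the length of the marker line, which is harmless for its purpose of bounding terminal output returned to the client.)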
```diff
@@ -741,12 +1025,49 @@ def setup_handlers(web_app):
     # Snowflake service endpoints
     snowflake_schema_route = url_path_join(base_url, "signalpilot-ai-internal", "snowflake", "schema")
     snowflake_query_route = url_path_join(base_url, "signalpilot-ai-internal", "snowflake", "query")
-
+
+    # Databricks service endpoints
+    databricks_schema_route = url_path_join(base_url, "signalpilot-ai-internal", "databricks", "schema")
+    databricks_query_route = url_path_join(base_url, "signalpilot-ai-internal", "databricks", "query")
+    databricks_test_route = url_path_join(base_url, "signalpilot-ai-internal", "databricks", "test")
+
     # Notebook HTML export endpoint
     notebook_html_route = url_path_join(base_url, "signalpilot-ai-internal", "notebook", "to-html")
 
     # Terminal endpoint
     terminal_execute_route = url_path_join(base_url, "signalpilot-ai-internal", "terminal", "execute")
+
+    # MCP service endpoints
+    mcp_servers_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers")
+    mcp_server_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)")
+    mcp_connect_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "connect")
+    mcp_disconnect_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "disconnect")
+    mcp_tools_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "tools")
+    mcp_all_tools_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "tools")
+    mcp_tool_call_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "call-tool")
+    mcp_tool_enable_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "tools", "([^/]+)")
+    mcp_server_enable_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "enable")
+    mcp_server_disable_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "disable")
+    mcp_config_file_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "config-file")
+
+    # Composio integration endpoints
+    integrations_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations")
+    integration_connect_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "connect")
+    integration_complete_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "complete")
+    integration_status_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "status")
+    integration_refresh_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "refresh")
+    integration_disconnect_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)")
+
+    # Database config endpoints (db.toml)
+    db_configs_route = url_path_join(base_url, "signalpilot-ai-internal", "db-configs")
+    db_configs_type_route = url_path_join(base_url, "signalpilot-ai-internal", "db-configs", "([^/]+)")
+    db_defaults_route = url_path_join(base_url, "signalpilot-ai-internal", "db-defaults")
+    signalpilot_home_info_route = url_path_join(base_url, "signalpilot-ai-internal", "home-info")
+
+    # User rules endpoints (markdown files in user-rules/)
+    user_rules_route = url_path_join(base_url, "signalpilot-ai-internal", "rules")
+    user_rule_route = url_path_join(base_url, "signalpilot-ai-internal", "rules", "([^/]+)")
+    user_rules_info_route = url_path_join(base_url, "signalpilot-ai-internal", "rules-info")
 
     handlers = [
         # Original endpoint
```
```diff
@@ -791,9 +1112,48 @@ def setup_handlers(web_app):
         # Snowflake service endpoints
         (snowflake_schema_route, SnowflakeSchemaHandler),
         (snowflake_query_route, SnowflakeQueryHandler),
-
+
+        # Databricks service endpoints
+        (databricks_schema_route, DatabricksSchemaHandler),
+        (databricks_query_route, DatabricksQueryHandler),
+        (databricks_test_route, DatabricksTestHandler),
+
         # Notebook HTML export endpoint
         (notebook_html_route, NotebookToHTMLHandler),
+
+        # MCP service endpoints
+        # Note: More specific routes should come before parameterized routes
+        (mcp_config_file_route, MCPConfigFileHandler),
+        (mcp_servers_route, MCPServersHandler),
+        (mcp_server_route, MCPServerHandler),
+        (mcp_connect_route, MCPConnectHandler),
+        (mcp_disconnect_route, MCPDisconnectHandler),
+        (mcp_tools_route, MCPToolsHandler),
+        (mcp_all_tools_route, MCPAllToolsHandler),
+        (mcp_tool_call_route, MCPToolCallHandler),
+        (mcp_tool_enable_route, MCPToolEnableHandler),
+        (mcp_server_enable_route, MCPServerEnableHandler),
+        (mcp_server_disable_route, MCPServerDisableHandler),
+
+        # Composio integration endpoints
+        # Note: More specific routes should come before parameterized routes
+        (integrations_route, IntegrationsHandler),
+        (integration_connect_route, IntegrationConnectHandler),
+        (integration_complete_route, IntegrationCompleteHandler),
+        (integration_status_route, IntegrationStatusHandler),
+        (integration_refresh_route, IntegrationRefreshHandler),
+        (integration_disconnect_route, IntegrationDisconnectHandler),
+
+        # Database config endpoints (db.toml in cache_dir/connect/)
+        (db_configs_route, DatabaseConfigsHandler),
+        (db_configs_type_route, DatabaseConfigsHandler),
+        (db_defaults_route, DatabaseDefaultsHandler),
+        (signalpilot_home_info_route, SignalPilotHomeInfoHandler),
+
+        # User rules endpoints (markdown files in user-rules/)
+        (user_rules_route, UserRulesHandler),
+        (user_rule_route, UserRulesHandler),
+        (user_rules_info_route, UserRulesInfoHandler),
     ]
 
     web_app.add_handlers(host_pattern, handlers)
```
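The "more specific routes should come before parameterized routes" notes reflect how Tornado dispatches: handler patterns are tried in registration order and the first match wins, so literal paths are listed ahead of `([^/]+)` captures at the same depth. An illustrative, not package-specific, example:

```python
# Tornado tries handler patterns in order and dispatches to the first
# match, so literal routes should be registered before parameterized
# ones that could shadow them. Hypothetical handlers for illustration.
import tornado.web

class ServerListHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("all servers")

class ServerDetailHandler(tornado.web.RequestHandler):
    def get(self, server_id):
        self.write(f"server {server_id}")

app = tornado.web.Application([
    (r"/mcp/servers", ServerListHandler),            # literal route first
    (r"/mcp/servers/([^/]+)", ServerDetailHandler),  # capture group after
])
```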
```diff
@@ -806,6 +1166,18 @@ def setup_handlers(web_app):
     else:
         print("WARNING: SignalPilot AI cache service failed to initialize!")
 
+    # Register cleanup handler for MCP servers on shutdown
+    from .mcp_server_manager import get_mcp_server_manager
+
+    def cleanup_mcp_servers():
+        """Stop all MCP servers on shutdown"""
+        manager = get_mcp_server_manager()
+        manager.stop_all_servers()
+
+    # Register cleanup with web app
+    import atexit
+    atexit.register(cleanup_mcp_servers)
+
     print("SignalPilot AI backend handlers registered:")
     print(f" - Hello World: {hello_route}")
     print(f" - Read All Files: {read_all_files_route}")
```
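One subtlety: despite the "Register cleanup with web app" comment, the hook is registered with `atexit`, so it fires at interpreter shutdown rather than on extension teardown; that is enough to reap child MCP server processes when the Jupyter server process exits normally. The registration pattern itself, for reference:

```python
# atexit callbacks run at normal interpreter exit, in reverse registration
# order; 'stop_everything' stands in for the diff's cleanup_mcp_servers.
import atexit

def stop_everything():
    print("stopping child MCP servers...")

atexit.register(stop_everything)
```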
```diff
@@ -821,5 +1193,12 @@ def setup_handlers(web_app):
     print(f" - MySQL Query: {mysql_query_route}")
     print(f" - Snowflake Schema: {snowflake_schema_route}")
     print(f" - Snowflake Query: {snowflake_query_route}")
+    print(f" - Databricks Schema: {databricks_schema_route}")
+    print(f" - Databricks Query: {databricks_query_route}")
+    print(f" - Databricks Test: {databricks_test_route}")
     print(f" - Notebook Cells: {notebook_cells_route}")
     print(f" - Notebook to HTML: {notebook_html_route}")
+    print(f" - MCP Servers: {mcp_servers_route}")
+    print(f" - MCP Connect: {mcp_connect_route}")
+    print(f" - MCP Tools: {mcp_all_tools_route}")
+    print(f" - MCP Tool Call: {mcp_tool_call_route}")
```