signalpilot-ai-internal 0.10.22__py3-none-any.whl → 0.11.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/cache_service.py +22 -21
- signalpilot_ai_internal/composio_handlers.py +224 -0
- signalpilot_ai_internal/composio_service.py +511 -0
- signalpilot_ai_internal/database_config_handlers.py +182 -0
- signalpilot_ai_internal/database_config_service.py +166 -0
- signalpilot_ai_internal/databricks_schema_service.py +19 -14
- signalpilot_ai_internal/file_scanner_service.py +5 -146
- signalpilot_ai_internal/handlers.py +317 -8
- signalpilot_ai_internal/integrations_config.py +256 -0
- signalpilot_ai_internal/log_utils.py +31 -0
- signalpilot_ai_internal/mcp_handlers.py +33 -9
- signalpilot_ai_internal/mcp_service.py +94 -142
- signalpilot_ai_internal/oauth_token_store.py +141 -0
- signalpilot_ai_internal/schema_search_config.yml +17 -11
- signalpilot_ai_internal/schema_search_service.py +30 -10
- signalpilot_ai_internal/signalpilot_home.py +961 -0
- signalpilot_ai_internal/snowflake_schema_service.py +2 -0
- signalpilot_ai_internal/unified_database_schema_service.py +2 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +15 -48
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json → signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +9 -52
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.bab318d6caadb055e29c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/129.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/179.fd45a2e75d471d0aa3b9.js +7 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.81105a94aa873fc51a94.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.a002dd4630d3b6404a90.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.cc6f6ecacd703bcdb468.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.817a883549d55a0e0576.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.a4daecd44f1e9364e44a.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.667225aab294fb5ed161.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8138af2522716e5a926f.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.925c73e32f3c07448da0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/477.aaa4cc9e87801fb45f5b.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.370056149a59022b700c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/510.868ca665e6fc225c20a0.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.835f97f7ccfc70ff5c93.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.6c13335f73de089d6b1e.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/574.ad2709e91ebcac5bbe68.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.bddbab8e464fe31f0393.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.fda1bcdb10497b0a6ade.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.d046701f475fcbf6697d.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.c306dffd4cfe8a613d13.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.e39898b6f336539f228c.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.77cc0ca10a1860df1b52.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.4e2850b2af985ed0d378.js +1 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js +2 -0
- signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.055f50d20a31f3068c72.js +1 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +29 -29
- {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/METADATA +13 -31
- signalpilot_ai_internal-0.11.24.dist-info/RECORD +66 -0
- signalpilot_ai_internal-0.11.24.dist-info/licenses/LICENSE +7 -0
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/110.224e83db03814fd03955.js +0 -7
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.972abe1d2d66f083f9cc.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.ad22ccddd74ee306fb56.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -2
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/57.c4232851631fb2e7e59a.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/726.318e4e791edb63cc788f.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/741.dc49867fafb03ea2ba4d.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.2d75de1a8d2c3131a8db.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/801.ca9e114a30896b669a3c.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/880.d9914229e4f120e7e9e4.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/936.d80de1e4da5b520d2f3b.js +0 -1
- signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.b63c429ca81e743b403c.js +0 -1
- signalpilot_ai_internal-0.10.22.dist-info/RECORD +0 -56
- signalpilot_ai_internal-0.10.22.dist-info/licenses/LICENSE +0 -29
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
- /signalpilot_ai_internal-0.10.22.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt → /signalpilot_ai_internal-0.11.24.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/956.eeffe67d7781fd63ef4b.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.10.22.data → signalpilot_ai_internal-0.11.24.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.10.22.dist-info → signalpilot_ai_internal-0.11.24.dist-info}/WHEEL +0 -0
|
@@ -8,6 +8,8 @@ from jupyter_server.base.handlers import APIHandler
|
|
|
8
8
|
from jupyter_server.utils import url_path_join
|
|
9
9
|
import tornado
|
|
10
10
|
|
|
11
|
+
# Import controlled print function
|
|
12
|
+
from .log_utils import print
|
|
11
13
|
from .cache_service import get_cache_service
|
|
12
14
|
from .cache_handlers import ChatHistoriesHandler, AppValuesHandler, CacheInfoHandler
|
|
13
15
|
from .unified_database_schema_service import UnifiedDatabaseSchemaHandler, UnifiedDatabaseQueryHandler
|
|
@@ -28,6 +30,20 @@ from .mcp_handlers import (
|
|
|
28
30
|
MCPToolEnableHandler,
|
|
29
31
|
MCPConfigFileHandler
|
|
30
32
|
)
|
|
33
|
+
from .composio_handlers import (
|
|
34
|
+
IntegrationsHandler,
|
|
35
|
+
IntegrationConnectHandler,
|
|
36
|
+
IntegrationCompleteHandler,
|
|
37
|
+
IntegrationStatusHandler,
|
|
38
|
+
IntegrationDisconnectHandler,
|
|
39
|
+
IntegrationRefreshHandler,
|
|
40
|
+
)
|
|
41
|
+
from .database_config_handlers import (
|
|
42
|
+
DatabaseConfigsHandler,
|
|
43
|
+
DatabaseDefaultsHandler,
|
|
44
|
+
SignalPilotHomeInfoHandler,
|
|
45
|
+
)
|
|
46
|
+
from .signalpilot_home import get_user_rules_manager
|
|
31
47
|
|
|
32
48
|
|
|
33
49
|
class HelloWorldHandler(APIHandler):
|
|
@@ -42,11 +58,205 @@ class HelloWorldHandler(APIHandler):
|
|
|
42
58
|
}))
|
|
43
59
|
|
|
44
60
|
|
|
61
|
+
class UserRulesHandler(APIHandler):
|
|
62
|
+
"""Handler for user rules (snippets) stored as markdown files."""
|
|
63
|
+
|
|
64
|
+
@tornado.web.authenticated
|
|
65
|
+
def get(self, rule_id=None):
|
|
66
|
+
"""Get all rules or a specific rule by ID."""
|
|
67
|
+
try:
|
|
68
|
+
rules_manager = get_user_rules_manager()
|
|
69
|
+
|
|
70
|
+
if not rules_manager.is_available():
|
|
71
|
+
self.set_status(503)
|
|
72
|
+
self.finish(json.dumps({
|
|
73
|
+
"error": "User rules service not available",
|
|
74
|
+
"message": "Rules directory is not accessible"
|
|
75
|
+
}))
|
|
76
|
+
return
|
|
77
|
+
|
|
78
|
+
if rule_id:
|
|
79
|
+
# Get specific rule
|
|
80
|
+
rule = rules_manager.get_rule(rule_id)
|
|
81
|
+
if rule is None:
|
|
82
|
+
self.set_status(404)
|
|
83
|
+
self.finish(json.dumps({
|
|
84
|
+
"error": "Rule not found",
|
|
85
|
+
"rule_id": rule_id
|
|
86
|
+
}))
|
|
87
|
+
else:
|
|
88
|
+
self.finish(json.dumps(rule))
|
|
89
|
+
else:
|
|
90
|
+
# Get all rules
|
|
91
|
+
rules = rules_manager.list_rules()
|
|
92
|
+
self.finish(json.dumps({
|
|
93
|
+
"rules": rules,
|
|
94
|
+
"count": len(rules)
|
|
95
|
+
}))
|
|
96
|
+
|
|
97
|
+
except Exception as e:
|
|
98
|
+
self.set_status(500)
|
|
99
|
+
self.finish(json.dumps({
|
|
100
|
+
"error": "Internal server error",
|
|
101
|
+
"message": str(e)
|
|
102
|
+
}))
|
|
103
|
+
|
|
104
|
+
@tornado.web.authenticated
|
|
105
|
+
def post(self, rule_id=None):
|
|
106
|
+
"""Create a new rule or update an existing one."""
|
|
107
|
+
try:
|
|
108
|
+
rules_manager = get_user_rules_manager()
|
|
109
|
+
|
|
110
|
+
if not rules_manager.is_available():
|
|
111
|
+
self.set_status(503)
|
|
112
|
+
self.finish(json.dumps({
|
|
113
|
+
"error": "User rules service not available",
|
|
114
|
+
"message": "Rules directory is not accessible"
|
|
115
|
+
}))
|
|
116
|
+
return
|
|
117
|
+
|
|
118
|
+
# Parse request body
|
|
119
|
+
try:
|
|
120
|
+
body = json.loads(self.request.body.decode('utf-8'))
|
|
121
|
+
except json.JSONDecodeError:
|
|
122
|
+
self.set_status(400)
|
|
123
|
+
self.finish(json.dumps({
|
|
124
|
+
"error": "Invalid JSON in request body"
|
|
125
|
+
}))
|
|
126
|
+
return
|
|
127
|
+
|
|
128
|
+
if rule_id:
|
|
129
|
+
# Update existing rule
|
|
130
|
+
title = body.get('title')
|
|
131
|
+
content = body.get('content')
|
|
132
|
+
description = body.get('description')
|
|
133
|
+
|
|
134
|
+
result = rules_manager.update_rule(
|
|
135
|
+
rule_id=rule_id,
|
|
136
|
+
title=title,
|
|
137
|
+
content=content,
|
|
138
|
+
description=description
|
|
139
|
+
)
|
|
140
|
+
|
|
141
|
+
if result:
|
|
142
|
+
self.finish(json.dumps({
|
|
143
|
+
"success": True,
|
|
144
|
+
"rule": result,
|
|
145
|
+
"message": "Rule updated successfully"
|
|
146
|
+
}))
|
|
147
|
+
else:
|
|
148
|
+
self.set_status(404)
|
|
149
|
+
self.finish(json.dumps({
|
|
150
|
+
"error": "Rule not found",
|
|
151
|
+
"rule_id": rule_id
|
|
152
|
+
}))
|
|
153
|
+
else:
|
|
154
|
+
# Create new rule
|
|
155
|
+
title = body.get('title')
|
|
156
|
+
content = body.get('content', '')
|
|
157
|
+
description = body.get('description', '')
|
|
158
|
+
provided_id = body.get('id')
|
|
159
|
+
|
|
160
|
+
if not title:
|
|
161
|
+
self.set_status(400)
|
|
162
|
+
self.finish(json.dumps({
|
|
163
|
+
"error": "Missing required field: title"
|
|
164
|
+
}))
|
|
165
|
+
return
|
|
166
|
+
|
|
167
|
+
result = rules_manager.create_rule(
|
|
168
|
+
title=title,
|
|
169
|
+
content=content,
|
|
170
|
+
description=description,
|
|
171
|
+
rule_id=provided_id
|
|
172
|
+
)
|
|
173
|
+
|
|
174
|
+
if result:
|
|
175
|
+
self.set_status(201)
|
|
176
|
+
self.finish(json.dumps({
|
|
177
|
+
"success": True,
|
|
178
|
+
"rule": result,
|
|
179
|
+
"message": "Rule created successfully"
|
|
180
|
+
}))
|
|
181
|
+
else:
|
|
182
|
+
self.set_status(500)
|
|
183
|
+
self.finish(json.dumps({
|
|
184
|
+
"error": "Failed to create rule"
|
|
185
|
+
}))
|
|
186
|
+
|
|
187
|
+
except Exception as e:
|
|
188
|
+
self.set_status(500)
|
|
189
|
+
self.finish(json.dumps({
|
|
190
|
+
"error": "Internal server error",
|
|
191
|
+
"message": str(e)
|
|
192
|
+
}))
|
|
193
|
+
|
|
194
|
+
@tornado.web.authenticated
|
|
195
|
+
def delete(self, rule_id=None):
|
|
196
|
+
"""Delete a rule by ID."""
|
|
197
|
+
try:
|
|
198
|
+
rules_manager = get_user_rules_manager()
|
|
199
|
+
|
|
200
|
+
if not rules_manager.is_available():
|
|
201
|
+
self.set_status(503)
|
|
202
|
+
self.finish(json.dumps({
|
|
203
|
+
"error": "User rules service not available",
|
|
204
|
+
"message": "Rules directory is not accessible"
|
|
205
|
+
}))
|
|
206
|
+
return
|
|
207
|
+
|
|
208
|
+
if not rule_id:
|
|
209
|
+
self.set_status(400)
|
|
210
|
+
self.finish(json.dumps({
|
|
211
|
+
"error": "Rule ID is required for deletion"
|
|
212
|
+
}))
|
|
213
|
+
return
|
|
214
|
+
|
|
215
|
+
success = rules_manager.delete_rule(rule_id)
|
|
216
|
+
|
|
217
|
+
if success:
|
|
218
|
+
self.finish(json.dumps({
|
|
219
|
+
"success": True,
|
|
220
|
+
"message": f"Rule '{rule_id}' deleted successfully"
|
|
221
|
+
}))
|
|
222
|
+
else:
|
|
223
|
+
self.set_status(404)
|
|
224
|
+
self.finish(json.dumps({
|
|
225
|
+
"error": "Rule not found",
|
|
226
|
+
"rule_id": rule_id
|
|
227
|
+
}))
|
|
228
|
+
|
|
229
|
+
except Exception as e:
|
|
230
|
+
self.set_status(500)
|
|
231
|
+
self.finish(json.dumps({
|
|
232
|
+
"error": "Internal server error",
|
|
233
|
+
"message": str(e)
|
|
234
|
+
}))
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
class UserRulesInfoHandler(APIHandler):
|
|
238
|
+
"""Handler for user rules service information."""
|
|
239
|
+
|
|
240
|
+
@tornado.web.authenticated
|
|
241
|
+
def get(self):
|
|
242
|
+
"""Get user rules service information."""
|
|
243
|
+
try:
|
|
244
|
+
rules_manager = get_user_rules_manager()
|
|
245
|
+
info = rules_manager.get_info()
|
|
246
|
+
self.finish(json.dumps(info))
|
|
247
|
+
except Exception as e:
|
|
248
|
+
self.set_status(500)
|
|
249
|
+
self.finish(json.dumps({
|
|
250
|
+
"error": "Internal server error",
|
|
251
|
+
"message": str(e)
|
|
252
|
+
}))
|
|
253
|
+
|
|
254
|
+
|
|
45
255
|
class ReadAllFilesHandler(APIHandler):
|
|
46
256
|
"""Handler for reading all notebook and data files in the workspace"""
|
|
47
257
|
|
|
48
258
|
# Common data file extensions
|
|
49
|
-
DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
|
|
259
|
+
DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet',
|
|
50
260
|
'.feather', '.hdf5', '.h5', '.sql', '.db', '.sqlite', '.tsv', '.txt'}
|
|
51
261
|
|
|
52
262
|
# Directories to exclude from search
|
|
@@ -192,9 +402,9 @@ class ReadAllFilesHandler(APIHandler):
|
|
|
192
402
|
# Search for data file references
|
|
193
403
|
# Common patterns: pd.read_csv('file.csv'), open('file.csv'), 'path/to/file.csv'
|
|
194
404
|
patterns = [
|
|
195
|
-
r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|
|
|
196
|
-
r'read_(?:csv|json|excel|parquet|
|
|
197
|
-
r'to_(?:csv|json|excel|parquet|
|
|
405
|
+
r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|feather|hdf5|h5|sql|db|sqlite|tsv|txt))["\']',
|
|
406
|
+
r'read_(?:csv|json|excel|parquet|feather|hdf|sql|table)\(["\']([^"\']+)["\']',
|
|
407
|
+
r'to_(?:csv|json|excel|parquet|feather|hdf|sql)\(["\']([^"\']+)["\']',
|
|
198
408
|
]
|
|
199
409
|
|
|
200
410
|
found_references = set()
|
|
@@ -451,18 +661,72 @@ class ScannedDirectoriesHandler(APIHandler):
|
|
|
451
661
|
|
|
452
662
|
|
|
453
663
|
class WorkDirHandler(APIHandler):
|
|
454
|
-
"""Handler for returning current working directory"""
|
|
664
|
+
"""Handler for returning current working directory and setup manager type"""
|
|
455
665
|
|
|
456
666
|
@tornado.web.authenticated
|
|
457
667
|
def get(self):
|
|
458
668
|
try:
|
|
459
|
-
|
|
669
|
+
# Detect the setup manager type
|
|
670
|
+
setup_manager = self._detect_setup_manager()
|
|
671
|
+
|
|
672
|
+
self.finish(json.dumps({
|
|
673
|
+
"workdir": os.getcwd(),
|
|
674
|
+
"setupManager": setup_manager
|
|
675
|
+
}))
|
|
460
676
|
except Exception as e:
|
|
461
677
|
self.set_status(500)
|
|
462
678
|
self.finish(json.dumps({
|
|
463
679
|
"error": str(e)
|
|
464
680
|
}))
|
|
465
681
|
|
|
682
|
+
def _detect_setup_manager(self) -> str:
|
|
683
|
+
"""
|
|
684
|
+
Detect the Python environment manager being used.
|
|
685
|
+
Returns: 'conda', 'venv', 'uv', or 'system'
|
|
686
|
+
"""
|
|
687
|
+
env_type, _ = self._detect_kernel_env()
|
|
688
|
+
return env_type
|
|
689
|
+
|
|
690
|
+
def _detect_kernel_env(self):
|
|
691
|
+
"""
|
|
692
|
+
Detect the actual environment running this kernel,
|
|
693
|
+
not what shell env vars claim.
|
|
694
|
+
"""
|
|
695
|
+
import sys
|
|
696
|
+
|
|
697
|
+
prefix = Path(sys.prefix)
|
|
698
|
+
executable = Path(sys.executable)
|
|
699
|
+
|
|
700
|
+
# Check pyvenv.cfg first — exists for venv/uv, not conda
|
|
701
|
+
pyvenv_cfg = prefix / 'pyvenv.cfg'
|
|
702
|
+
if pyvenv_cfg.exists():
|
|
703
|
+
try:
|
|
704
|
+
content = pyvenv_cfg.read_text().lower()
|
|
705
|
+
if 'uv' in content:
|
|
706
|
+
return 'uv', str(prefix)
|
|
707
|
+
return 'venv', str(prefix)
|
|
708
|
+
except (IOError, OSError):
|
|
709
|
+
return 'venv', str(prefix)
|
|
710
|
+
|
|
711
|
+
# No pyvenv.cfg — check if we're in a conda env
|
|
712
|
+
# Conda envs live under <conda_root>/envs/<name>/ or are the base env
|
|
713
|
+
# Key marker: conda-meta directory exists
|
|
714
|
+
conda_meta = prefix / 'conda-meta'
|
|
715
|
+
if conda_meta.exists():
|
|
716
|
+
# Extract env name from path
|
|
717
|
+
if 'envs' in prefix.parts:
|
|
718
|
+
idx = prefix.parts.index('envs')
|
|
719
|
+
env_name = prefix.parts[idx + 1] if len(prefix.parts) > idx + 1 else 'base'
|
|
720
|
+
else:
|
|
721
|
+
env_name = 'base'
|
|
722
|
+
return 'conda', env_name
|
|
723
|
+
|
|
724
|
+
# Fallback: check if prefix differs from base (generic venv)
|
|
725
|
+
if sys.prefix != sys.base_prefix:
|
|
726
|
+
return 'venv', str(prefix)
|
|
727
|
+
|
|
728
|
+
return 'system', str(prefix)
|
|
729
|
+
|
|
466
730
|
|
|
467
731
|
class TerminalExecuteHandler(APIHandler):
|
|
468
732
|
"""Handler for executing terminal commands"""
|
|
@@ -490,9 +754,15 @@ class TerminalExecuteHandler(APIHandler):
|
|
|
490
754
|
timeout=300
|
|
491
755
|
)
|
|
492
756
|
|
|
493
|
-
def truncate_output(output: str, max_lines: int = 50) -> str:
|
|
757
|
+
def truncate_output(output: str, max_lines: int = 50, max_chars: int = 20000) -> str:
|
|
494
758
|
if not output:
|
|
495
759
|
return output
|
|
760
|
+
# First, truncate by character limit
|
|
761
|
+
if len(output) > max_chars:
|
|
762
|
+
half = max_chars // 2
|
|
763
|
+
truncated_chars = len(output) - max_chars
|
|
764
|
+
output = output[:half] + f'\n... {truncated_chars} characters truncated ...\n' + output[-half:]
|
|
765
|
+
# Then, truncate by line count
|
|
496
766
|
lines = output.splitlines()
|
|
497
767
|
if len(lines) <= max_lines * 2:
|
|
498
768
|
return output
|
|
@@ -780,6 +1050,25 @@ def setup_handlers(web_app):
|
|
|
780
1050
|
mcp_server_disable_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "servers", "([^/]+)", "disable")
|
|
781
1051
|
mcp_config_file_route = url_path_join(base_url, "signalpilot-ai-internal", "mcp", "config-file")
|
|
782
1052
|
|
|
1053
|
+
# Composio integration endpoints
|
|
1054
|
+
integrations_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations")
|
|
1055
|
+
integration_connect_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "connect")
|
|
1056
|
+
integration_complete_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "complete")
|
|
1057
|
+
integration_status_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "status")
|
|
1058
|
+
integration_refresh_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)", "refresh")
|
|
1059
|
+
integration_disconnect_route = url_path_join(base_url, "signalpilot-ai-internal", "integrations", "([^/]+)")
|
|
1060
|
+
|
|
1061
|
+
# Database config endpoints (db.toml)
|
|
1062
|
+
db_configs_route = url_path_join(base_url, "signalpilot-ai-internal", "db-configs")
|
|
1063
|
+
db_configs_type_route = url_path_join(base_url, "signalpilot-ai-internal", "db-configs", "([^/]+)")
|
|
1064
|
+
db_defaults_route = url_path_join(base_url, "signalpilot-ai-internal", "db-defaults")
|
|
1065
|
+
signalpilot_home_info_route = url_path_join(base_url, "signalpilot-ai-internal", "home-info")
|
|
1066
|
+
|
|
1067
|
+
# User rules endpoints (markdown files in user-rules/)
|
|
1068
|
+
user_rules_route = url_path_join(base_url, "signalpilot-ai-internal", "rules")
|
|
1069
|
+
user_rule_route = url_path_join(base_url, "signalpilot-ai-internal", "rules", "([^/]+)")
|
|
1070
|
+
user_rules_info_route = url_path_join(base_url, "signalpilot-ai-internal", "rules-info")
|
|
1071
|
+
|
|
783
1072
|
handlers = [
|
|
784
1073
|
# Original endpoint
|
|
785
1074
|
(hello_route, HelloWorldHandler),
|
|
@@ -845,6 +1134,26 @@ def setup_handlers(web_app):
|
|
|
845
1134
|
(mcp_tool_enable_route, MCPToolEnableHandler),
|
|
846
1135
|
(mcp_server_enable_route, MCPServerEnableHandler),
|
|
847
1136
|
(mcp_server_disable_route, MCPServerDisableHandler),
|
|
1137
|
+
|
|
1138
|
+
# Composio integration endpoints
|
|
1139
|
+
# Note: More specific routes should come before parameterized routes
|
|
1140
|
+
(integrations_route, IntegrationsHandler),
|
|
1141
|
+
(integration_connect_route, IntegrationConnectHandler),
|
|
1142
|
+
(integration_complete_route, IntegrationCompleteHandler),
|
|
1143
|
+
(integration_status_route, IntegrationStatusHandler),
|
|
1144
|
+
(integration_refresh_route, IntegrationRefreshHandler),
|
|
1145
|
+
(integration_disconnect_route, IntegrationDisconnectHandler),
|
|
1146
|
+
|
|
1147
|
+
# Database config endpoints (db.toml in cache_dir/connect/)
|
|
1148
|
+
(db_configs_route, DatabaseConfigsHandler),
|
|
1149
|
+
(db_configs_type_route, DatabaseConfigsHandler),
|
|
1150
|
+
(db_defaults_route, DatabaseDefaultsHandler),
|
|
1151
|
+
(signalpilot_home_info_route, SignalPilotHomeInfoHandler),
|
|
1152
|
+
|
|
1153
|
+
# User rules endpoints (markdown files in user-rules/)
|
|
1154
|
+
(user_rules_route, UserRulesHandler),
|
|
1155
|
+
(user_rule_route, UserRulesHandler),
|
|
1156
|
+
(user_rules_info_route, UserRulesInfoHandler),
|
|
848
1157
|
]
|
|
849
1158
|
|
|
850
1159
|
web_app.add_handlers(host_pattern, handlers)
|
|
@@ -868,7 +1177,7 @@ def setup_handlers(web_app):
|
|
|
868
1177
|
# Register cleanup with web app
|
|
869
1178
|
import atexit
|
|
870
1179
|
atexit.register(cleanup_mcp_servers)
|
|
871
|
-
|
|
1180
|
+
|
|
872
1181
|
print("SignalPilot AI backend handlers registered:")
|
|
873
1182
|
print(f" - Hello World: {hello_route}")
|
|
874
1183
|
print(f" - Read All Files: {read_all_files_route}")
|
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Integration Configuration for Composio OAuth Apps
|
|
3
|
+
This file contains static configuration for integrations with MCP servers.
|
|
4
|
+
IMPORTANT: This file is NOT exposed to the frontend for security reasons.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import base64
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
from typing import Dict, List, Any
|
|
11
|
+
|
|
12
|
+
logger = logging.getLogger(__name__)
|
|
13
|
+
|
|
14
|
+
# Integration configuration - defines how to set up MCP servers after OAuth
|
|
15
|
+
INTEGRATION_CONFIG: Dict[str, Dict[str, Any]] = {
|
|
16
|
+
'notion': {
|
|
17
|
+
'id': 'notion',
|
|
18
|
+
'name': 'Notion',
|
|
19
|
+
'description': 'Connect to Notion workspaces for reading and searching pages',
|
|
20
|
+
'mcp_server_id': 'notion-integration',
|
|
21
|
+
'mcp_server_name': 'Notion (Composio)',
|
|
22
|
+
'mcp_command': 'npx',
|
|
23
|
+
'mcp_args': ['-y', '@notionhq/notion-mcp-server'],
|
|
24
|
+
# Maps Composio credential fields to MCP server env vars
|
|
25
|
+
'env_mapping': {
|
|
26
|
+
'access_token': 'NOTION_TOKEN',
|
|
27
|
+
},
|
|
28
|
+
'whitelisted_tools': [
|
|
29
|
+
'API-post-search',
|
|
30
|
+
'API-get-block-children',
|
|
31
|
+
'API-retrieve-a-page',
|
|
32
|
+
'API-retrieve-a-database',
|
|
33
|
+
'API-post-database-query',
|
|
34
|
+
],
|
|
35
|
+
},
|
|
36
|
+
'slack': {
|
|
37
|
+
'id': 'slack',
|
|
38
|
+
'name': 'Slack',
|
|
39
|
+
'description': 'Connect to Slack workspaces for searching and reading messages',
|
|
40
|
+
'mcp_server_id': 'slack-integration',
|
|
41
|
+
'mcp_server_name': 'Slack (Composio)',
|
|
42
|
+
# Note: This requires the slack-mcp-server to be installed
|
|
43
|
+
# Using npx with a placeholder - user may need to adjust based on their setup
|
|
44
|
+
'mcp_command': 'npx',
|
|
45
|
+
'mcp_args': ['-y', 'slack-mcp-server@latest', '--transport', 'stdio'],
|
|
46
|
+
# Maps Composio credential fields to MCP server env vars
|
|
47
|
+
'env_mapping': {
|
|
48
|
+
'access_token': 'SLACK_MCP_XOXP_TOKEN',
|
|
49
|
+
},
|
|
50
|
+
'whitelisted_tools': [
|
|
51
|
+
'conversations_search_messages',
|
|
52
|
+
'conversations_history',
|
|
53
|
+
'conversations_replies',
|
|
54
|
+
'channels_list',
|
|
55
|
+
],
|
|
56
|
+
},
|
|
57
|
+
'google': {
|
|
58
|
+
'id': 'google',
|
|
59
|
+
'name': 'Google Docs',
|
|
60
|
+
'description': 'Connect to Google Drive and Docs for searching and reading documents',
|
|
61
|
+
'mcp_server_id': 'google-integration',
|
|
62
|
+
'mcp_server_name': 'Google Docs (Composio)',
|
|
63
|
+
# Note: This requires uvx to be installed
|
|
64
|
+
# --single-user makes USER_GOOGLE_EMAIL the default, skipping email in tool calls
|
|
65
|
+
'mcp_command': 'uvx',
|
|
66
|
+
'mcp_args': ['workspace-mcp', '--tools', 'drive', 'docs', '--single-user'],
|
|
67
|
+
# Maps Composio credential fields to MCP server env vars
|
|
68
|
+
'env_mapping': {
|
|
69
|
+
'client_id': 'GOOGLE_OAUTH_CLIENT_ID',
|
|
70
|
+
'client_secret': 'GOOGLE_OAUTH_CLIENT_SECRET',
|
|
71
|
+
'access_token': 'GOOGLE_ACCESS_TOKEN',
|
|
72
|
+
'refresh_token': 'GOOGLE_REFRESH_TOKEN',
|
|
73
|
+
'email': 'USER_GOOGLE_EMAIL', # User's Google email from OAuth
|
|
74
|
+
},
|
|
75
|
+
'whitelisted_tools': [
|
|
76
|
+
'start_google_auth',
|
|
77
|
+
'search_docs',
|
|
78
|
+
'get_doc_content',
|
|
79
|
+
'list_docs_in_folder',
|
|
80
|
+
'inspect_doc_structure',
|
|
81
|
+
'read_document_comments',
|
|
82
|
+
'create_document_comment',
|
|
83
|
+
'reply_to_document_comment',
|
|
84
|
+
'resolve_document_comment',
|
|
85
|
+
'search_drive_files',
|
|
86
|
+
'list_drive_items',
|
|
87
|
+
'get_drive_file_content',
|
|
88
|
+
'get_drive_file_download_url',
|
|
89
|
+
'list_drive_items_in_folder',
|
|
90
|
+
],
|
|
91
|
+
},
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def get_integration_config(integration_id: str) -> Dict[str, Any] | None:
|
|
96
|
+
"""Get configuration for a specific integration."""
|
|
97
|
+
return INTEGRATION_CONFIG.get(integration_id)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def get_all_integration_ids() -> List[str]:
|
|
101
|
+
"""Get list of all integration IDs."""
|
|
102
|
+
return list(INTEGRATION_CONFIG.keys())
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def get_integration_info_for_frontend() -> List[Dict[str, str]]:
|
|
106
|
+
"""
|
|
107
|
+
Get integration info safe to expose to frontend.
|
|
108
|
+
Only includes id, name, and description - no MCP commands or env mappings.
|
|
109
|
+
"""
|
|
110
|
+
return [
|
|
111
|
+
{
|
|
112
|
+
'id': config['id'],
|
|
113
|
+
'name': config['name'],
|
|
114
|
+
'description': config['description'],
|
|
115
|
+
}
|
|
116
|
+
for config in INTEGRATION_CONFIG.values()
|
|
117
|
+
]
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def get_mcp_server_config(integration_id: str, credentials: Dict[str, Any]) -> Dict[str, Any] | None:
    """
    Generate MCP server configuration for an integration.

    Maps credentials from Composio to the appropriate env vars for the MCP server.

    DEPRECATED: Use get_mcp_server_config_for_storage() and store tokens separately.
    """
    config = INTEGRATION_CONFIG.get(integration_id)
    if config is None:
        return None

    # Translate each credential field we know about into its MCP env var.
    env = {
        env_var: credentials[cred_field]
        for cred_field, env_var in config['env_mapping'].items()
        if cred_field in credentials
    }

    return {
        'id': config['mcp_server_id'],
        'name': config['mcp_server_name'],
        'command': config['mcp_command'],
        'args': config['mcp_args'],
        'env': env,
        'enabled': True,
        'enabledTools': config['whitelisted_tools'],
    }
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def get_mcp_server_config_for_storage(integration_id: str) -> Dict[str, Any] | None:
    """
    Generate MCP server configuration for storage (WITHOUT credentials).

    Tokens should be stored separately using OAuthTokenStore and injected at runtime.

    Returns config suitable for mcp.json without sensitive data.
    """
    config = INTEGRATION_CONFIG.get(integration_id)
    if config is None:
        return None

    stored_config = {
        'id': config['mcp_server_id'],
        'name': config['mcp_server_name'],
        'command': config['mcp_command'],
        'args': config['mcp_args'],
        # No env vars with tokens - they're stored securely elsewhere
        'enabled': True,
        'enabledTools': config['whitelisted_tools'],
        # Mark as OAuth integration so MCP service knows to look up tokens
        'isOAuthIntegration': True,
    }
    return stored_config
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _extract_email_from_id_token(id_token: str) -> str | None:
    """
    Extract email from a Google OAuth id_token (JWT).

    The id_token is a JWT with three parts: header.payload.signature
    We decode the payload to get the email claim.
    """
    try:
        segments = id_token.split('.')
        if len(segments) != 3:
            # Not a well-formed JWT.
            return None

        # base64url payloads often arrive without '=' padding; restore it
        # so urlsafe_b64decode accepts the string.
        encoded_payload = segments[1]
        remainder = len(encoded_payload) % 4
        if remainder:
            encoded_payload += '=' * (4 - remainder)

        claims = json.loads(base64.urlsafe_b64decode(encoded_payload))

        email = claims.get('email')
        if not email:
            return None
        logger.debug(f"[Integrations] Extracted email from id_token: {email}")
        return email
    except Exception as e:
        logger.warning(f"[Integrations] Failed to extract email from id_token: {e}")
        return None
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def build_env_from_credentials(integration_id: str, credentials: Dict[str, Any]) -> Dict[str, str]:
    """
    Build environment variables from Composio credentials.

    Args:
        integration_id: The integration ID
        credentials: Credentials from Composio

    Returns:
        Dict of environment variable name -> value
    """
    config = INTEGRATION_CONFIG.get(integration_id)
    if config is None:
        return {}

    env = {
        env_var: credentials[cred_field]
        for cred_field, env_var in config['env_mapping'].items()
        if cred_field in credentials
    }

    # Google needs the user's email; when the mapping above did not supply
    # it, fall back to alternate credential field names, then the id_token.
    if integration_id == 'google' and 'USER_GOOGLE_EMAIL' not in env:
        email = next(
            (
                credentials[field_name]
                for field_name in ('email', 'userEmail', 'user_email', 'Email')
                if field_name in credentials
            ),
            None,
        )

        if not email:
            id_token = credentials.get('id_token')
            if id_token:
                email = _extract_email_from_id_token(id_token)

        if email:
            env['USER_GOOGLE_EMAIL'] = email
        else:
            logger.warning(f"[Integrations] Could not find email in Google credentials. Keys: {list(credentials.keys())}")

    return env
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def get_mcp_server_id_for_integration(integration_id: str) -> str | None:
    """Get the MCP server ID for an integration."""
    config = INTEGRATION_CONFIG.get(integration_id)
    return config['mcp_server_id'] if config else None
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logging utilities for SignalPilot AI.
|
|
3
|
+
Provides controlled print function that respects SIGNALPILOT_DEBUG environment variable.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import builtins
|
|
7
|
+
import os
|
|
8
|
+
|
|
9
|
+
# Debug mode controlled by environment variable
|
|
10
|
+
# Set SIGNALPILOT_DEBUG=1 to enable verbose logging (disabled by default)
|
|
11
|
+
_DEBUG = os.environ.get('SIGNALPILOT_DEBUG', '0').lower() not in ('0', 'false', 'no')
|
|
12
|
+
|
|
13
|
+
# Store the original print function
|
|
14
|
+
_original_print = builtins.print
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def print(*args, **kwargs) -> None:
|
|
18
|
+
"""Conditionally print messages based on debug mode.
|
|
19
|
+
|
|
20
|
+
Only prints when SIGNALPILOT_DEBUG environment variable is not set to 0/false/no.
|
|
21
|
+
Error messages (containing 'ERROR') are always printed regardless of debug mode.
|
|
22
|
+
"""
|
|
23
|
+
message = ' '.join(str(arg) for arg in args)
|
|
24
|
+
# Always print errors, otherwise only print in debug mode
|
|
25
|
+
if 'ERROR' in message or _DEBUG:
|
|
26
|
+
_original_print(*args, **kwargs)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def is_debug_enabled() -> bool:
|
|
30
|
+
"""Check if debug mode is enabled."""
|
|
31
|
+
return _DEBUG
|