signalpilot-ai-internal 0.3.2__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in their respective public registries.

Potentially problematic release: this version of signalpilot-ai-internal might be problematic.

Files changed (46):
  1. signalpilot_ai_internal/_version.py +1 -1
  2. signalpilot_ai_internal/cache_handlers.py +383 -0
  3. signalpilot_ai_internal/cache_service.py +552 -0
  4. signalpilot_ai_internal/handlers.py +35 -915
  5. signalpilot_ai_internal/snowflake_schema_service.py +639 -0
  6. signalpilot_ai_internal/unified_database_schema_service.py +742 -0
  7. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +3 -2
  8. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +2 -1
  9. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +1 -0
  10. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.0fea0d444fc7ba458d5a.js +1 -0
  11. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.c61f5bc4d0da4a0781d6.js +1 -0
  12. signalpilot_ai_internal-0.3.3.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.2e2c6ae0baa591126b0a.js +1 -0
  13. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +6 -0
  14. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/METADATA +1 -1
  15. signalpilot_ai_internal-0.3.3.dist-info/RECORD +45 -0
  16. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.8d3d5d0480ba7396f2f5.js +0 -1
  17. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.5a362da0c4b891e005b3.js +0 -1
  18. signalpilot_ai_internal-0.3.2.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.57019ad0ad044a0f8ad8.js +0 -1
  19. signalpilot_ai_internal-0.3.2.dist-info/RECORD +0 -40
  20. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
  21. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
  22. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
  23. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js +0 -0
  24. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js.LICENSE.txt +0 -0
  25. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
  26. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
  27. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
  28. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js +0 -0
  29. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js.LICENSE.txt +0 -0
  30. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -0
  31. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
  32. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
  33. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -0
  34. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
  35. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
  36. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
  37. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
  38. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/606.90aaaae46b73dc3c08fb.js +0 -0
  39. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
  40. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
  41. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
  42. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -0
  43. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
  44. {signalpilot_ai_internal-0.3.2.data → signalpilot_ai_internal-0.3.3.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
  45. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/WHEEL +0 -0
  46. {signalpilot_ai_internal-0.3.2.dist-info → signalpilot_ai_internal-0.3.3.dist-info}/licenses/LICENSE +0 -0
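
Of the files above, the new signalpilot_ai_internal/unified_database_schema_service.py accounts for the bulk of the change; its full diff is shown below, followed by two usage sketches.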
signalpilot_ai_internal/unified_database_schema_service.py (new file)
@@ -0,0 +1,742 @@
+ """
+ Unified database schema service handlers for SignalPilot AI.
+ Provides REST API handlers for PostgreSQL and MySQL database schema retrieval and query execution.
+ """
+
+ import json
+ import os
+ import subprocess
+ import sys
+ from typing import Any, Dict, Optional
+
+ from jupyter_server.base.handlers import APIHandler
+ import tornado
+
+
+ class UnifiedDatabaseSchemaHandler(APIHandler):
+     """Handler for unified database schema operations (PostgreSQL and MySQL)"""
+
+     def _setup_database_environment(self, db_type: str = 'postgresql'):
+         """Install required database packages if not available"""
+         def install_package(package_name):
+             try:
+                 subprocess.check_call([sys.executable, "-m", "pip", "install", package_name])
+                 return True
+             except subprocess.CalledProcessError:
+                 return False
+
+         missing_packages = []
+
+         try:
+             import sqlalchemy
+             from sqlalchemy import create_engine, text
+         except ImportError:
+             if install_package("sqlalchemy"):
+                 try:
+                     import sqlalchemy
+                     from sqlalchemy import create_engine, text
+                 except ImportError as e:
+                     missing_packages.append(f"sqlalchemy: {str(e)}")
+             else:
+                 missing_packages.append("sqlalchemy: installation failed")
+
+         # Install database-specific drivers
+         if db_type == 'mysql':
+             try:
+                 import pymysql
+             except ImportError:
+                 if install_package("pymysql"):
+                     try:
+                         import pymysql
+                     except ImportError as e:
+                         missing_packages.append(f"pymysql: {str(e)}")
+                 else:
+                     missing_packages.append("pymysql: installation failed")
+         else:  # postgresql
+             try:
+                 import psycopg2
+             except ImportError:
+                 if install_package("psycopg2-binary"):
+                     try:
+                         import psycopg2
+                     except ImportError as e:
+                         missing_packages.append(f"psycopg2: {str(e)}")
+                 else:
+                     missing_packages.append("psycopg2: installation failed")
+
+         if missing_packages:
+             raise ImportError("Required modules could not be installed: " + ", ".join(missing_packages))
+
+         from sqlalchemy import create_engine, text
+         return create_engine, text
+
+     def _detect_database_type(self, db_url: str) -> str:
+         """Detect database type from connection URL"""
+         if db_url.startswith('mysql'):
+             return 'mysql'
+         elif db_url.startswith('postgresql') or db_url.startswith('postgres'):
+             return 'postgresql'
+         else:
+             # Default to postgresql for backward compatibility
+             return 'postgresql'
+
+     def _get_database_url(self, provided_url: Optional[str] = None, db_type: Optional[str] = None) -> tuple[Optional[str], str]:
+         """Get database URL from request or environment variables
+         Returns: (db_url, db_type)
+         """
+         if provided_url:
+             detected_type = self._detect_database_type(provided_url)
+             return provided_url, detected_type
+
+         # First try the new multi-database environment format
+         # Look for any database configuration in the environment
+         db_configs = {}
+
+         # Scan environment for database configurations
+         for key, value in os.environ.items():
+             if key.endswith('_CONNECTION_JSON'):
+                 try:
+                     config = json.loads(value)
+                     db_name = key.replace('_CONNECTION_JSON', '')
+                     db_configs[db_name] = config
+
+                     # Use the first available database configuration matching the requested type
+                     # Or use the first one if no specific type requested
+                     config_type = config.get('type', 'postgresql')
+                     if db_type is None or config_type == db_type:
+                         db_url = None  # initialize so an unsupported config type cannot raise NameError below
+                         if 'connectionUrl' in config:
+                             db_url = config['connectionUrl']
+                         else:
+                             # Build connection URL from components
+                             if config_type == 'postgresql':
+                                 db_url = f"postgresql://{config['username']}:{config['password']}@{config['host']}:{config['port']}/{config['database']}"
+                             elif config_type == 'mysql':
+                                 db_url = f"mysql+pymysql://{config['username']}:{config['password']}@{config['host']}:{config['port']}/{config['database']}"
+
+                         if db_url:
+                             return db_url, config_type
+                 except Exception as e:
+                     print(f"[UnifiedDatabaseSchemaHandler] Error parsing database config {key}: {e}")
+                     continue
+
+         # Fallback to legacy DB_URL environment variable if no multi-db config found
+         db_url = os.environ.get('DB_URL')
+         if db_url:
+             detected_type = self._detect_database_type(db_url)
+             return db_url, detected_type
+
+         return None, db_type or 'postgresql'
+
+     def _get_mysql_schema(self, conn, text):
+         """Get MySQL database schema"""
+         # Get the current database name
+         db_name_result = conn.execute(text("SELECT DATABASE()"))
+         current_db = db_name_result.scalar()
+
+         # Get all tables from the current database
+         tables_query = """
+             SELECT TABLE_SCHEMA, TABLE_NAME
+             FROM information_schema.TABLES
+             WHERE TABLE_TYPE = 'BASE TABLE'
+               AND TABLE_SCHEMA = :schema
+             ORDER BY TABLE_NAME
+             LIMIT 50;
+         """
+
+         tables_result = conn.execute(text(tables_query), {"schema": current_db})
+         tables = [dict(row._mapping) for row in tables_result]
+
+         if not tables:
+             return {
+                 "result": "Database connected successfully, but no tables found.",
+                 "table_schemas": {}
+             }
+
+         # Start building markdown formatted output
+         markdown_output = f"# MySQL Database Schema: {current_db}\n\nFound **{len(tables)}** table(s)\n\n"
+
+         # Store individual table schemas for mention context
+         table_schemas = {}
+
+         # Get detailed information for each table
+         for table in tables:
+             table_schema = table['TABLE_SCHEMA']
+             table_name = table['TABLE_NAME']
+             full_table_name = f"{table_schema}.{table_name}"
+
+             # Get columns
+             columns_query = """
+                 SELECT
+                     COLUMN_NAME as column_name,
+                     DATA_TYPE as data_type,
+                     IS_NULLABLE as is_nullable,
+                     COLUMN_DEFAULT as column_default,
+                     CHARACTER_MAXIMUM_LENGTH as character_maximum_length,
+                     NUMERIC_PRECISION as numeric_precision,
+                     NUMERIC_SCALE as numeric_scale,
+                     COLUMN_KEY as column_key,
+                     EXTRA as extra
+                 FROM information_schema.COLUMNS
+                 WHERE TABLE_SCHEMA = :schema AND TABLE_NAME = :table
+                 ORDER BY ORDINAL_POSITION
+                 LIMIT 30;
+             """
+
+             columns_result = conn.execute(text(columns_query),
+                                           {"schema": table_schema, "table": table_name})
+             columns = [dict(row._mapping) for row in columns_result]
+
+             # Get primary keys
+             pk_query = """
+                 SELECT COLUMN_NAME
+                 FROM information_schema.KEY_COLUMN_USAGE
+                 WHERE TABLE_SCHEMA = :schema
+                   AND TABLE_NAME = :table
+                   AND CONSTRAINT_NAME = 'PRIMARY'
+                 ORDER BY ORDINAL_POSITION;
+             """
+
+             pk_result = conn.execute(text(pk_query),
+                                      {"schema": table_schema, "table": table_name})
+             primary_keys = [row[0] for row in pk_result]
+
+             # Get foreign keys
+             fk_query = """
+                 SELECT
+                     COLUMN_NAME as column_name,
+                     REFERENCED_TABLE_SCHEMA as foreign_table_schema,
+                     REFERENCED_TABLE_NAME as foreign_table_name,
+                     REFERENCED_COLUMN_NAME as foreign_column_name
+                 FROM information_schema.KEY_COLUMN_USAGE
+                 WHERE TABLE_SCHEMA = :schema
+                   AND TABLE_NAME = :table
+                   AND REFERENCED_TABLE_NAME IS NOT NULL
+                 ORDER BY ORDINAL_POSITION;
+             """
+
+             fk_result = conn.execute(text(fk_query),
+                                      {"schema": table_schema, "table": table_name})
+             foreign_keys = [dict(row._mapping) for row in fk_result]
+
+             # Get indices (excluding primary key)
+             index_query = """
+                 SELECT
+                     INDEX_NAME as index_name,
+                     GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) as columns,
+                     INDEX_TYPE as index_type,
+                     NON_UNIQUE as non_unique
+                 FROM information_schema.STATISTICS
+                 WHERE TABLE_SCHEMA = :schema
+                   AND TABLE_NAME = :table
+                   AND INDEX_NAME != 'PRIMARY'
+                 GROUP BY INDEX_NAME, INDEX_TYPE, NON_UNIQUE
+                 ORDER BY INDEX_NAME;
+             """
+
+             index_result = conn.execute(text(index_query),
+                                         {"schema": table_schema, "table": table_name})
+             indices = [dict(row._mapping) for row in index_result]
+
+             # Store individual table schema for mention context
+             table_info = {
+                 'schema': table_schema,
+                 'table_name': table_name,
+                 'full_name': full_table_name,
+                 'columns': [dict(col) for col in columns],
+                 'primary_keys': primary_keys,
+                 'foreign_keys': [dict(fk) for fk in foreign_keys],
+                 'indices': [dict(idx) for idx in indices]
+             }
+             table_schemas[full_table_name] = table_info
+
+             # Build table section
+             markdown_output += f"## {full_table_name}\n\n"
+
+             # Columns section
+             markdown_output += f"### Columns ({len(columns)})\n\n"
+             for col in columns:
+                 col_name = col['column_name']
+                 data_type = col['data_type']
+
+                 # Format data type with precision/scale
+                 if col['character_maximum_length']:
+                     data_type += f"({col['character_maximum_length']})"
+                 elif col['numeric_precision'] and col['numeric_scale'] is not None:
+                     data_type += f"({col['numeric_precision']},{col['numeric_scale']})"
+                 elif col['numeric_precision']:
+                     data_type += f"({col['numeric_precision']})"
+
+                 # Add constraints
+                 constraints = []
+                 if col['is_nullable'] == 'NO':
+                     constraints.append("NOT NULL")
+                 if col['column_default']:
+                     constraints.append(f"DEFAULT {col['column_default']}")
+                 if col['column_key'] == 'PRI':
+                     constraints.append("PRIMARY KEY")
+                 if col['extra'] and 'auto_increment' in col['extra'].lower():
+                     constraints.append("AUTO_INCREMENT")
+
+                 constraint_text = f" ({', '.join(constraints)})" if constraints else ""
+
+                 markdown_output += f"- **{col_name}**: {data_type}{constraint_text}\n"
+
+             # Primary keys section
+             if primary_keys:
+                 markdown_output += f"\n### Primary Keys\n\n"
+                 markdown_output += f"- {', '.join([f'**{pk}**' for pk in primary_keys])}\n"
+
+             # Foreign keys section
+             if foreign_keys:
+                 markdown_output += f"\n### Foreign Keys\n\n"
+                 for fk in foreign_keys:
+                     markdown_output += f"- **{fk['column_name']}** → {fk['foreign_table_schema']}.{fk['foreign_table_name']}({fk['foreign_column_name']})\n"
+
+             # Indices section
+             if indices:
+                 markdown_output += f"\n### Indices\n\n"
+                 for idx in indices:
+                     unique_text = "" if idx['non_unique'] else "UNIQUE "
+                     markdown_output += f"- **{idx['index_name']}**: {unique_text}{idx['index_type']} on ({idx['columns']})\n"
+
+             markdown_output += "\n---\n\n"
+
+         return {
+             "result": markdown_output.strip(),
+             "table_schemas": table_schemas
+         }
+
+     def _get_postgresql_schema(self, conn, text):
+         """Get PostgreSQL database schema"""
+         # Get all tables from public and custom schemas (excluding system schemas)
+         tables_query = """
+             SELECT table_schema, table_name
+             FROM information_schema.tables
+             WHERE table_type = 'BASE TABLE'
+               AND table_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast')
+             ORDER BY table_schema, table_name
+             LIMIT 50;
+         """
+
+         tables_result = conn.execute(text(tables_query))
+         tables = [dict(row._mapping) for row in tables_result]
+
+         if not tables:
+             return {
+                 "result": "Database connected successfully, but no tables found.",
+                 "table_schemas": {}
+             }
+
+         # Start building markdown formatted output
+         markdown_output = f"# PostgreSQL Database Schema\n\nFound **{len(tables)}** table(s)\n\n"
+
+         # Store individual table schemas for mention context
+         table_schemas = {}
+
+         # Get detailed information for each table
+         for table in tables:
+             table_schema = table['table_schema']
+             table_name = table['table_name']
+             full_table_name = f"{table_schema}.{table_name}"
+
+             # Get columns
+             columns_query = """
+                 SELECT
+                     column_name,
+                     data_type,
+                     is_nullable,
+                     column_default,
+                     character_maximum_length,
+                     numeric_precision,
+                     numeric_scale
+                 FROM information_schema.columns
+                 WHERE table_schema = :schema AND table_name = :table
+                 ORDER BY ordinal_position
+                 LIMIT 30;
+             """
+
+             columns_result = conn.execute(text(columns_query),
+                                           {"schema": table_schema, "table": table_name})
+             columns = [dict(row._mapping) for row in columns_result]
+
+             # Get primary keys
+             pk_query = """
+                 SELECT kcu.column_name
+                 FROM information_schema.table_constraints tc
+                 JOIN information_schema.key_column_usage kcu
+                     ON tc.constraint_name = kcu.constraint_name
+                 WHERE tc.constraint_type = 'PRIMARY KEY'
+                   AND tc.table_schema = :schema
+                   AND tc.table_name = :table
+                 ORDER BY kcu.ordinal_position;
+             """
+
+             pk_result = conn.execute(text(pk_query),
+                                      {"schema": table_schema, "table": table_name})
+             primary_keys = [row[0] for row in pk_result]
+
+             # Get foreign keys
+             fk_query = """
+                 SELECT
+                     kcu.column_name,
+                     ccu.table_schema AS foreign_table_schema,
+                     ccu.table_name AS foreign_table_name,
+                     ccu.column_name AS foreign_column_name
+                 FROM information_schema.table_constraints tc
+                 JOIN information_schema.key_column_usage kcu
+                     ON tc.constraint_name = kcu.constraint_name
+                 JOIN information_schema.constraint_column_usage ccu
+                     ON ccu.constraint_name = tc.constraint_name
+                 WHERE tc.constraint_type = 'FOREIGN KEY'
+                   AND tc.table_schema = :schema
+                   AND tc.table_name = :table
+                 ORDER BY kcu.ordinal_position;
+             """
+
+             fk_result = conn.execute(text(fk_query),
+                                      {"schema": table_schema, "table": table_name})
+             foreign_keys = [dict(row._mapping) for row in fk_result]
+
+             # Get indices
+             index_query = """
+                 SELECT
+                     indexname,
+                     indexdef
+                 FROM pg_indexes
+                 WHERE schemaname = :schema
+                   AND tablename = :table
+                   AND indexname NOT LIKE '%_pkey'
+                 ORDER BY indexname;
+             """
+
+             index_result = conn.execute(text(index_query),
+                                         {"schema": table_schema, "table": table_name})
+             indices = [dict(row._mapping) for row in index_result]
+
+             # Store individual table schema for mention context
+             table_info = {
+                 'schema': table_schema,
+                 'table_name': table_name,
+                 'full_name': full_table_name,
+                 'columns': [dict(col) for col in columns],
+                 'primary_keys': primary_keys,
+                 'foreign_keys': [dict(fk) for fk in foreign_keys],
+                 'indices': [dict(idx) for idx in indices]
+             }
+             table_schemas[full_table_name] = table_info
+
+             # Build table section
+             markdown_output += f"## {full_table_name}\n\n"
+
+             # Columns section
+             markdown_output += f"### Columns ({len(columns)})\n\n"
+             for col in columns:
+                 col_name = col['column_name']
+                 data_type = col['data_type']
+
+                 # Format data type with precision/scale
+                 if col['character_maximum_length']:
+                     data_type += f"({col['character_maximum_length']})"
+                 elif col['numeric_precision'] and col['numeric_scale'] is not None:
+                     data_type += f"({col['numeric_precision']},{col['numeric_scale']})"
+                 elif col['numeric_precision']:
+                     data_type += f"({col['numeric_precision']})"
+
+                 # Add constraints
+                 constraints = []
+                 if col['is_nullable'] == 'NO':
+                     constraints.append("NOT NULL")
+                 if col['column_default']:
+                     constraints.append(f"DEFAULT {col['column_default']}")
+                 if col_name in primary_keys:
+                     constraints.append("PRIMARY KEY")
+
+                 constraint_text = f" ({', '.join(constraints)})" if constraints else ""
+
+                 markdown_output += f"- **{col_name}**: {data_type}{constraint_text}\n"
+
+             # Primary keys section
+             if primary_keys:
+                 markdown_output += f"\n### Primary Keys\n\n"
+                 markdown_output += f"- {', '.join([f'**{pk}**' for pk in primary_keys])}\n"
+
+             # Foreign keys section
+             if foreign_keys:
+                 markdown_output += f"\n### Foreign Keys\n\n"
+                 for fk in foreign_keys:
+                     markdown_output += f"- **{fk['column_name']}** → {fk['foreign_table_schema']}.{fk['foreign_table_name']}({fk['foreign_column_name']})\n"
+
+             # Indices section
+             if indices:
+                 markdown_output += f"\n### Indices\n\n"
+                 for idx in indices:
+                     markdown_output += f"- **{idx['indexname']}**: {idx['indexdef']}\n"
+
+             markdown_output += "\n---\n\n"
+
+         return {
+             "result": markdown_output.strip(),
+             "table_schemas": table_schemas
+         }
+
+     @tornado.web.authenticated
+     def post(self):
+         """Get database schema information for PostgreSQL or MySQL"""
+         try:
+             # Parse request body
+             try:
+                 body = json.loads(self.request.body.decode('utf-8'))
+             except json.JSONDecodeError:
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "Invalid JSON in request body"
+                 }))
+                 return
+
+             # Get database URL and type from request or environment
+             db_url, db_type = self._get_database_url(body.get('dbUrl'))
+
+             if not db_url:
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "No database URL provided and no database configurations found in environment"
+                 }))
+                 return
+
+             # Ensure MySQL URL uses pymysql driver
+             if db_type == 'mysql' and db_url.startswith('mysql://'):
+                 db_url = db_url.replace('mysql://', 'mysql+pymysql://', 1)
+
+             # Setup database environment
+             try:
+                 create_engine, text = self._setup_database_environment(db_type)
+             except ImportError as e:
+                 self.set_status(500)
+                 self.finish(json.dumps({
+                     "error": str(e)
+                 }))
+                 return
+
+             # Get database schema
+             try:
+                 engine = create_engine(db_url)
+
+                 with engine.connect() as conn:
+                     # Get schema based on database type
+                     if db_type == 'mysql':
+                         result = self._get_mysql_schema(conn, text)
+                     else:  # postgresql
+                         result = self._get_postgresql_schema(conn, text)
+
+                 self.finish(json.dumps(result))
+
+             except Exception as e:
+                 self.set_status(500)
+                 self.finish(json.dumps({
+                     "error": f"Error connecting to {db_type} database: {str(e)}"
+                 }))
+
+         except Exception as e:
+             self.set_status(500)
+             self.finish(json.dumps({
+                 "error": "Internal server error",
+                 "message": str(e)
+             }))
+
+
+ class UnifiedDatabaseQueryHandler(APIHandler):
+     """Handler for unified database query execution (PostgreSQL and MySQL)"""
+
+     def _setup_database_environment(self, db_type: str = 'postgresql'):
+         """Install required database packages if not available"""
+         def install_package(package_name):
+             try:
+                 subprocess.check_call([sys.executable, "-m", "pip", "install", package_name])
+                 return True
+             except subprocess.CalledProcessError:
+                 return False
+
+         missing_packages = []
+
+         try:
+             import sqlalchemy
+             from sqlalchemy import create_engine, text
+         except ImportError:
+             if install_package("sqlalchemy"):
+                 try:
+                     import sqlalchemy
+                     from sqlalchemy import create_engine, text
+                 except ImportError as e:
+                     missing_packages.append(f"sqlalchemy: {str(e)}")
+             else:
+                 missing_packages.append("sqlalchemy: installation failed")
+
+         # Install database-specific drivers
+         if db_type == 'mysql':
+             try:
+                 import pymysql
+             except ImportError:
+                 if install_package("pymysql"):
+                     try:
+                         import pymysql
+                     except ImportError as e:
+                         missing_packages.append(f"pymysql: {str(e)}")
+                 else:
+                     missing_packages.append("pymysql: installation failed")
+         else:  # postgresql
+             try:
+                 import psycopg2
+             except ImportError:
+                 if install_package("psycopg2-binary"):
+                     try:
+                         import psycopg2
+                     except ImportError as e:
+                         missing_packages.append(f"psycopg2: {str(e)}")
+                 else:
+                     missing_packages.append("psycopg2: installation failed")
+
+         if missing_packages:
+             raise ImportError("Required modules could not be installed: " + ", ".join(missing_packages))
+
+         from sqlalchemy import create_engine, text
+         return create_engine, text
+
+     def _detect_database_type(self, db_url: str) -> str:
+         """Detect database type from connection URL"""
+         if db_url.startswith('mysql'):
+             return 'mysql'
+         elif db_url.startswith('postgresql') or db_url.startswith('postgres'):
+             return 'postgresql'
+         else:
+             # Default to postgresql for backward compatibility
+             return 'postgresql'
+
+     def _get_database_url(self, provided_url: Optional[str] = None, db_type: Optional[str] = None) -> tuple[Optional[str], str]:
+         """Get database URL from request or environment variables
+         Returns: (db_url, db_type)
+         """
+         if provided_url:
+             detected_type = self._detect_database_type(provided_url)
+             return provided_url, detected_type
+
+         # First try the new multi-database environment format
+         # Look for any database configuration in the environment
+         db_configs = {}
+
+         # Scan environment for database configurations
+         for key, value in os.environ.items():
+             if key.endswith('_CONNECTION_JSON'):
+                 try:
+                     config = json.loads(value)
+                     db_name = key.replace('_CONNECTION_JSON', '')
+                     db_configs[db_name] = config
+
+                     # Use the first available database configuration matching the requested type
+                     # Or use the first one if no specific type requested
+                     config_type = config.get('type', 'postgresql')
+                     if db_type is None or config_type == db_type:
+                         db_url = None  # initialize so an unsupported config type cannot raise NameError below
+                         if 'connectionUrl' in config:
+                             db_url = config['connectionUrl']
+                         else:
+                             # Build connection URL from components
+                             if config_type == 'postgresql':
+                                 db_url = f"postgresql://{config['username']}:{config['password']}@{config['host']}:{config['port']}/{config['database']}"
+                             elif config_type == 'mysql':
+                                 db_url = f"mysql+pymysql://{config['username']}:{config['password']}@{config['host']}:{config['port']}/{config['database']}"
+
+                         if db_url:
+                             return db_url, config_type
+                 except Exception as e:
+                     print(f"[UnifiedDatabaseQueryHandler] Error parsing database config {key}: {e}")
+                     continue
+
+         # Fallback to legacy DB_URL environment variable if no multi-db config found
+         db_url = os.environ.get('DB_URL')
+         if db_url:
+             detected_type = self._detect_database_type(db_url)
+             return db_url, detected_type
+
+         return None, db_type or 'postgresql'
+
+     @tornado.web.authenticated
+     def post(self):
+         """Execute a read-only SQL query on PostgreSQL or MySQL"""
+         try:
+             # Parse request body
+             try:
+                 body = json.loads(self.request.body.decode('utf-8'))
+             except json.JSONDecodeError:
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "Invalid JSON in request body"
+                 }))
+                 return
+
+             # Get query from request
+             query = body.get('query')
+             if not query:
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "Missing 'query' field in request body"
+                 }))
+                 return
+
+             # Basic validation for read-only queries
+             normalized_query = query.strip().upper()
+             if not normalized_query.startswith('SELECT') and not normalized_query.startswith('WITH'):
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "Only SELECT or WITH statements are allowed for read queries."
+                 }))
+                 return
+
+             # Get database URL and type from request or environment
+             db_url, db_type = self._get_database_url(body.get('dbUrl'))
+
+             if not db_url:
+                 self.set_status(400)
+                 self.finish(json.dumps({
+                     "error": "No database URL provided and no database configurations found in environment"
+                 }))
+                 return
+
+             # Ensure MySQL URL uses pymysql driver
+             if db_type == 'mysql' and db_url.startswith('mysql://'):
+                 db_url = db_url.replace('mysql://', 'mysql+pymysql://', 1)
+
+             # Setup database environment
+             try:
+                 create_engine, text = self._setup_database_environment(db_type)
+             except ImportError as e:
+                 self.set_status(500)
+                 self.finish(json.dumps({
+                     "error": str(e)
+                 }))
+                 return
+
+             # Execute query
+             try:
+                 engine = create_engine(db_url)
+
+                 with engine.connect() as conn:
+                     result = conn.execute(text(query))
+
+                     # Convert result to list of dictionaries
+                     rows = [dict(row._mapping) for row in result]
+
+                 self.finish(json.dumps({
+                     "result": rows
+                 }, default=str))  # default=str so date/Decimal values serialize instead of raising
+
+             except Exception as e:
+                 self.set_status(500)
+                 self.finish(json.dumps({
+                     "error": f"{db_type.title()} query failed: {str(e)}"
+                 }))
+
+         except Exception as e:
+             self.set_status(500)
+             self.finish(json.dumps({
+                 "error": "Internal server error",
+                 "message": str(e)
+             }))
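
For context, a minimal sketch of the environment-driven configuration that _get_database_url scans for. The variable name SALES_DB_CONNECTION_JSON and all of its values are hypothetical; the field names (type, username, password, host, port, database, and the optional connectionUrl override) are the ones the handlers actually read:

import json
import os

# Hypothetical database profile; the handlers pick up any variable ending in _CONNECTION_JSON.
os.environ["SALES_DB_CONNECTION_JSON"] = json.dumps({
    "type": "mysql",            # 'postgresql' (the default) or 'mysql'
    "username": "report_user",
    "password": "s3cret",
    "host": "db.internal",
    "port": 3306,
    "database": "sales",
    # "connectionUrl": "mysql+pymysql://...",  # if present, used verbatim
})

# The legacy single-database fallback is still honored when no *_CONNECTION_JSON entry matches:
os.environ.setdefault("DB_URL", "postgresql://user:pass@localhost:5432/app")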
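
And a sketch of how a client might exercise the two endpoints once they are registered. The route paths below are assumptions for illustration only; the actual URL registration lives in handlers.py, which this hunk does not show:

import requests

BASE = "http://localhost:8888"  # assumed local Jupyter server
HEADERS = {"Authorization": "token <your-jupyter-token>"}  # both handlers are @tornado.web.authenticated

# Fetch the markdown schema summary (hypothetical route name).
schema = requests.post(
    f"{BASE}/signalpilot-ai-internal/database-schema",
    headers=HEADERS,
    json={"dbUrl": "postgresql://user:pass@localhost:5432/app"},
)
print(schema.json()["result"])

# Run a read-only query (hypothetical route name); only SELECT/WITH statements pass validation.
rows = requests.post(
    f"{BASE}/signalpilot-ai-internal/database-query",
    headers=HEADERS,
    json={"query": "SELECT 1 AS ok"},
)
print(rows.json())  # {"result": [{"ok": 1}]}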