sqlsaber 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sqlsaber might be problematic.
- sqlsaber/agents/__init__.py +2 -2
- sqlsaber/agents/base.py +1 -1
- sqlsaber/agents/mcp.py +1 -1
- sqlsaber/agents/pydantic_ai_agent.py +207 -135
- sqlsaber/application/__init__.py +1 -0
- sqlsaber/application/auth_setup.py +164 -0
- sqlsaber/application/db_setup.py +223 -0
- sqlsaber/application/model_selection.py +98 -0
- sqlsaber/application/prompts.py +115 -0
- sqlsaber/cli/auth.py +22 -50
- sqlsaber/cli/commands.py +22 -28
- sqlsaber/cli/completers.py +2 -0
- sqlsaber/cli/database.py +25 -86
- sqlsaber/cli/display.py +29 -9
- sqlsaber/cli/interactive.py +150 -127
- sqlsaber/cli/models.py +18 -28
- sqlsaber/cli/onboarding.py +325 -0
- sqlsaber/cli/streaming.py +15 -17
- sqlsaber/cli/threads.py +10 -6
- sqlsaber/config/api_keys.py +2 -2
- sqlsaber/config/settings.py +25 -2
- sqlsaber/database/__init__.py +55 -1
- sqlsaber/database/base.py +124 -0
- sqlsaber/database/csv.py +133 -0
- sqlsaber/database/duckdb.py +313 -0
- sqlsaber/database/mysql.py +345 -0
- sqlsaber/database/postgresql.py +328 -0
- sqlsaber/database/schema.py +66 -963
- sqlsaber/database/sqlite.py +258 -0
- sqlsaber/mcp/mcp.py +1 -1
- sqlsaber/tools/sql_tools.py +1 -1
- {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/METADATA +43 -9
- sqlsaber-0.27.0.dist-info/RECORD +58 -0
- sqlsaber/database/connection.py +0 -535
- sqlsaber-0.25.0.dist-info/RECORD +0 -47
- {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/WHEEL +0 -0
- {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/entry_points.txt +0 -0
- {sqlsaber-0.25.0.dist-info → sqlsaber-0.27.0.dist-info}/licenses/LICENSE +0 -0
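The structural story of this release: the monolithic `sqlsaber/database/connection.py` (535 lines, deleted above) is split into per-dialect modules (`base.py`, `csv.py`, `duckdb.py`, `mysql.py`, `postgresql.py`, `sqlite.py`), and `schema.py` shrinks dramatically (+66/-963) into a thin coordinator. Based on the imports visible in the `schema.py` diff below, each dialect module now ships its connection class and schema introspector side by side. A minimal usage sketch under that assumption (the `PostgreSQLConnection` constructor argument is a guess; this diff does not show its signature):

```python
# Sketch only: the module paths come from the imports in schema.py below;
# the constructor argument is an assumption, not confirmed by this diff.
import asyncio

from sqlsaber.database.postgresql import (
    PostgreSQLConnection,
    PostgreSQLSchemaIntrospector,
)


async def list_tables(dsn: str) -> list[dict]:
    conn = PostgreSQLConnection(dsn)  # hypothetical constructor
    try:
        # list_tables_info(connection) is the introspector API shown in the diff
        return await PostgreSQLSchemaIntrospector().list_tables_info(conn)
    finally:
        await conn.close()  # SchemaManager.close() awaits db.close(), so close() is async


asyncio.run(list_tables("postgresql://localhost/mydb"))
```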
sqlsaber/database/schema.py
CHANGED
@@ -1,907 +1,19 @@
-"""Database schema
+"""Database schema management."""
 
-import asyncio
-from abc import ABC, abstractmethod
-from typing import Any, TypedDict
+from typing import Any
 
-import aiosqlite
-import duckdb
-
-from sqlsaber.database.connection import (
+from .base import (
     BaseDatabaseConnection,
-    CSVConnection,
-    DuckDBConnection,
-    MySQLConnection,
-    PostgreSQLConnection,
-    SQLiteConnection,
+    ColumnInfo,
+    ForeignKeyInfo,
+    IndexInfo,
+    SchemaInfo,
 )
-
-
-class ColumnInfo(TypedDict):
-    """Type definition for column information."""
-
-    data_type: str
-    nullable: bool
-    default: str | None
-    max_length: int | None
-    precision: int | None
-    scale: int | None
-
-
-class ForeignKeyInfo(TypedDict):
-    """Type definition for foreign key information."""
-
-    column: str
-    references: dict[str, str]  # {"table": "schema.table", "column": "column_name"}
-
-
-class IndexInfo(TypedDict):
-    """Type definition for index information."""
-
-    name: str
-    columns: list[str]  # ordered
-    unique: bool
-    type: str | None  # btree, gin, FULLTEXT, etc. None if unknown
-
-
-class SchemaInfo(TypedDict):
-    """Type definition for schema information."""
-
-    schema: str
-    name: str
-    type: str
-    columns: dict[str, ColumnInfo]
-    primary_keys: list[str]
-    foreign_keys: list[ForeignKeyInfo]
-    indexes: list[IndexInfo]
-
-
-class BaseSchemaIntrospector(ABC):
-    """Abstract base class for database-specific schema introspection."""
-
-    @abstractmethod
-    async def get_tables_info(
-        self, connection, table_pattern: str | None = None
-    ) -> dict[str, Any]:
-        """Get tables information for the specific database type."""
-        pass
-
-    @abstractmethod
-    async def get_columns_info(self, connection, tables: list) -> list:
-        """Get columns information for the specific database type."""
-        pass
-
-    @abstractmethod
-    async def get_foreign_keys_info(self, connection, tables: list) -> list:
-        """Get foreign keys information for the specific database type."""
-        pass
-
-    @abstractmethod
-    async def get_primary_keys_info(self, connection, tables: list) -> list:
-        """Get primary keys information for the specific database type."""
-        pass
-
-    @abstractmethod
-    async def get_indexes_info(self, connection, tables: list) -> list:
-        """Get indexes information for the specific database type."""
-        pass
-
-    @abstractmethod
-    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information."""
-        pass
-
-
-class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
-    """PostgreSQL-specific schema introspection."""
-
-    async def get_tables_info(
-        self, connection, table_pattern: str | None = None
-    ) -> dict[str, Any]:
-        """Get tables information for PostgreSQL."""
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Build WHERE clause for filtering
-            where_conditions = [
-                "table_schema NOT IN ('pg_catalog', 'information_schema')"
-            ]
-            params = []
-
-            if table_pattern:
-                # Support patterns like 'schema.table' or just 'table'
-                if "." in table_pattern:
-                    schema_pattern, table_name_pattern = table_pattern.split(".", 1)
-                    where_conditions.append(
-                        "(table_schema LIKE $1 AND table_name LIKE $2)"
-                    )
-                    params.extend([schema_pattern, table_name_pattern])
-                else:
-                    where_conditions.append(
-                        "(table_name LIKE $1 OR table_schema || '.' || table_name LIKE $1)"
-                    )
-                    params.append(table_pattern)
-
-            # Get tables
-            tables_query = f"""
-                SELECT
-                    table_schema,
-                    table_name,
-                    table_type
-                FROM information_schema.tables
-                WHERE {" AND ".join(where_conditions)}
-                ORDER BY table_schema, table_name;
-            """
-            return await conn.fetch(tables_query, *params)
-
-    async def get_columns_info(self, connection, tables: list) -> list:
-        """Get columns information for PostgreSQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Build IN clause for the tables we found
-            table_filters = []
-            for table in tables:
-                table_filters.append(
-                    f"(table_schema = '{table['table_schema']}' AND table_name = '{table['table_name']}')"
-                )
-
-            columns_query = f"""
-                SELECT
-                    table_schema,
-                    table_name,
-                    column_name,
-                    data_type,
-                    is_nullable,
-                    column_default,
-                    character_maximum_length,
-                    numeric_precision,
-                    numeric_scale
-                FROM information_schema.columns
-                WHERE ({" OR ".join(table_filters)})
-                ORDER BY table_schema, table_name, ordinal_position;
-            """
-            return await conn.fetch(columns_query)
-
-    async def get_foreign_keys_info(self, connection, tables: list) -> list:
-        """Get foreign keys information for PostgreSQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Build proper table filters with tc. prefix
-            fk_table_filters = []
-            for table in tables:
-                fk_table_filters.append(
-                    f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
-                )
-
-            fk_query = f"""
-                SELECT
-                    tc.table_schema,
-                    tc.table_name,
-                    kcu.column_name,
-                    ccu.table_schema AS foreign_table_schema,
-                    ccu.table_name AS foreign_table_name,
-                    ccu.column_name AS foreign_column_name
-                FROM information_schema.table_constraints AS tc
-                JOIN information_schema.key_column_usage AS kcu
-                    ON tc.constraint_name = kcu.constraint_name
-                    AND tc.table_schema = kcu.table_schema
-                JOIN information_schema.constraint_column_usage AS ccu
-                    ON ccu.constraint_name = tc.constraint_name
-                    AND ccu.table_schema = tc.table_schema
-                WHERE tc.constraint_type = 'FOREIGN KEY'
-                AND ({" OR ".join(fk_table_filters)});
-            """
-            return await conn.fetch(fk_query)
-
-    async def get_primary_keys_info(self, connection, tables: list) -> list:
-        """Get primary keys information for PostgreSQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Build proper table filters with tc. prefix
-            pk_table_filters = []
-            for table in tables:
-                pk_table_filters.append(
-                    f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
-                )
-
-            pk_query = f"""
-                SELECT
-                    tc.table_schema,
-                    tc.table_name,
-                    kcu.column_name
-                FROM information_schema.table_constraints AS tc
-                JOIN information_schema.key_column_usage AS kcu
-                    ON tc.constraint_name = kcu.constraint_name
-                    AND tc.table_schema = kcu.table_schema
-                WHERE tc.constraint_type = 'PRIMARY KEY'
-                AND ({" OR ".join(pk_table_filters)})
-                ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
-            """
-            return await conn.fetch(pk_query)
-
-    async def get_indexes_info(self, connection, tables: list) -> list:
-        """Get indexes information for PostgreSQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Build proper table filters
-            idx_table_filters = []
-            for table in tables:
-                idx_table_filters.append(
-                    f"(ns.nspname = '{table['table_schema']}' AND t.relname = '{table['table_name']}')"
-                )
-
-            idx_query = f"""
-                SELECT
-                    ns.nspname AS table_schema,
-                    t.relname AS table_name,
-                    i.relname AS index_name,
-                    ix.indisunique AS is_unique,
-                    am.amname AS index_type,
-                    array_agg(a.attname ORDER BY ord.ordinality) AS column_names
-                FROM pg_class t
-                JOIN pg_namespace ns ON ns.oid = t.relnamespace
-                JOIN pg_index ix ON ix.indrelid = t.oid
-                JOIN pg_class i ON i.oid = ix.indexrelid
-                JOIN pg_am am ON am.oid = i.relam
-                JOIN LATERAL unnest(ix.indkey) WITH ORDINALITY AS ord(attnum, ordinality)
-                    ON TRUE
-                JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ord.attnum
-                WHERE ns.nspname NOT IN ('pg_catalog', 'information_schema')
-                AND ({" OR ".join(idx_table_filters)})
-                GROUP BY table_schema, table_name, index_name, is_unique, index_type
-                ORDER BY table_schema, table_name, index_name;
-            """
-            return await conn.fetch(idx_query)
-
-    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information for PostgreSQL."""
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            # Get tables without row counts for better performance
-            tables_query = """
-                SELECT
-                    t.table_schema,
-                    t.table_name,
-                    t.table_type
-                FROM information_schema.tables t
-                WHERE t.table_schema NOT IN ('pg_catalog', 'information_schema')
-                ORDER BY t.table_schema, t.table_name;
-            """
-            records = await conn.fetch(tables_query)
-
-            # Convert asyncpg.Record objects to dictionaries
-            return [
-                {
-                    "table_schema": record["table_schema"],
-                    "table_name": record["table_name"],
-                    "table_type": record["table_type"],
-                }
-                for record in records
-            ]
-
-
-class MySQLSchemaIntrospector(BaseSchemaIntrospector):
-    """MySQL-specific schema introspection."""
-
-    async def get_tables_info(
-        self, connection, table_pattern: str | None = None
-    ) -> dict[str, Any]:
-        """Get tables information for MySQL."""
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Build WHERE clause for filtering
-                where_conditions = [
-                    "table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')"
-                ]
-                params = []
-
-                if table_pattern:
-                    # Support patterns like 'schema.table' or just 'table'
-                    if "." in table_pattern:
-                        schema_pattern, table_name_pattern = table_pattern.split(".", 1)
-                        where_conditions.append(
-                            "(table_schema LIKE %s AND table_name LIKE %s)"
-                        )
-                        params.extend([schema_pattern, table_name_pattern])
-                    else:
-                        where_conditions.append(
-                            "(table_name LIKE %s OR CONCAT(table_schema, '.', table_name) LIKE %s)"
-                        )
-                        params.extend([table_pattern, table_pattern])
-
-                # Get tables
-                tables_query = f"""
-                    SELECT
-                        table_schema,
-                        table_name,
-                        table_type
-                    FROM information_schema.tables
-                    WHERE {" AND ".join(where_conditions)}
-                    ORDER BY table_schema, table_name;
-                """
-                await cursor.execute(tables_query, params)
-                return await cursor.fetchall()
-
-    async def get_columns_info(self, connection, tables: list) -> list:
-        """Get columns information for MySQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Build IN clause for the tables we found
-                table_filters = []
-                for table in tables:
-                    table_filters.append(
-                        f"(table_schema = '{table['table_schema']}' AND table_name = '{table['table_name']}')"
-                    )
-
-                columns_query = f"""
-                    SELECT
-                        table_schema,
-                        table_name,
-                        column_name,
-                        data_type,
-                        is_nullable,
-                        column_default,
-                        character_maximum_length,
-                        numeric_precision,
-                        numeric_scale
-                    FROM information_schema.columns
-                    WHERE ({" OR ".join(table_filters)})
-                    ORDER BY table_schema, table_name, ordinal_position;
-                """
-                await cursor.execute(columns_query)
-                return await cursor.fetchall()
-
-    async def get_foreign_keys_info(self, connection, tables: list) -> list:
-        """Get foreign keys information for MySQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Build proper table filters
-                fk_table_filters = []
-                for table in tables:
-                    fk_table_filters.append(
-                        f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
-                    )
-
-                fk_query = f"""
-                    SELECT
-                        tc.table_schema,
-                        tc.table_name,
-                        kcu.column_name,
-                        rc.unique_constraint_schema AS foreign_table_schema,
-                        rc.referenced_table_name AS foreign_table_name,
-                        kcu.referenced_column_name AS foreign_column_name
-                    FROM information_schema.table_constraints AS tc
-                    JOIN information_schema.key_column_usage AS kcu
-                        ON tc.constraint_name = kcu.constraint_name
-                        AND tc.table_schema = kcu.table_schema
-                    JOIN information_schema.referential_constraints AS rc
-                        ON tc.constraint_name = rc.constraint_name
-                        AND tc.table_schema = rc.constraint_schema
-                    WHERE tc.constraint_type = 'FOREIGN KEY'
-                    AND ({" OR ".join(fk_table_filters)});
-                """
-                await cursor.execute(fk_query)
-                return await cursor.fetchall()
-
-    async def get_primary_keys_info(self, connection, tables: list) -> list:
-        """Get primary keys information for MySQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Build proper table filters
-                pk_table_filters = []
-                for table in tables:
-                    pk_table_filters.append(
-                        f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
-                    )
-
-                pk_query = f"""
-                    SELECT
-                        tc.table_schema,
-                        tc.table_name,
-                        kcu.column_name
-                    FROM information_schema.table_constraints AS tc
-                    JOIN information_schema.key_column_usage AS kcu
-                        ON tc.constraint_name = kcu.constraint_name
-                        AND tc.table_schema = kcu.table_schema
-                    WHERE tc.constraint_type = 'PRIMARY KEY'
-                    AND ({" OR ".join(pk_table_filters)})
-                    ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
-                """
-                await cursor.execute(pk_query)
-                return await cursor.fetchall()
-
-    async def get_indexes_info(self, connection, tables: list) -> list:
-        """Get indexes information for MySQL."""
-        if not tables:
-            return []
-
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Build proper table filters
-                idx_table_filters = []
-                for table in tables:
-                    idx_table_filters.append(
-                        f"(TABLE_SCHEMA = '{table['table_schema']}' AND TABLE_NAME = '{table['table_name']}')"
-                    )
-
-                idx_query = f"""
-                    SELECT
-                        TABLE_SCHEMA AS table_schema,
-                        TABLE_NAME AS table_name,
-                        INDEX_NAME AS index_name,
-                        (NON_UNIQUE = 0) AS is_unique,
-                        INDEX_TYPE AS index_type,
-                        GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS column_names
-                    FROM INFORMATION_SCHEMA.STATISTICS
-                    WHERE ({" OR ".join(idx_table_filters)})
-                    GROUP BY table_schema, table_name, index_name, is_unique, index_type
-                    ORDER BY table_schema, table_name, index_name;
-                """
-                await cursor.execute(idx_query)
-                return await cursor.fetchall()
-
-    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information for MySQL."""
-        pool = await connection.get_pool()
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cursor:
-                # Get tables without row counts for better performance
-                tables_query = """
-                    SELECT
-                        t.table_schema,
-                        t.table_name,
-                        t.table_type
-                    FROM information_schema.tables t
-                    WHERE t.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')
-                    ORDER BY t.table_schema, t.table_name;
-                """
-                await cursor.execute(tables_query)
-                rows = await cursor.fetchall()
-
-                # Convert rows to dictionaries
-                return [
-                    {
-                        "table_schema": row["table_schema"],
-                        "table_name": row["table_name"],
-                        "table_type": row["table_type"],
-                    }
-                    for row in rows
-                ]
-
-
-class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
-    """SQLite-specific schema introspection."""
-
-    async def _execute_query(self, connection, query: str, params=()) -> list:
-        """Helper method to execute queries on both SQLite and CSV connections."""
-        # Handle both SQLite and CSV connections
-        if hasattr(connection, "database_path"):
-            # Regular SQLite connection
-            async with aiosqlite.connect(connection.database_path) as conn:
-                conn.row_factory = aiosqlite.Row
-                cursor = await conn.execute(query, params)
-                return await cursor.fetchall()
-        else:
-            # CSV connection - use the existing connection
-            conn = await connection.get_pool()
-            cursor = await conn.execute(query, params)
-            return await cursor.fetchall()
-
-    async def get_tables_info(
-        self, connection, table_pattern: str | None = None
-    ) -> dict[str, Any]:
-        """Get tables information for SQLite."""
-        where_conditions = ["type IN ('table', 'view')", "name NOT LIKE 'sqlite_%'"]
-        params = ()
-
-        if table_pattern:
-            where_conditions.append("name LIKE ?")
-            params = (table_pattern,)
-
-        query = f"""
-            SELECT
-                'main' as table_schema,
-                name as table_name,
-                type as table_type
-            FROM sqlite_master
-            WHERE {" AND ".join(where_conditions)}
-            ORDER BY name;
-        """
-
-        return await self._execute_query(connection, query, params)
-
-    async def get_columns_info(self, connection, tables: list) -> list:
-        """Get columns information for SQLite."""
-        if not tables:
-            return []
-
-        columns = []
-        for table in tables:
-            table_name = table["table_name"]
-
-            # Get table info using PRAGMA
-            pragma_query = f"PRAGMA table_info({table_name})"
-            table_columns = await self._execute_query(connection, pragma_query)
-
-            for col in table_columns:
-                columns.append(
-                    {
-                        "table_schema": "main",
-                        "table_name": table_name,
-                        "column_name": col["name"],
-                        "data_type": col["type"],
-                        "is_nullable": "YES" if not col["notnull"] else "NO",
-                        "column_default": col["dflt_value"],
-                        "character_maximum_length": None,
-                        "numeric_precision": None,
-                        "numeric_scale": None,
-                    }
-                )
-
-        return columns
-
-    async def get_foreign_keys_info(self, connection, tables: list) -> list:
-        """Get foreign keys information for SQLite."""
-        if not tables:
-            return []
-
-        foreign_keys = []
-        for table in tables:
-            table_name = table["table_name"]
-
-            # Get foreign key info using PRAGMA
-            pragma_query = f"PRAGMA foreign_key_list({table_name})"
-            table_fks = await self._execute_query(connection, pragma_query)
-
-            for fk in table_fks:
-                foreign_keys.append(
-                    {
-                        "table_schema": "main",
-                        "table_name": table_name,
-                        "column_name": fk["from"],
-                        "foreign_table_schema": "main",
-                        "foreign_table_name": fk["table"],
-                        "foreign_column_name": fk["to"],
-                    }
-                )
-
-        return foreign_keys
-
-    async def get_primary_keys_info(self, connection, tables: list) -> list:
-        """Get primary keys information for SQLite."""
-        if not tables:
-            return []
-
-        primary_keys = []
-        for table in tables:
-            table_name = table["table_name"]
-
-            # Get table info using PRAGMA to find primary keys
-            pragma_query = f"PRAGMA table_info({table_name})"
-            table_columns = await self._execute_query(connection, pragma_query)
-
-            for col in table_columns:
-                if col["pk"]:  # Primary key indicator
-                    primary_keys.append(
-                        {
-                            "table_schema": "main",
-                            "table_name": table_name,
-                            "column_name": col["name"],
-                        }
-                    )
-
-        return primary_keys
-
-    async def get_indexes_info(self, connection, tables: list) -> list:
-        """Get indexes information for SQLite."""
-        if not tables:
-            return []
-
-        indexes = []
-        for table in tables:
-            table_name = table["table_name"]
-
-            # Get index list using PRAGMA
-            pragma_query = f"PRAGMA index_list({table_name})"
-            table_indexes = await self._execute_query(connection, pragma_query)
-
-            for idx in table_indexes:
-                idx_name = idx["name"]
-                unique = bool(idx["unique"])
-
-                # Skip auto-generated primary key indexes
-                if idx_name.startswith("sqlite_autoindex_"):
-                    continue
-
-                # Get index columns using PRAGMA
-                pragma_info_query = f"PRAGMA index_info({idx_name})"
-                idx_cols = await self._execute_query(connection, pragma_info_query)
-                columns = [
-                    c["name"] for c in sorted(idx_cols, key=lambda r: r["seqno"])
-                ]
-
-                indexes.append(
-                    {
-                        "table_schema": "main",
-                        "table_name": table_name,
-                        "index_name": idx_name,
-                        "is_unique": unique,
-                        "index_type": None,  # SQLite only has B-tree currently
-                        "column_names": columns,
-                    }
-                )
-
-        return indexes
-
-    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information for SQLite."""
-        # Get table names without row counts for better performance
-        tables_query = """
-            SELECT
-                'main' as table_schema,
-                name as table_name,
-                type as table_type
-            FROM sqlite_master
-            WHERE type IN ('table', 'view')
-                AND name NOT LIKE 'sqlite_%'
-            ORDER BY name;
-        """
-
-        tables = await self._execute_query(connection, tables_query)
-
-        # Convert to expected format
-        return [
-            {
-                "table_schema": table["table_schema"],
-                "table_name": table["table_name"],
-                "table_type": table["table_type"],
-            }
-            for table in tables
-        ]
-
-
-class DuckDBSchemaIntrospector(BaseSchemaIntrospector):
-    """DuckDB-specific schema introspection."""
-
-    async def _execute_query(
-        self,
-        connection: DuckDBConnection | CSVConnection,
-        query: str,
-        params: tuple[Any, ...] = (),
-    ) -> list[dict[str, Any]]:
-        """Run a DuckDB query on a thread and return list of dictionaries."""
-
-        params_tuple = tuple(params)
-
-        def fetch_rows(conn: duckdb.DuckDBPyConnection) -> list[dict[str, Any]]:
-            cursor = conn.execute(query, params_tuple)
-            if cursor.description is None:
-                return []
-
-            columns = [col[0] for col in cursor.description]
-            rows = conn.fetchall()
-            return [dict(zip(columns, row)) for row in rows]
-
-        if isinstance(connection, CSVConnection):
-            return await connection.execute_query(query, *params_tuple)
-
-        def run_query() -> list[dict[str, Any]]:
-            conn = duckdb.connect(connection.database_path)
-            try:
-                return fetch_rows(conn)
-            finally:
-                conn.close()
-
-        return await asyncio.to_thread(run_query)
-
-    async def get_tables_info(
-        self, connection, table_pattern: str | None = None
-    ) -> list[dict[str, Any]]:
-        """Get tables information for DuckDB."""
-        where_conditions = [
-            "table_schema NOT IN ('information_schema', 'pg_catalog', 'duckdb_catalog')"
-        ]
-        params: list[Any] = []
-
-        if table_pattern:
-            if "." in table_pattern:
-                schema_pattern, table_name_pattern = table_pattern.split(".", 1)
-                where_conditions.append(
-                    "(table_schema LIKE ? AND table_name LIKE ?)"
-                )
-                params.extend([schema_pattern, table_name_pattern])
-            else:
-                where_conditions.append(
-                    "(table_name LIKE ? OR table_schema || '.' || table_name LIKE ?)"
-                )
-                params.extend([table_pattern, table_pattern])
-
-        query = f"""
-            SELECT
-                table_schema,
-                table_name,
-                table_type
-            FROM information_schema.tables
-            WHERE {" AND ".join(where_conditions)}
-            ORDER BY table_schema, table_name;
-        """
-
-        return await self._execute_query(connection, query, tuple(params))
-
-    async def get_columns_info(self, connection, tables: list) -> list[dict[str, Any]]:
-        """Get columns information for DuckDB."""
-        if not tables:
-            return []
-
-        table_filters = []
-        for table in tables:
-            table_filters.append(
-                "(table_schema = ? AND table_name = ?)"
-            )
-
-        params: list[Any] = []
-        for table in tables:
-            params.extend([table["table_schema"], table["table_name"]])
-
-        query = f"""
-            SELECT
-                table_schema,
-                table_name,
-                column_name,
-                data_type,
-                is_nullable,
-                column_default,
-                character_maximum_length,
-                numeric_precision,
-                numeric_scale
-            FROM information_schema.columns
-            WHERE {" OR ".join(table_filters)}
-            ORDER BY table_schema, table_name, ordinal_position;
-        """
-
-        return await self._execute_query(connection, query, tuple(params))
-
-    async def get_foreign_keys_info(self, connection, tables: list) -> list[dict[str, Any]]:
-        """Get foreign keys information for DuckDB."""
-        if not tables:
-            return []
-
-        table_filters = []
-        params: list[Any] = []
-        for table in tables:
-            table_filters.append("(kcu.table_schema = ? AND kcu.table_name = ?)")
-            params.extend([table["table_schema"], table["table_name"]])
-
-        query = f"""
-            SELECT
-                kcu.table_schema,
-                kcu.table_name,
-                kcu.column_name,
-                ccu.table_schema AS foreign_table_schema,
-                ccu.table_name AS foreign_table_name,
-                ccu.column_name AS foreign_column_name
-            FROM information_schema.referential_constraints AS rc
-            JOIN information_schema.key_column_usage AS kcu
-                ON rc.constraint_schema = kcu.constraint_schema
-                AND rc.constraint_name = kcu.constraint_name
-            JOIN information_schema.key_column_usage AS ccu
-                ON rc.unique_constraint_schema = ccu.constraint_schema
-                AND rc.unique_constraint_name = ccu.constraint_name
-                AND ccu.ordinal_position = kcu.position_in_unique_constraint
-            WHERE {" OR ".join(table_filters)}
-            ORDER BY kcu.table_schema, kcu.table_name, kcu.ordinal_position;
-        """
-
-        return await self._execute_query(connection, query, tuple(params))
-
-    async def get_primary_keys_info(self, connection, tables: list) -> list[dict[str, Any]]:
-        """Get primary keys information for DuckDB."""
-        if not tables:
-            return []
-
-        table_filters = []
-        params: list[Any] = []
-        for table in tables:
-            table_filters.append("(tc.table_schema = ? AND tc.table_name = ?)")
-            params.extend([table["table_schema"], table["table_name"]])
-
-        query = f"""
-            SELECT
-                tc.table_schema,
-                tc.table_name,
-                kcu.column_name
-            FROM information_schema.table_constraints AS tc
-            JOIN information_schema.key_column_usage AS kcu
-                ON tc.constraint_name = kcu.constraint_name
-                AND tc.constraint_schema = kcu.constraint_schema
-            WHERE tc.constraint_type = 'PRIMARY KEY'
-            AND ({" OR ".join(table_filters)})
-            ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
-        """
-
-        return await self._execute_query(connection, query, tuple(params))
-
-    async def get_indexes_info(self, connection, tables: list) -> list[dict[str, Any]]:
-        """Get indexes information for DuckDB."""
-        if not tables:
-            return []
-
-        indexes: list[dict[str, Any]] = []
-        for table in tables:
-            schema = table["table_schema"]
-            table_name = table["table_name"]
-            query = """
-                SELECT
-                    schema_name,
-                    table_name,
-                    index_name,
-                    sql
-                FROM duckdb_indexes()
-                WHERE schema_name = ? AND table_name = ?;
-            """
-            rows = await self._execute_query(connection, query, (schema, table_name))
-
-            for row in rows:
-                sql_text = (row.get("sql") or "").strip()
-                upper_sql = sql_text.upper()
-                unique = "UNIQUE" in upper_sql.split("(")[0]
-
-                columns: list[str] = []
-                if "(" in sql_text and ")" in sql_text:
-                    column_section = sql_text[sql_text.find("(") + 1 : sql_text.rfind(")")]
-                    columns = [col.strip().strip('"') for col in column_section.split(",") if col.strip()]
-
-                indexes.append(
-                    {
-                        "table_schema": row.get("schema_name") or schema or "main",
-                        "table_name": row.get("table_name") or table_name,
-                        "index_name": row.get("index_name"),
-                        "is_unique": unique,
-                        "index_type": None,
-                        "column_names": columns,
-                    }
-                )
-
-        return indexes
-
-    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
-        """Get list of tables with basic information for DuckDB."""
-        query = """
-            SELECT
-                table_schema,
-                table_name,
-                table_type
-            FROM information_schema.tables
-            WHERE table_schema NOT IN ('information_schema', 'pg_catalog', 'duckdb_catalog')
-            ORDER BY table_schema, table_name;
-        """
-
-        return await self._execute_query(connection, query)
+from .csv import CSVConnection
+from .duckdb import DuckDBConnection, DuckDBSchemaIntrospector
+from .mysql import MySQLConnection, MySQLSchemaIntrospector
+from .postgresql import PostgreSQLConnection, PostgreSQLSchemaIntrospector
+from .sqlite import SQLiteConnection, SQLiteSchemaIntrospector
 
 
 class SchemaManager:
@@ -965,98 +77,89 @@ class SchemaManager:
             "foreign_keys": [],
             "indexes": [],
         }
+
         return schema_info
 
-    def _add_columns_to_schema(
-
-    ) -> None:
-        """Add column information to schema."""
+    def _add_columns_to_schema(self, schema_info: dict, columns: list) -> None:
+        """Add column information to schema structure."""
        for col in columns:
             full_name = f"{col['table_schema']}.{col['table_name']}"
             if full_name in schema_info:
-                col_info = {
+                column_info: ColumnInfo = {
                     "data_type": col["data_type"],
-                    "nullable": col
-                    "default": col
+                    "nullable": col.get("is_nullable", "YES") == "YES",
+                    "default": col.get("column_default"),
+                    "max_length": col.get("character_maximum_length"),
+                    "precision": col.get("numeric_precision"),
+                    "scale": col.get("numeric_scale"),
                 }
-
-
-                for attr_map in [
-                    ("character_maximum_length", "max_length"),
-                    ("numeric_precision", "precision"),
-                    ("numeric_scale", "scale"),
-                ]:
-                    if col.get(attr_map[0]):
-                        col_info[attr_map[1]] = col[attr_map[0]]
-
-                schema_info[full_name]["columns"][col["column_name"]] = col_info
+                # Add type field for display compatibility
+                column_info["type"] = col["data_type"]
+                schema_info[full_name]["columns"][col["column_name"]] = column_info
 
     def _add_primary_keys_to_schema(
-        self, schema_info: dict
+        self, schema_info: dict, primary_keys: list
     ) -> None:
-        """Add primary key information to schema."""
+        """Add primary key information to schema structure."""
         for pk in primary_keys:
             full_name = f"{pk['table_schema']}.{pk['table_name']}"
             if full_name in schema_info:
                 schema_info[full_name]["primary_keys"].append(pk["column_name"])
 
     def _add_foreign_keys_to_schema(
-        self, schema_info: dict
+        self, schema_info: dict, foreign_keys: list
     ) -> None:
-        """Add foreign key information to schema."""
+        """Add foreign key information to schema structure."""
         for fk in foreign_keys:
             full_name = f"{fk['table_schema']}.{fk['table_name']}"
             if full_name in schema_info:
-                schema_info[full_name]["foreign_keys"].append(
-                    {
-                        "column": fk["column_name"],
-                        "references": {
-                            "table": f"{fk['foreign_table_schema']}.{fk['foreign_table_name']}",
-                            "column": fk["foreign_column_name"],
-                        },
-                    }
-                )
+                fk_info: ForeignKeyInfo = {
+                    "column": fk["column_name"],
+                    "references": {
+                        "table": f"{fk['foreign_table_schema']}.{fk['foreign_table_name']}",
+                        "column": fk["foreign_column_name"],
+                    },
+                }
+                schema_info[full_name]["foreign_keys"].append(fk_info)
 
-    def _add_indexes_to_schema(
-
-    ) -> None:
-        """Add index information to schema."""
+    def _add_indexes_to_schema(self, schema_info: dict, indexes: list) -> None:
+        """Add index information to schema structure."""
         for idx in indexes:
             full_name = f"{idx['table_schema']}.{idx['table_name']}"
             if full_name in schema_info:
-                # Handle
-                if isinstance(idx
+                # Handle column names - could be comma-separated string or list
+                if isinstance(idx.get("column_names"), str):
+                    columns = [
+                        col.strip()
+                        for col in idx["column_names"].split(",")
+                        if col.strip()
+                    ]
+                elif isinstance(idx.get("column_names"), list):
                     columns = idx["column_names"]
                 else:
-
-                    columns = (
-                        idx["column_names"].split(",") if idx["column_names"] else []
-                    )
+                    columns = []
 
-                schema_info[full_name]["indexes"].append(
-                    {
-                        "name": idx["index_name"],
-                        "columns": columns,
-                        "unique": idx["is_unique"],
-                        "type": idx["index_type"],
-                    }
-                )
+                index_info: IndexInfo = {
+                    "name": idx["index_name"],
+                    "columns": columns,
+                    "unique": bool(idx.get("is_unique", False)),
+                    "type": idx.get("index_type"),
+                }
+                schema_info[full_name]["indexes"].append(index_info)
 
     async def list_tables(self) -> dict[str, Any]:
-        """Get
-
+        """Get list of tables with basic information."""
+        tables_list = await self.introspector.list_tables_info(self.db)
 
-        #
-
+        # Add full_name and name fields for backwards compatibility
+        for table in tables_list:
+            table["full_name"] = f"{table['table_schema']}.{table['table_name']}"
+            table["name"] = table["table_name"]
+            table["schema"] = table["table_schema"]
+            table["type"] = table["table_type"]  # Map table_type to type for display
 
-
-            result["tables"].append(
-                {
-                    "schema": table["table_schema"],
-                    "name": table["table_name"],
-                    "full_name": f"{table['table_schema']}.{table['table_name']}",
-                    "type": table["table_type"],
-                }
-            )
+        return {"tables": tables_list}
 
-        return result
+    async def close(self):
+        """Close database connection."""
+        await self.db.close()