sqlsaber-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlsaber might be problematic.
- sqlsaber/__init__.py +3 -0
- sqlsaber/__main__.py +4 -0
- sqlsaber/agents/__init__.py +9 -0
- sqlsaber/agents/anthropic.py +451 -0
- sqlsaber/agents/base.py +67 -0
- sqlsaber/agents/streaming.py +26 -0
- sqlsaber/cli/__init__.py +7 -0
- sqlsaber/cli/commands.py +132 -0
- sqlsaber/cli/database.py +275 -0
- sqlsaber/cli/display.py +207 -0
- sqlsaber/cli/interactive.py +93 -0
- sqlsaber/cli/memory.py +239 -0
- sqlsaber/cli/models.py +231 -0
- sqlsaber/cli/streaming.py +94 -0
- sqlsaber/config/__init__.py +7 -0
- sqlsaber/config/api_keys.py +102 -0
- sqlsaber/config/database.py +252 -0
- sqlsaber/config/settings.py +115 -0
- sqlsaber/database/__init__.py +9 -0
- sqlsaber/database/connection.py +187 -0
- sqlsaber/database/schema.py +678 -0
- sqlsaber/memory/__init__.py +1 -0
- sqlsaber/memory/manager.py +77 -0
- sqlsaber/memory/storage.py +176 -0
- sqlsaber/models/__init__.py +13 -0
- sqlsaber/models/events.py +28 -0
- sqlsaber/models/types.py +40 -0
- sqlsaber-0.1.0.dist-info/METADATA +168 -0
- sqlsaber-0.1.0.dist-info/RECORD +32 -0
- sqlsaber-0.1.0.dist-info/WHEEL +4 -0
- sqlsaber-0.1.0.dist-info/entry_points.txt +4 -0
- sqlsaber-0.1.0.dist-info/licenses/LICENSE +201 -0
sqlsaber/database/schema.py
@@ -0,0 +1,678 @@
"""Database schema introspection utilities."""

import time
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional, Tuple

import aiosqlite

from sqlsaber.database.connection import (
    BaseDatabaseConnection,
    MySQLConnection,
    PostgreSQLConnection,
    SQLiteConnection,
)
from sqlsaber.models.types import SchemaInfo


class BaseSchemaIntrospector(ABC):
    """Abstract base class for database-specific schema introspection."""

    @abstractmethod
    async def get_tables_info(
        self, connection, table_pattern: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get tables information for the specific database type."""
        pass

    @abstractmethod
    async def get_columns_info(self, connection, tables: list) -> list:
        """Get columns information for the specific database type."""
        pass

    @abstractmethod
    async def get_foreign_keys_info(self, connection, tables: list) -> list:
        """Get foreign keys information for the specific database type."""
        pass

    @abstractmethod
    async def get_primary_keys_info(self, connection, tables: list) -> list:
        """Get primary keys information for the specific database type."""
        pass

    @abstractmethod
    async def list_tables_info(self, connection) -> Dict[str, Any]:
        """Get list of tables with basic information."""
        pass

class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
    """PostgreSQL-specific schema introspection."""

    async def get_tables_info(
        self, connection, table_pattern: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get tables information for PostgreSQL."""
        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            # Build WHERE clause for filtering
            where_conditions = [
                "table_schema NOT IN ('pg_catalog', 'information_schema')"
            ]
            params = []

            if table_pattern:
                # Support patterns like 'schema.table' or just 'table'
                if "." in table_pattern:
                    schema_pattern, table_name_pattern = table_pattern.split(".", 1)
                    where_conditions.append(
                        "(table_schema LIKE $1 AND table_name LIKE $2)"
                    )
                    params.extend([schema_pattern, table_name_pattern])
                else:
                    where_conditions.append(
                        "(table_name LIKE $1 OR table_schema || '.' || table_name LIKE $1)"
                    )
                    params.append(table_pattern)

            # Get tables
            tables_query = f"""
                SELECT
                    table_schema,
                    table_name,
                    table_type
                FROM information_schema.tables
                WHERE {" AND ".join(where_conditions)}
                ORDER BY table_schema, table_name;
            """
            return await conn.fetch(tables_query, *params)

    async def get_columns_info(self, connection, tables: list) -> list:
        """Get columns information for PostgreSQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            # Build IN clause for the tables we found
            table_filters = []
            for table in tables:
                table_filters.append(
                    f"(table_schema = '{table['table_schema']}' AND table_name = '{table['table_name']}')"
                )

            columns_query = f"""
                SELECT
                    table_schema,
                    table_name,
                    column_name,
                    data_type,
                    is_nullable,
                    column_default,
                    character_maximum_length,
                    numeric_precision,
                    numeric_scale
                FROM information_schema.columns
                WHERE ({" OR ".join(table_filters)})
                ORDER BY table_schema, table_name, ordinal_position;
            """
            return await conn.fetch(columns_query)

    async def get_foreign_keys_info(self, connection, tables: list) -> list:
        """Get foreign keys information for PostgreSQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            # Build proper table filters with tc. prefix
            fk_table_filters = []
            for table in tables:
                fk_table_filters.append(
                    f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
                )

            fk_query = f"""
                SELECT
                    tc.table_schema,
                    tc.table_name,
                    kcu.column_name,
                    ccu.table_schema AS foreign_table_schema,
                    ccu.table_name AS foreign_table_name,
                    ccu.column_name AS foreign_column_name
                FROM information_schema.table_constraints AS tc
                JOIN information_schema.key_column_usage AS kcu
                    ON tc.constraint_name = kcu.constraint_name
                    AND tc.table_schema = kcu.table_schema
                JOIN information_schema.constraint_column_usage AS ccu
                    ON ccu.constraint_name = tc.constraint_name
                    AND ccu.table_schema = tc.table_schema
                WHERE tc.constraint_type = 'FOREIGN KEY'
                    AND ({" OR ".join(fk_table_filters)});
            """
            return await conn.fetch(fk_query)

    async def get_primary_keys_info(self, connection, tables: list) -> list:
        """Get primary keys information for PostgreSQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            # Build proper table filters with tc. prefix
            pk_table_filters = []
            for table in tables:
                pk_table_filters.append(
                    f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
                )

            pk_query = f"""
                SELECT
                    tc.table_schema,
                    tc.table_name,
                    kcu.column_name
                FROM information_schema.table_constraints AS tc
                JOIN information_schema.key_column_usage AS kcu
                    ON tc.constraint_name = kcu.constraint_name
                    AND tc.table_schema = kcu.table_schema
                WHERE tc.constraint_type = 'PRIMARY KEY'
                    AND ({" OR ".join(pk_table_filters)})
                ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
            """
            return await conn.fetch(pk_query)

    async def list_tables_info(self, connection) -> Dict[str, Any]:
        """Get list of tables with basic information for PostgreSQL."""
        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            # Get tables with row counts
            tables_query = """
                WITH table_stats AS (
                    SELECT
                        schemaname,
                        relname as tablename,
                        n_live_tup as approximate_row_count
                    FROM pg_stat_user_tables
                )
                SELECT
                    t.table_schema,
                    t.table_name,
                    t.table_type,
                    COALESCE(ts.approximate_row_count, 0) as row_count
                FROM information_schema.tables t
                LEFT JOIN table_stats ts
                    ON t.table_schema = ts.schemaname
                    AND t.table_name = ts.tablename
                WHERE t.table_schema NOT IN ('pg_catalog', 'information_schema')
                ORDER BY t.table_schema, t.table_name;
            """
            return await conn.fetch(tables_query)

class MySQLSchemaIntrospector(BaseSchemaIntrospector):
    """MySQL-specific schema introspection."""

    async def get_tables_info(
        self, connection, table_pattern: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get tables information for MySQL."""
        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            async with conn.cursor() as cursor:
                # Build WHERE clause for filtering
                where_conditions = [
                    "table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')"
                ]
                params = []

                if table_pattern:
                    # Support patterns like 'schema.table' or just 'table'
                    if "." in table_pattern:
                        schema_pattern, table_name_pattern = table_pattern.split(".", 1)
                        where_conditions.append(
                            "(table_schema LIKE %s AND table_name LIKE %s)"
                        )
                        params.extend([schema_pattern, table_name_pattern])
                    else:
                        where_conditions.append(
                            "(table_name LIKE %s OR CONCAT(table_schema, '.', table_name) LIKE %s)"
                        )
                        params.extend([table_pattern, table_pattern])

                # Get tables
                tables_query = f"""
                    SELECT
                        table_schema,
                        table_name,
                        table_type
                    FROM information_schema.tables
                    WHERE {" AND ".join(where_conditions)}
                    ORDER BY table_schema, table_name;
                """
                await cursor.execute(tables_query, params)
                return await cursor.fetchall()

    async def get_columns_info(self, connection, tables: list) -> list:
        """Get columns information for MySQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            async with conn.cursor() as cursor:
                # Build IN clause for the tables we found
                table_filters = []
                for table in tables:
                    table_filters.append(
                        f"(table_schema = '{table['table_schema']}' AND table_name = '{table['table_name']}')"
                    )

                columns_query = f"""
                    SELECT
                        table_schema,
                        table_name,
                        column_name,
                        data_type,
                        is_nullable,
                        column_default,
                        character_maximum_length,
                        numeric_precision,
                        numeric_scale
                    FROM information_schema.columns
                    WHERE ({" OR ".join(table_filters)})
                    ORDER BY table_schema, table_name, ordinal_position;
                """
                await cursor.execute(columns_query)
                return await cursor.fetchall()

    async def get_foreign_keys_info(self, connection, tables: list) -> list:
        """Get foreign keys information for MySQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            async with conn.cursor() as cursor:
                # Build proper table filters
                fk_table_filters = []
                for table in tables:
                    fk_table_filters.append(
                        f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
                    )

                fk_query = f"""
                    SELECT
                        tc.table_schema,
                        tc.table_name,
                        kcu.column_name,
                        rc.unique_constraint_schema AS foreign_table_schema,
                        rc.referenced_table_name AS foreign_table_name,
                        kcu.referenced_column_name AS foreign_column_name
                    FROM information_schema.table_constraints AS tc
                    JOIN information_schema.key_column_usage AS kcu
                        ON tc.constraint_name = kcu.constraint_name
                        AND tc.table_schema = kcu.table_schema
                    JOIN information_schema.referential_constraints AS rc
                        ON tc.constraint_name = rc.constraint_name
                        AND tc.table_schema = rc.constraint_schema
                    WHERE tc.constraint_type = 'FOREIGN KEY'
                        AND ({" OR ".join(fk_table_filters)});
                """
                await cursor.execute(fk_query)
                return await cursor.fetchall()

    async def get_primary_keys_info(self, connection, tables: list) -> list:
        """Get primary keys information for MySQL."""
        if not tables:
            return []

        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            async with conn.cursor() as cursor:
                # Build proper table filters
                pk_table_filters = []
                for table in tables:
                    pk_table_filters.append(
                        f"(tc.table_schema = '{table['table_schema']}' AND tc.table_name = '{table['table_name']}')"
                    )

                pk_query = f"""
                    SELECT
                        tc.table_schema,
                        tc.table_name,
                        kcu.column_name
                    FROM information_schema.table_constraints AS tc
                    JOIN information_schema.key_column_usage AS kcu
                        ON tc.constraint_name = kcu.constraint_name
                        AND tc.table_schema = kcu.table_schema
                    WHERE tc.constraint_type = 'PRIMARY KEY'
                        AND ({" OR ".join(pk_table_filters)})
                    ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
                """
                await cursor.execute(pk_query)
                return await cursor.fetchall()

    async def list_tables_info(self, connection) -> Dict[str, Any]:
        """Get list of tables with basic information for MySQL."""
        pool = await connection.get_pool()
        async with pool.acquire() as conn:
            async with conn.cursor() as cursor:
                # Get tables with row counts
                tables_query = """
                    SELECT
                        t.table_schema,
                        t.table_name,
                        t.table_type,
                        COALESCE(t.table_rows, 0) as row_count
                    FROM information_schema.tables t
                    WHERE t.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')
                    ORDER BY t.table_schema, t.table_name;
                """
                await cursor.execute(tables_query)
                return await cursor.fetchall()

class SQLiteSchemaIntrospector(BaseSchemaIntrospector):
    """SQLite-specific schema introspection."""

    async def get_tables_info(
        self, connection, table_pattern: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get tables information for SQLite."""
        where_clause = ""
        params = ()

        if table_pattern:
            # Appended as AND: the query below already has a WHERE clause
            where_clause = "AND name LIKE ?"
            params = (table_pattern,)

        query = f"""
            SELECT
                'main' as table_schema,
                name as table_name,
                type as table_type
            FROM sqlite_master
            WHERE type IN ('table', 'view')
                AND name NOT LIKE 'sqlite_%'
                {where_clause}
            ORDER BY name;
        """

        async with aiosqlite.connect(connection.database_path) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(query, params)
            return await cursor.fetchall()

    async def get_columns_info(self, connection, tables: list) -> list:
        """Get columns information for SQLite."""
        if not tables:
            return []

        columns = []
        for table in tables:
            table_name = table["table_name"]

            # Get table info using PRAGMA
            pragma_query = f"PRAGMA table_info({table_name})"

            async with aiosqlite.connect(connection.database_path) as conn:
                conn.row_factory = aiosqlite.Row
                cursor = await conn.execute(pragma_query)
                table_columns = await cursor.fetchall()

                for col in table_columns:
                    columns.append(
                        {
                            "table_schema": "main",
                            "table_name": table_name,
                            "column_name": col["name"],
                            "data_type": col["type"],
                            "is_nullable": "YES" if not col["notnull"] else "NO",
                            "column_default": col["dflt_value"],
                            "character_maximum_length": None,
                            "numeric_precision": None,
                            "numeric_scale": None,
                        }
                    )

        return columns

    async def get_foreign_keys_info(self, connection, tables: list) -> list:
        """Get foreign keys information for SQLite."""
        if not tables:
            return []

        foreign_keys = []
        for table in tables:
            table_name = table["table_name"]

            # Get foreign key info using PRAGMA
            pragma_query = f"PRAGMA foreign_key_list({table_name})"

            async with aiosqlite.connect(connection.database_path) as conn:
                conn.row_factory = aiosqlite.Row
                cursor = await conn.execute(pragma_query)
                table_fks = await cursor.fetchall()

                for fk in table_fks:
                    foreign_keys.append(
                        {
                            "table_schema": "main",
                            "table_name": table_name,
                            "column_name": fk["from"],
                            "foreign_table_schema": "main",
                            "foreign_table_name": fk["table"],
                            "foreign_column_name": fk["to"],
                        }
                    )

        return foreign_keys

    async def get_primary_keys_info(self, connection, tables: list) -> list:
        """Get primary keys information for SQLite."""
        if not tables:
            return []

        primary_keys = []
        for table in tables:
            table_name = table["table_name"]

            # Get table info using PRAGMA to find primary keys
            pragma_query = f"PRAGMA table_info({table_name})"

            async with aiosqlite.connect(connection.database_path) as conn:
                conn.row_factory = aiosqlite.Row
                cursor = await conn.execute(pragma_query)
                table_columns = await cursor.fetchall()

                for col in table_columns:
                    if col["pk"]:  # Primary key indicator
                        primary_keys.append(
                            {
                                "table_schema": "main",
                                "table_name": table_name,
                                "column_name": col["name"],
                            }
                        )

        return primary_keys

    async def list_tables_info(self, connection) -> Dict[str, Any]:
        """Get list of tables with basic information for SQLite."""
        # Get tables (SQLite doesn't have easy row count access)
        tables_query = """
            SELECT
                'main' as table_schema,
                name as table_name,
                type as table_type,
                0 as row_count
            FROM sqlite_master
            WHERE type IN ('table', 'view')
                AND name NOT LIKE 'sqlite_%'
            ORDER BY name;
        """

        async with aiosqlite.connect(connection.database_path) as conn:
            conn.row_factory = aiosqlite.Row
            cursor = await conn.execute(tables_query)
            return await cursor.fetchall()

class SchemaManager:
    """Manages database schema introspection with caching."""

    def __init__(self, db_connection: BaseDatabaseConnection, cache_ttl: int = 900):
        self.db = db_connection
        self.cache_ttl = cache_ttl  # Default 15 minutes
        self._schema_cache: Dict[str, Tuple[float, Dict[str, Any]]] = {}

        # Select appropriate introspector based on connection type
        if isinstance(db_connection, PostgreSQLConnection):
            self.introspector = PostgreSQLSchemaIntrospector()
        elif isinstance(db_connection, MySQLConnection):
            self.introspector = MySQLSchemaIntrospector()
        elif isinstance(db_connection, SQLiteConnection):
            self.introspector = SQLiteSchemaIntrospector()
        else:
            raise ValueError(
                f"Unsupported database connection type: {type(db_connection)}"
            )

    def clear_schema_cache(self):
        """Clear the schema cache."""
        self._schema_cache.clear()

    async def get_schema_info(
        self, table_pattern: Optional[str] = None
    ) -> Dict[str, SchemaInfo]:
        """Get database schema information, optionally filtered by table pattern.

        Args:
            table_pattern: Optional SQL LIKE pattern to filter tables (e.g., 'public.user%')
        """
        # Check cache first
        cache_key = f"schema:{table_pattern or 'all'}"
        cached_data = self._get_cached_schema(cache_key)
        if cached_data is not None:
            return cached_data

        # Fetch from database if not cached
        schema_info = await self._fetch_schema_from_db(table_pattern)

        # Cache the result
        self._schema_cache[cache_key] = (time.time(), schema_info)
        return schema_info

    def _get_cached_schema(self, cache_key: str) -> Optional[Dict[str, SchemaInfo]]:
        """Get schema from cache if available and not expired."""
        if cache_key in self._schema_cache:
            cached_time, cached_data = self._schema_cache[cache_key]
            if time.time() - cached_time < self.cache_ttl:
                return cached_data
        return None

    async def _fetch_schema_from_db(
        self, table_pattern: Optional[str]
    ) -> Dict[str, SchemaInfo]:
        """Fetch schema information from database."""
        # Get all schema components
        tables = await self.introspector.get_tables_info(self.db, table_pattern)
        columns = await self.introspector.get_columns_info(self.db, tables)
        foreign_keys = await self.introspector.get_foreign_keys_info(self.db, tables)
        primary_keys = await self.introspector.get_primary_keys_info(self.db, tables)

        # Build schema structure
        schema_info = self._build_table_structure(tables)
        self._add_columns_to_schema(schema_info, columns)
        self._add_primary_keys_to_schema(schema_info, primary_keys)
        self._add_foreign_keys_to_schema(schema_info, foreign_keys)

        return schema_info

    def _build_table_structure(self, tables: list) -> Dict[str, Dict]:
        """Build basic table structure from table info."""
        schema_info = {}
        for table in tables:
            schema_name = table["table_schema"]
            table_name = table["table_name"]
            full_name = f"{schema_name}.{table_name}"

            schema_info[full_name] = {
                "schema": schema_name,
                "name": table_name,
                "type": table["table_type"],
                "columns": {},
                "primary_keys": [],
                "foreign_keys": [],
            }
        return schema_info

    def _add_columns_to_schema(
        self, schema_info: Dict[str, Dict], columns: list
    ) -> None:
        """Add column information to schema."""
        for col in columns:
            full_name = f"{col['table_schema']}.{col['table_name']}"
            if full_name in schema_info:
                col_info = {
                    "data_type": col["data_type"],
                    "nullable": col["is_nullable"] == "YES",
                    "default": col["column_default"],
                }

                # Add optional attributes
                for attr_map in [
                    ("character_maximum_length", "max_length"),
                    ("numeric_precision", "precision"),
                    ("numeric_scale", "scale"),
                ]:
                    if col.get(attr_map[0]):
                        col_info[attr_map[1]] = col[attr_map[0]]

                schema_info[full_name]["columns"][col["column_name"]] = col_info

    def _add_primary_keys_to_schema(
        self, schema_info: Dict[str, Dict], primary_keys: list
    ) -> None:
        """Add primary key information to schema."""
        for pk in primary_keys:
            full_name = f"{pk['table_schema']}.{pk['table_name']}"
            if full_name in schema_info:
                schema_info[full_name]["primary_keys"].append(pk["column_name"])

    def _add_foreign_keys_to_schema(
        self, schema_info: Dict[str, Dict], foreign_keys: list
    ) -> None:
        """Add foreign key information to schema."""
        for fk in foreign_keys:
            full_name = f"{fk['table_schema']}.{fk['table_name']}"
            if full_name in schema_info:
                schema_info[full_name]["foreign_keys"].append(
                    {
                        "column": fk["column_name"],
                        "references": {
                            "table": f"{fk['foreign_table_schema']}.{fk['foreign_table_name']}",
                            "column": fk["foreign_column_name"],
                        },
                    }
                )

    async def list_tables(self) -> Dict[str, Any]:
        """Get a list of all tables with basic information like row counts."""
        tables = await self.introspector.list_tables_info(self.db)

        # Format the result
        result = {"tables": [], "total_tables": len(tables)}

        for table in tables:
            result["tables"].append(
                {
                    "schema": table["table_schema"],
                    "name": table["table_name"],
                    "full_name": f"{table['table_schema']}.{table['table_name']}",
                    "type": table["table_type"],
                    "row_count": table["row_count"],
                }
            )

        return result
sqlsaber/memory/__init__.py
@@ -0,0 +1 @@
"""Memory management for database-specific context storage."""
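
For orientation, a minimal usage sketch of the SchemaManager added in sqlsaber/database/schema.py is shown below. The SQLiteConnection constructor signature is an assumption, since sqlsaber/database/connection.py is not expanded in this diff; cache_ttl, get_schema_info, list_tables, and clear_schema_cache all appear in the code above.

# Usage sketch, not package code. Assumption: SQLiteConnection accepts a
# database file path (connection.py is collapsed in this diff).
import asyncio

from sqlsaber.database.connection import SQLiteConnection
from sqlsaber.database.schema import SchemaManager


async def main() -> None:
    conn = SQLiteConnection("example.db")  # assumed constructor signature
    manager = SchemaManager(conn, cache_ttl=900)  # results cached for 15 minutes

    # First call hits the database; repeat calls with the same pattern are
    # served from the in-memory cache until the TTL expires.
    schema = await manager.get_schema_info(table_pattern="user%")
    tables = await manager.list_tables()
    print(tables["total_tables"], sorted(schema))

    manager.clear_schema_cache()  # drop cached entries to force a re-fetch


asyncio.run(main())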