sqlsaber 0.24.0__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sqlsaber might be problematic.

@@ -0,0 +1,345 @@
+"""MySQL database connection and schema introspection."""
+
+import asyncio
+import ssl
+from typing import Any
+from urllib.parse import parse_qs, urlparse
+
+import aiomysql
+
+from .base import (
+    DEFAULT_QUERY_TIMEOUT,
+    BaseDatabaseConnection,
+    BaseSchemaIntrospector,
+    QueryTimeoutError,
+)
+
+
+class MySQLConnection(BaseDatabaseConnection):
+    """MySQL database connection using aiomysql."""
+
+    def __init__(self, connection_string: str):
+        super().__init__(connection_string)
+        self._pool: aiomysql.Pool | None = None
+        self._parse_connection_string()
+
+    def _parse_connection_string(self):
+        """Parse MySQL connection string into components."""
+        parsed = urlparse(self.connection_string)
+        self.host = parsed.hostname or "localhost"
+        self.port = parsed.port or 3306
+        self.database = parsed.path.lstrip("/") if parsed.path else ""
+        self.user = parsed.username or ""
+        self.password = parsed.password or ""
+
+        # Parse SSL parameters
+        self.ssl_params = {}
+        if parsed.query:
+            params = parse_qs(parsed.query)
+
+            ssl_mode = params.get("ssl_mode", [None])[0]
+            if ssl_mode:
+                # Map SSL modes to aiomysql SSL parameters
+                if ssl_mode.upper() == "DISABLED":
+                    self.ssl_params["ssl"] = None
+                elif ssl_mode.upper() in [
+                    "PREFERRED",
+                    "REQUIRED",
+                    "VERIFY_CA",
+                    "VERIFY_IDENTITY",
+                ]:
+                    ssl_context = ssl.create_default_context()
+
+                    if ssl_mode.upper() == "REQUIRED":
+                        ssl_context.check_hostname = False
+                        ssl_context.verify_mode = ssl.CERT_NONE
+                    elif ssl_mode.upper() == "VERIFY_CA":
+                        ssl_context.check_hostname = False
+                        ssl_context.verify_mode = ssl.CERT_REQUIRED
+                    elif ssl_mode.upper() == "VERIFY_IDENTITY":
+                        ssl_context.check_hostname = True
+                        ssl_context.verify_mode = ssl.CERT_REQUIRED
+
+                    # Load certificates if provided
+                    ssl_ca = params.get("ssl_ca", [None])[0]
+                    ssl_cert = params.get("ssl_cert", [None])[0]
+                    ssl_key = params.get("ssl_key", [None])[0]
+
+                    if ssl_ca:
+                        ssl_context.load_verify_locations(ssl_ca)
+
+                    if ssl_cert and ssl_key:
+                        ssl_context.load_cert_chain(ssl_cert, ssl_key)
+
+                    self.ssl_params["ssl"] = ssl_context
+
+    async def get_pool(self) -> aiomysql.Pool:
+        """Get or create connection pool."""
+        if self._pool is None:
+            pool_kwargs = {
+                "host": self.host,
+                "port": self.port,
+                "user": self.user,
+                "password": self.password,
+                "db": self.database,
+                "minsize": 1,
+                "maxsize": 10,
+                "autocommit": False,
+            }
+
+            # Add SSL parameters if configured
+            pool_kwargs.update(self.ssl_params)
+
+            self._pool = await aiomysql.create_pool(**pool_kwargs)
+        return self._pool
+
+    async def close(self):
+        """Close the connection pool."""
+        if self._pool:
+            self._pool.close()
+            await self._pool.wait_closed()
+            self._pool = None
+
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
+        """Execute a query and return results as list of dicts.
+
+        All queries run in a transaction that is rolled back at the end,
+        ensuring no changes are persisted to the database.
+        """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
+        pool = await self.get_pool()
+
+        async with pool.acquire() as conn:
+            async with conn.cursor(aiomysql.DictCursor) as cursor:
+                # Start transaction
+                await conn.begin()
+                try:
+                    # Set server-side timeout if specified
+                    if effective_timeout:
+                        # Clamp timeout to sane range (10ms to 5 minutes) and validate
+                        timeout_ms = max(10, min(int(effective_timeout * 1000), 300000))
+                        await cursor.execute(
+                            f"SET SESSION MAX_EXECUTION_TIME = {timeout_ms}"
+                        )
+
+                    # Execute query with client-side timeout
+                    if effective_timeout:
+                        await asyncio.wait_for(
+                            cursor.execute(query, args if args else None),
+                            timeout=effective_timeout,
+                        )
+                        rows = await asyncio.wait_for(
+                            cursor.fetchall(), timeout=effective_timeout
+                        )
+                    else:
+                        await cursor.execute(query, args if args else None)
+                        rows = await cursor.fetchall()
+
+                    return [dict(row) for row in rows]
+                except asyncio.TimeoutError as exc:
+                    raise QueryTimeoutError(effective_timeout or 0) from exc
+                finally:
+                    # Always rollback to ensure no changes are committed
+                    await conn.rollback()
+
+
+class MySQLSchemaIntrospector(BaseSchemaIntrospector):
+    """MySQL-specific schema introspection."""
+
+    def _build_table_filter_clause(self, tables: list) -> tuple[str, list]:
+        """Build row constructor with bind parameters for table filtering.
+
+        Args:
+            tables: List of table dictionaries with table_schema and table_name keys
+
+        Returns:
+            Tuple of (placeholders, params) for use in SQL queries
+        """
+        if not tables:
+            return "", []
+
+        table_pairs = [(table["table_schema"], table["table_name"]) for table in tables]
+        placeholders = ", ".join(["(%s, %s)"] * len(table_pairs))
+        params = [value for pair in table_pairs for value in pair]
+        return placeholders, params
+
+    async def get_tables_info(
+        self, connection, table_pattern: str | None = None
+    ) -> dict[str, Any]:
+        """Get tables information for MySQL."""
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                # Build WHERE clause for filtering
+                where_conditions = [
+                    "table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')"
+                ]
+                params = []
+
+                if table_pattern:
+                    # Support patterns like 'schema.table' or just 'table'
+                    if "." in table_pattern:
+                        schema_pattern, table_name_pattern = table_pattern.split(".", 1)
+                        where_conditions.append(
+                            "(table_schema LIKE %s AND table_name LIKE %s)"
+                        )
+                        params.extend([schema_pattern, table_name_pattern])
+                    else:
+                        where_conditions.append(
+                            "(table_name LIKE %s OR CONCAT(table_schema, '.', table_name) LIKE %s)"
+                        )
+                        params.extend([table_pattern, table_pattern])
+
+                # Get tables
+                tables_query = f"""
+                    SELECT
+                        table_schema,
+                        table_name,
+                        table_type
+                    FROM information_schema.tables
+                    WHERE {" AND ".join(where_conditions)}
+                    ORDER BY table_schema, table_name;
+                """
+                await cursor.execute(tables_query, params)
+                return await cursor.fetchall()
+
+    async def get_columns_info(self, connection, tables: list) -> list:
+        """Get columns information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                placeholders, params = self._build_table_filter_clause(tables)
+
+                columns_query = f"""
+                    SELECT
+                        c.table_schema,
+                        c.table_name,
+                        c.column_name,
+                        c.data_type,
+                        c.is_nullable,
+                        c.column_default,
+                        c.character_maximum_length,
+                        c.numeric_precision,
+                        c.numeric_scale
+                    FROM information_schema.columns c
+                    WHERE (c.table_schema, c.table_name) IN ({placeholders})
+                    ORDER BY c.table_schema, c.table_name, c.ordinal_position;
+                """
+                await cursor.execute(columns_query, params)
+                return await cursor.fetchall()
+
+    async def get_foreign_keys_info(self, connection, tables: list) -> list:
+        """Get foreign keys information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                placeholders, params = self._build_table_filter_clause(tables)
+
+                fk_query = f"""
+                    SELECT
+                        tc.table_schema,
+                        tc.table_name,
+                        kcu.column_name,
+                        rc.unique_constraint_schema AS foreign_table_schema,
+                        rc.referenced_table_name AS foreign_table_name,
+                        kcu.referenced_column_name AS foreign_column_name
+                    FROM information_schema.table_constraints AS tc
+                    JOIN information_schema.key_column_usage AS kcu
+                        ON tc.constraint_name = kcu.constraint_name
+                        AND tc.table_schema = kcu.table_schema
+                    JOIN information_schema.referential_constraints AS rc
+                        ON tc.constraint_name = rc.constraint_name
+                        AND tc.table_schema = rc.constraint_schema
+                    WHERE tc.constraint_type = 'FOREIGN KEY'
+                        AND (tc.table_schema, tc.table_name) IN ({placeholders});
+                """
+                await cursor.execute(fk_query, params)
+                return await cursor.fetchall()
+
+    async def get_primary_keys_info(self, connection, tables: list) -> list:
+        """Get primary keys information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                placeholders, params = self._build_table_filter_clause(tables)
+
+                pk_query = f"""
+                    SELECT
+                        tc.table_schema,
+                        tc.table_name,
+                        kcu.column_name
+                    FROM information_schema.table_constraints AS tc
+                    JOIN information_schema.key_column_usage AS kcu
+                        ON tc.constraint_name = kcu.constraint_name
+                        AND tc.table_schema = kcu.table_schema
+                    WHERE tc.constraint_type = 'PRIMARY KEY'
+                        AND (tc.table_schema, tc.table_name) IN ({placeholders})
+                    ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
+                """
+                await cursor.execute(pk_query, params)
+                return await cursor.fetchall()
+
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for MySQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor() as cursor:
+                placeholders, params = self._build_table_filter_clause(tables)
+
+                idx_query = f"""
+                    SELECT
+                        TABLE_SCHEMA AS table_schema,
+                        TABLE_NAME AS table_name,
+                        INDEX_NAME AS index_name,
+                        (NON_UNIQUE = 0) AS is_unique,
+                        INDEX_TYPE AS index_type,
+                        GROUP_CONCAT(COLUMN_NAME ORDER BY SEQ_IN_INDEX) AS column_names
+                    FROM INFORMATION_SCHEMA.STATISTICS
+                    WHERE (TABLE_SCHEMA, TABLE_NAME) IN ({placeholders})
+                    GROUP BY table_schema, table_name, index_name, is_unique, index_type
+                    ORDER BY table_schema, table_name, index_name;
+                """
+                await cursor.execute(idx_query, params)
+                return await cursor.fetchall()
+
+    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
+        """Get list of tables with basic information for MySQL."""
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            async with conn.cursor(aiomysql.DictCursor) as cursor:
+                # Get tables without row counts for better performance
+                tables_query = """
+                    SELECT
+                        t.table_schema,
+                        t.table_name,
+                        t.table_type
+                    FROM information_schema.tables t
+                    WHERE t.table_schema NOT IN ('information_schema', 'performance_schema', 'mysql', 'sys')
+                    ORDER BY t.table_schema, t.table_name;
+                """
+                await cursor.execute(tables_query)
+                rows = await cursor.fetchall()
+
+                # Convert rows to dictionaries
+                return [
+                    {
+                        "table_schema": row["table_schema"],
+                        "table_name": row["table_name"],
+                        "table_type": row["table_type"],
+                    }
+                    for row in rows
+                ]
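
For orientation, here is a minimal usage sketch of the new MySQLConnection. It is illustrative only: the import path, DSN, credentials, and database name are placeholders (assumptions, not taken from the diff), and it presumes a reachable MySQL server with the aiomysql dependency installed.

import asyncio

from sqlsaber.database.mysql import MySQLConnection  # hypothetical import path


async def main() -> None:
    # Placeholder DSN; the ssl_mode query parameter drives the SSL handling parsed above
    conn = MySQLConnection("mysql://user:pass@localhost:3306/mydb?ssl_mode=REQUIRED")
    try:
        # Runs inside a transaction that is always rolled back, with a 5 s
        # client-side timeout plus the server-side MAX_EXECUTION_TIME limit.
        rows = await conn.execute_query("SELECT 1 AS ok", timeout=5.0)
        print(rows)  # [{'ok': 1}]
    finally:
        await conn.close()


asyncio.run(main())
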
@@ -0,0 +1,328 @@
+"""PostgreSQL database connection and schema introspection."""
+
+import asyncio
+import ssl
+from typing import Any
+from urllib.parse import parse_qs, urlparse
+
+import asyncpg
+
+from .base import (
+    DEFAULT_QUERY_TIMEOUT,
+    BaseDatabaseConnection,
+    BaseSchemaIntrospector,
+    QueryTimeoutError,
+)
+
+
+class PostgreSQLConnection(BaseDatabaseConnection):
+    """PostgreSQL database connection using asyncpg."""
+
+    def __init__(self, connection_string: str):
+        super().__init__(connection_string)
+        self._pool: asyncpg.Pool | None = None
+        self._ssl_context = self._create_ssl_context()
+
+    def _create_ssl_context(self) -> ssl.SSLContext | None:
+        """Create SSL context from connection string parameters."""
+        parsed = urlparse(self.connection_string)
+        if not parsed.query:
+            return None
+
+        params = parse_qs(parsed.query)
+        ssl_mode = params.get("sslmode", [None])[0]
+
+        if not ssl_mode or ssl_mode == "disable":
+            return None
+
+        # Create SSL context based on mode
+        if ssl_mode in ["require", "verify-ca", "verify-full"]:
+            ssl_context = ssl.create_default_context()
+
+            # Configure certificate verification
+            if ssl_mode == "require":
+                ssl_context.check_hostname = False
+                ssl_context.verify_mode = ssl.CERT_NONE
+            elif ssl_mode == "verify-ca":
+                ssl_context.check_hostname = False
+                ssl_context.verify_mode = ssl.CERT_REQUIRED
+            elif ssl_mode == "verify-full":
+                ssl_context.check_hostname = True
+                ssl_context.verify_mode = ssl.CERT_REQUIRED
+
+            # Load certificates if provided
+            ssl_ca = params.get("sslrootcert", [None])[0]
+            ssl_cert = params.get("sslcert", [None])[0]
+            ssl_key = params.get("sslkey", [None])[0]
+
+            if ssl_ca:
+                ssl_context.load_verify_locations(ssl_ca)
+
+            if ssl_cert and ssl_key:
+                ssl_context.load_cert_chain(ssl_cert, ssl_key)
+
+            return ssl_context
+
+        return None
+
+    async def get_pool(self) -> asyncpg.Pool:
+        """Get or create connection pool."""
+        if self._pool is None:
+            # Create pool with SSL context if configured
+            if self._ssl_context:
+                self._pool = await asyncpg.create_pool(
+                    self.connection_string,
+                    min_size=1,
+                    max_size=10,
+                    ssl=self._ssl_context,
+                )
+            else:
+                self._pool = await asyncpg.create_pool(
+                    self.connection_string, min_size=1, max_size=10
+                )
+        return self._pool
+
+    async def close(self):
+        """Close the connection pool."""
+        if self._pool:
+            await self._pool.close()
+            self._pool = None
+
+    async def execute_query(
+        self, query: str, *args, timeout: float | None = None
+    ) -> list[dict[str, Any]]:
+        """Execute a query and return results as list of dicts.
+
+        All queries run in a transaction that is rolled back at the end,
+        ensuring no changes are persisted to the database.
+        """
+        effective_timeout = timeout or DEFAULT_QUERY_TIMEOUT
+        pool = await self.get_pool()
+
+        async with pool.acquire() as conn:
+            # Start a transaction that we'll always rollback
+            transaction = conn.transaction()
+            await transaction.start()
+
+            try:
+                # Set server-side timeout if specified
+                if effective_timeout:
+                    # Clamp timeout to sane range (10ms to 5 minutes) and validate
+                    timeout_ms = max(10, min(int(effective_timeout * 1000), 300000))
+                    await conn.execute(f"SET LOCAL statement_timeout = {timeout_ms}")
+
+                # Execute query with client-side timeout
+                if effective_timeout:
+                    rows = await asyncio.wait_for(
+                        conn.fetch(query, *args), timeout=effective_timeout
+                    )
+                else:
+                    rows = await conn.fetch(query, *args)
+
+                return [dict(row) for row in rows]
+            except asyncio.TimeoutError as exc:
+                raise QueryTimeoutError(effective_timeout or 0) from exc
+            finally:
+                # Always rollback to ensure no changes are committed
+                await transaction.rollback()
+
+
+class PostgreSQLSchemaIntrospector(BaseSchemaIntrospector):
+    """PostgreSQL-specific schema introspection."""
+
+    def _build_table_filter_clause(self, tables: list) -> tuple[str, list]:
+        """Build VALUES clause with bind parameters for table filtering.
+
+        Args:
+            tables: List of table dictionaries with table_schema and table_name keys
+
+        Returns:
+            Tuple of (values_clause, params) for use in SQL queries
+        """
+        if not tables:
+            return "", []
+
+        table_pairs = [(table["table_schema"], table["table_name"]) for table in tables]
+        values_clause = ", ".join(
+            [f"(${2 * i + 1}, ${2 * i + 2})" for i in range(len(table_pairs))]
+        )
+        params = [value for pair in table_pairs for value in pair]
+        return values_clause, params
+
+    async def get_tables_info(
+        self, connection, table_pattern: str | None = None
+    ) -> dict[str, Any]:
+        """Get tables information for PostgreSQL."""
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            # Build WHERE clause for filtering
+            where_conditions = [
+                "table_schema NOT IN ('pg_catalog', 'information_schema')"
+            ]
+            params = []
+
+            if table_pattern:
+                # Support patterns like 'schema.table' or just 'table'
+                if "." in table_pattern:
+                    schema_pattern, table_name_pattern = table_pattern.split(".", 1)
+                    where_conditions.append(
+                        "(table_schema LIKE $1 AND table_name LIKE $2)"
+                    )
+                    params.extend([schema_pattern, table_name_pattern])
+                else:
+                    where_conditions.append(
+                        "(table_name LIKE $1 OR table_schema || '.' || table_name LIKE $1)"
+                    )
+                    params.append(table_pattern)
+
+            # Get tables
+            tables_query = f"""
+                SELECT
+                    table_schema,
+                    table_name,
+                    table_type
+                FROM information_schema.tables
+                WHERE {" AND ".join(where_conditions)}
+                ORDER BY table_schema, table_name;
+            """
+            return await conn.fetch(tables_query, *params)
+
+    async def get_columns_info(self, connection, tables: list) -> list:
+        """Get columns information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            values_clause, params = self._build_table_filter_clause(tables)
+
+            columns_query = f"""
+                SELECT
+                    c.table_schema,
+                    c.table_name,
+                    c.column_name,
+                    c.data_type,
+                    c.is_nullable,
+                    c.column_default,
+                    c.character_maximum_length,
+                    c.numeric_precision,
+                    c.numeric_scale
+                FROM information_schema.columns c
+                WHERE (c.table_schema, c.table_name) IN (VALUES {values_clause})
+                ORDER BY c.table_schema, c.table_name, c.ordinal_position;
+            """
+            return await conn.fetch(columns_query, *params)
+
+    async def get_foreign_keys_info(self, connection, tables: list) -> list:
+        """Get foreign keys information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            values_clause, params = self._build_table_filter_clause(tables)
+
+            fk_query = f"""
+                WITH t(schema, name) AS (VALUES {values_clause})
+                SELECT
+                    tc.table_schema,
+                    tc.table_name,
+                    kcu.column_name,
+                    ccu.table_schema AS foreign_table_schema,
+                    ccu.table_name AS foreign_table_name,
+                    ccu.column_name AS foreign_column_name
+                FROM information_schema.table_constraints AS tc
+                JOIN information_schema.key_column_usage AS kcu
+                    ON tc.constraint_name = kcu.constraint_name
+                    AND tc.table_schema = kcu.table_schema
+                JOIN information_schema.constraint_column_usage AS ccu
+                    ON ccu.constraint_name = tc.constraint_name
+                    AND ccu.table_schema = tc.table_schema
+                JOIN t ON t.schema = tc.table_schema AND t.name = tc.table_name
+                WHERE tc.constraint_type = 'FOREIGN KEY';
+            """
+            return await conn.fetch(fk_query, *params)
+
+    async def get_primary_keys_info(self, connection, tables: list) -> list:
+        """Get primary keys information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            values_clause, params = self._build_table_filter_clause(tables)
+
+            pk_query = f"""
+                WITH t(schema, name) AS (VALUES {values_clause})
+                SELECT
+                    tc.table_schema,
+                    tc.table_name,
+                    kcu.column_name
+                FROM information_schema.table_constraints AS tc
+                JOIN information_schema.key_column_usage AS kcu
+                    ON tc.constraint_name = kcu.constraint_name
+                    AND tc.table_schema = kcu.table_schema
+                JOIN t ON t.schema = tc.table_schema AND t.name = tc.table_name
+                WHERE tc.constraint_type = 'PRIMARY KEY'
+                ORDER BY tc.table_schema, tc.table_name, kcu.ordinal_position;
+            """
+            return await conn.fetch(pk_query, *params)
+
+    async def get_indexes_info(self, connection, tables: list) -> list:
+        """Get indexes information for PostgreSQL."""
+        if not tables:
+            return []
+
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            values_clause, params = self._build_table_filter_clause(tables)
+
+            idx_query = f"""
+                WITH t_filter(schema, name) AS (VALUES {values_clause})
+                SELECT
+                    ns.nspname AS table_schema,
+                    tcls.relname AS table_name,
+                    icls.relname AS index_name,
+                    ix.indisunique AS is_unique,
+                    am.amname AS index_type,
+                    string_agg(a.attname, ',' ORDER BY att.ordinality) AS column_names
+                FROM pg_class tcls
+                JOIN pg_namespace ns ON tcls.relnamespace = ns.oid
+                JOIN pg_index ix ON tcls.oid = ix.indrelid
+                JOIN pg_class icls ON icls.oid = ix.indexrelid
+                JOIN pg_am am ON icls.relam = am.oid
+                JOIN pg_attribute a ON a.attrelid = tcls.oid
+                JOIN unnest(ix.indkey) WITH ORDINALITY AS att(attnum, ordinality) ON a.attnum = att.attnum
+                JOIN t_filter ON t_filter.schema = ns.nspname AND t_filter.name = tcls.relname
+                WHERE tcls.relkind = 'r'
+                    AND icls.relname NOT LIKE '%_pkey'
+                GROUP BY ns.nspname, tcls.relname, icls.relname, ix.indisunique, am.amname
+                ORDER BY ns.nspname, tcls.relname, icls.relname;
+            """
+            return await conn.fetch(idx_query, *params)
+
+    async def list_tables_info(self, connection) -> list[dict[str, Any]]:
+        """Get list of tables with basic information for PostgreSQL."""
+        pool = await connection.get_pool()
+        async with pool.acquire() as conn:
+            # Get table names and basic info without row counts for better performance
+            tables_query = """
+                SELECT
+                    table_schema,
+                    table_name,
+                    table_type
+                FROM information_schema.tables
+                WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+                ORDER BY table_schema, table_name;
+            """
+            tables = await conn.fetch(tables_query)
+
+            # Convert to expected format
+            return [
+                {
+                    "table_schema": table["table_schema"],
+                    "table_name": table["table_name"],
+                    "table_type": table["table_type"],
+                }
+                for table in tables
+            ]
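
To make the difference between the two introspectors' table filters concrete, here is a small self-contained sketch (not part of sqlsaber) that mirrors the placeholder-building logic: the MySQL version repeats %s row constructors, while the PostgreSQL version numbers its $n parameters for a VALUES list.

# Illustrative re-implementation of the table-filter placeholder logic.
tables = [
    {"table_schema": "app", "table_name": "users"},
    {"table_schema": "app", "table_name": "orders"},
]

pairs = [(t["table_schema"], t["table_name"]) for t in tables]
params = [value for pair in pairs for value in pair]

# MySQL style: repeated %s row constructors, bound positionally by aiomysql
mysql_placeholders = ", ".join(["(%s, %s)"] * len(pairs))

# PostgreSQL style: numbered $n parameters consumed by asyncpg's conn.fetch(query, *params)
pg_values_clause = ", ".join(f"(${2 * i + 1}, ${2 * i + 2})" for i in range(len(pairs)))

print(mysql_placeholders)  # (%s, %s), (%s, %s)
print(pg_values_clause)    # ($1, $2), ($3, $4)
print(params)              # ['app', 'users', 'app', 'orders']
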