mcp-dbutils 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_dbutils/__init__.py +9 -9
- mcp_dbutils/base.py +383 -58
- mcp_dbutils/config.py +12 -12
- mcp_dbutils/postgres/__init__.py +3 -3
- mcp_dbutils/postgres/config.py +95 -34
- mcp_dbutils/postgres/handler.py +446 -14
- mcp_dbutils/postgres/server.py +16 -16
- mcp_dbutils/sqlite/__init__.py +3 -3
- mcp_dbutils/sqlite/config.py +12 -12
- mcp_dbutils/sqlite/handler.py +361 -77
- mcp_dbutils/sqlite/server.py +21 -21
- mcp_dbutils/stats.py +112 -3
- mcp_dbutils-0.10.0.dist-info/METADATA +227 -0
- mcp_dbutils-0.10.0.dist-info/RECORD +18 -0
- mcp_dbutils-0.8.0.dist-info/METADATA +0 -358
- mcp_dbutils-0.8.0.dist-info/RECORD +0 -18
- {mcp_dbutils-0.8.0.dist-info → mcp_dbutils-0.10.0.dist-info}/WHEEL +0 -0
- {mcp_dbutils-0.8.0.dist-info → mcp_dbutils-0.10.0.dist-info}/entry_points.txt +0 -0
- {mcp_dbutils-0.8.0.dist-info → mcp_dbutils-0.10.0.dist-info}/licenses/LICENSE +0 -0
mcp_dbutils/postgres/handler.py
CHANGED
```diff
@@ -1,31 +1,31 @@
-"""PostgreSQL
+"""PostgreSQL connection handler implementation"""
 
 import psycopg2
 from psycopg2.pool import SimpleConnectionPool
 import mcp.types as types
 
-from ..base import
-from .config import
+from ..base import ConnectionHandler, ConnectionHandlerError
+from .config import PostgreSQLConfig
 
-class
+class PostgreSQLHandler(ConnectionHandler):
     @property
     def db_type(self) -> str:
         return 'postgres'
 
-    def __init__(self, config_path: str,
+    def __init__(self, config_path: str, connection: str, debug: bool = False):
         """Initialize PostgreSQL handler
 
         Args:
             config_path: Path to configuration file
-
+            connection: Database connection name
             debug: Enable debug mode
         """
-        super().__init__(config_path,
-        self.config =
+        super().__init__(config_path, connection, debug)
+        self.config = PostgreSQLConfig.from_yaml(config_path, connection)
 
         # No connection pool creation during initialization
         masked_params = self.config.get_masked_connection_info()
-        self.log("debug", f"Configuring
+        self.log("debug", f"Configuring connection with parameters: {masked_params}")
         self.pool = None
 
     async def get_tables(self) -> list[types.Resource]:
@@ -47,16 +47,16 @@ class PostgresHandler(DatabaseHandler):
                 tables = cur.fetchall()
                 return [
                     types.Resource(
-                        uri=f"postgres://{self.
+                        uri=f"postgres://{self.connection}/{table[0]}/schema",
                         name=f"{table[0]} schema",
                         description=table[1] if table[1] else None,
                         mimeType="application/json"
                     ) for table in tables
                 ]
         except psycopg2.Error as e:
-            error_msg = f"Failed to get
+            error_msg = f"Failed to get constraint information: [Code: {e.pgcode}] {e.pgerror or str(e)}"
             self.stats.record_error(e.__class__.__name__)
-            raise
+            raise ConnectionHandlerError(error_msg)
         finally:
             if conn:
                 conn.close()
@@ -109,7 +109,7 @@ class PostgresHandler(DatabaseHandler):
         except psycopg2.Error as e:
             error_msg = f"Failed to read table schema: [Code: {e.pgcode}] {e.pgerror or str(e)}"
             self.stats.record_error(e.__class__.__name__)
-            raise
+            raise ConnectionHandlerError(error_msg)
         finally:
             if conn:
                 conn.close()
@@ -144,7 +144,439 @@ class PostgresHandler(DatabaseHandler):
                     cur.execute("ROLLBACK")
         except psycopg2.Error as e:
             error_msg = f"[{self.db_type}] Query execution failed: [Code: {e.pgcode}] {e.pgerror or str(e)}"
-            raise
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def get_table_description(self, table_name: str) -> str:
+        """Get detailed table description"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # 获取表的基本信息和注释
+                cur.execute("""
+                    SELECT obj_description(
+                        (quote_ident(table_schema) || '.' || quote_ident(table_name))::regclass,
+                        'pg_class'
+                    ) as table_comment
+                    FROM information_schema.tables
+                    WHERE table_name = %s
+                """, (table_name,))
+                table_info = cur.fetchone()
+                table_comment = table_info[0] if table_info else None
+
+                # 获取列信息
+                cur.execute("""
+                    SELECT
+                        column_name,
+                        data_type,
+                        column_default,
+                        is_nullable,
+                        character_maximum_length,
+                        numeric_precision,
+                        numeric_scale,
+                        col_description(
+                            (quote_ident(table_schema) || '.' || quote_ident(table_name))::regclass,
+                            ordinal_position
+                        ) as column_comment
+                    FROM information_schema.columns
+                    WHERE table_name = %s
+                    ORDER BY ordinal_position
+                """, (table_name,))
+                columns = cur.fetchall()
+
+                # 格式化输出
+                description = [
+                    f"Table: {table_name}",
+                    f"Comment: {table_comment or 'No comment'}\n",
+                    "Columns:"
+                ]
+
+                for col in columns:
+                    col_info = [
+                        f"  {col[0]} ({col[1]})",
+                        f"    Nullable: {col[3]}",
+                        f"    Default: {col[2] or 'None'}"
+                    ]
+
+                    if col[4]: # character_maximum_length
+                        col_info.append(f"    Max Length: {col[4]}")
+                    if col[5]: # numeric_precision
+                        col_info.append(f"    Precision: {col[5]}")
+                    if col[6]: # numeric_scale
+                        col_info.append(f"    Scale: {col[6]}")
+                    if col[7]: # column_comment
+                        col_info.append(f"    Comment: {col[7]}")
+
+                    description.extend(col_info)
+                    description.append("") # Empty line between columns
+
+                return "\n".join(description)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to get index information: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def get_table_ddl(self, table_name: str) -> str:
+        """Get DDL statement for creating table"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # 获取列定义
+                cur.execute("""
+                    SELECT
+                        column_name,
+                        data_type,
+                        column_default,
+                        is_nullable,
+                        character_maximum_length,
+                        numeric_precision,
+                        numeric_scale
+                    FROM information_schema.columns
+                    WHERE table_name = %s
+                    ORDER BY ordinal_position
+                """, (table_name,))
+                columns = cur.fetchall()
+
+                # 获取约束
+                cur.execute("""
+                    SELECT
+                        conname as constraint_name,
+                        pg_get_constraintdef(c.oid) as constraint_def
+                    FROM pg_constraint c
+                    JOIN pg_class t ON c.conrelid = t.oid
+                    WHERE t.relname = %s
+                """, (table_name,))
+                constraints = cur.fetchall()
+
+                # 构建CREATE TABLE语句
+                ddl = [f"CREATE TABLE {table_name} ("]
+
+                # 添加列定义
+                column_defs = []
+                for col in columns:
+                    col_def = [f"    {col[0]} {col[1]}"]
+
+                    if col[4]: # character_maximum_length
+                        col_def[0] = f"{col_def[0]}({col[4]})"
+                    elif col[5]: # numeric_precision
+                        if col[6]: # numeric_scale
+                            col_def[0] = f"{col_def[0]}({col[5]},{col[6]})"
+                        else:
+                            col_def[0] = f"{col_def[0]}({col[5]})"
+
+                    if col[2]: # default
+                        col_def.append(f"DEFAULT {col[2]}")
+                    if col[3] == 'NO': # not null
+                        col_def.append("NOT NULL")
+
+                    column_defs.append(" ".join(col_def))
+
+                # 添加约束定义
+                for con in constraints:
+                    column_defs.append(f"    CONSTRAINT {con[0]} {con[1]}")
+
+                ddl.append(",\n".join(column_defs))
+                ddl.append(");")
+
+                # 添加注释
+                cur.execute("""
+                    SELECT
+                        c.column_name,
+                        col_description(
+                            (quote_ident(table_schema) || '.' || quote_ident(table_name))::regclass,
+                            c.ordinal_position
+                        ) as column_comment,
+                        obj_description(
+                            (quote_ident(table_schema) || '.' || quote_ident(table_name))::regclass,
+                            'pg_class'
+                        ) as table_comment
+                    FROM information_schema.columns c
+                    WHERE c.table_name = %s
+                """, (table_name,))
+                comments = cur.fetchall()
+
+                for comment in comments:
+                    if comment[2]: # table comment
+                        ddl.append(f"\nCOMMENT ON TABLE {table_name} IS '{comment[2]}';")
+                    if comment[1]: # column comment
+                        ddl.append(f"COMMENT ON COLUMN {table_name}.{comment[0]} IS '{comment[1]}';")
+
+                return "\n".join(ddl)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to get table DDL: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def get_table_indexes(self, table_name: str) -> str:
+        """Get index information for table"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # 获取索引信息
+                cur.execute("""
+                    SELECT
+                        i.relname as index_name,
+                        a.attname as column_name,
+                        CASE
+                            WHEN ix.indisprimary THEN 'PRIMARY KEY'
+                            WHEN ix.indisunique THEN 'UNIQUE'
+                            ELSE 'INDEX'
+                        END as index_type,
+                        am.amname as index_method,
+                        pg_get_indexdef(ix.indexrelid) as index_def,
+                        obj_description(i.oid, 'pg_class') as index_comment
+                    FROM pg_class t
+                    JOIN pg_index ix ON t.oid = ix.indrelid
+                    JOIN pg_class i ON ix.indexrelid = i.oid
+                    JOIN pg_am am ON i.relam = am.oid
+                    JOIN pg_attribute a ON t.oid = a.attrelid
+                    WHERE t.relname = %s
+                    AND a.attnum = ANY(ix.indkey)
+                    ORDER BY i.relname, a.attnum
+                """, (table_name,))
+                indexes = cur.fetchall()
+
+                if not indexes:
+                    return f"No indexes found on table {table_name}"
+
+                # 按索引名称分组
+                current_index = None
+                formatted_indexes = []
+                index_info = []
+
+                for idx in indexes:
+                    if current_index != idx[0]:
+                        if index_info:
+                            formatted_indexes.extend(index_info)
+                            formatted_indexes.append("")
+                        current_index = idx[0]
+                        index_info = [
+                            f"Index: {idx[0]}",
+                            f"Type: {idx[2]}",
+                            f"Method: {idx[3]}",
+                            "Columns:",
+                        ]
+                        if idx[5]: # index comment
+                            index_info.insert(1, f"Comment: {idx[5]}")
+
+                    index_info.append(f"  - {idx[1]}")
+
+                if index_info:
+                    formatted_indexes.extend(index_info)
+
+                return "\n".join(formatted_indexes)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to get index information: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def get_table_stats(self, table_name: str) -> str:
+        """Get table statistics information"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # Get table statistics
+                cur.execute("""
+                    SELECT
+                        c.reltuples::bigint as row_estimate,
+                        pg_size_pretty(pg_total_relation_size(c.oid)) as total_size,
+                        pg_size_pretty(pg_table_size(c.oid)) as table_size,
+                        pg_size_pretty(pg_indexes_size(c.oid)) as index_size,
+                        age(c.relfrozenxid) as xid_age,
+                        c.relhasindex as has_indexes,
+                        c.relpages::bigint as pages,
+                        c.relallvisible::bigint as visible_pages
+                    FROM pg_class c
+                    JOIN pg_namespace n ON n.oid = c.relnamespace
+                    WHERE c.relname = %s AND n.nspname = 'public'
+                """, (table_name,))
+                stats = cur.fetchone()
+
+                if not stats:
+                    return f"No statistics found for table {table_name}"
+
+                # Get column statistics
+                cur.execute("""
+                    SELECT
+                        a.attname as column_name,
+                        s.null_frac * 100 as null_percent,
+                        s.n_distinct as distinct_values,
+                        pg_column_size(a.attname::text) as approx_width
+                    FROM pg_stats s
+                    JOIN pg_attribute a ON a.attrelid = %s::regclass
+                        AND a.attnum > 0
+                        AND a.attname = s.attname
+                    WHERE s.schemaname = 'public'
+                    AND s.tablename = %s
+                    ORDER BY a.attnum;
+                """, (table_name, table_name))
+                column_stats = cur.fetchall()
+
+                # Format the output
+                output = [
+                    f"Table Statistics for {table_name}:",
+                    f"  Estimated Row Count: {stats[0]:,}",
+                    f"  Total Size: {stats[1]}",
+                    f"  Table Size: {stats[2]}",
+                    f"  Index Size: {stats[3]}",
+                    f"  Transaction ID Age: {stats[4]:,}",
+                    f"  Has Indexes: {stats[5]}",
+                    f"  Total Pages: {stats[6]:,}",
+                    f"  Visible Pages: {stats[7]:,}\n",
+                    "Column Statistics:"
+                ]
+
+                for col in column_stats:
+                    col_info = [
+                        f"  {col[0]}:",
+                        f"    Null Values: {col[1]:.1f}%",
+                        f"    Distinct Values: {col[2] if col[2] >= 0 else 'Unknown'}",
+                        f"    Average Width: {col[3]}"
+                    ]
+                    output.extend(col_info)
+                    output.append("") # Empty line between columns
+
+                return "\n".join(output)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to get table statistics: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def get_table_constraints(self, table_name: str) -> str:
+        """Get constraint information for table"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # Get all constraints
+                cur.execute("""
+                    SELECT
+                        con.conname as constraint_name,
+                        con.contype as constraint_type,
+                        pg_get_constraintdef(con.oid) as definition,
+                        CASE con.contype
+                            WHEN 'p' THEN 'Primary Key'
+                            WHEN 'f' THEN 'Foreign Key'
+                            WHEN 'u' THEN 'Unique'
+                            WHEN 'c' THEN 'Check'
+                            WHEN 't' THEN 'Trigger'
+                            ELSE 'Unknown'
+                        END as type_desc,
+                        con.condeferrable as is_deferrable,
+                        con.condeferred as is_deferred,
+                        obj_description(con.oid, 'pg_constraint') as comment
+                    FROM pg_constraint con
+                    JOIN pg_class rel ON rel.oid = con.conrelid
+                    JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
+                    WHERE rel.relname = %s
+                    ORDER BY con.contype, con.conname
+                """, (table_name,))
+                constraints = cur.fetchall()
+
+                if not constraints:
+                    return f"No constraints found on table {table_name}"
+
+                # Format constraints by type
+                output = [f"Constraints for {table_name}:"]
+                current_type = None
+
+                for con in constraints:
+                    if current_type != con[3]:
+                        current_type = con[3]
+                        output.append(f"\n{current_type} Constraints:")
+
+                    output.extend([
+                        f"  {con[0]}:",
+                        f"    Definition: {con[2]}"
+                    ])
+
+                    if con[4]: # is_deferrable
+                        output.append(f"    Deferrable: {'Deferred' if con[5] else 'Immediate'}")
+
+                    if con[6]: # comment
+                        output.append(f"    Comment: {con[6]}")
+
+                    output.append("") # Empty line between constraints
+
+                return "\n".join(output)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to get constraint information: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
+        finally:
+            if conn:
+                conn.close()
+
+    async def explain_query(self, sql: str) -> str:
+        """Get query execution plan"""
+        conn = None
+        try:
+            conn_params = self.config.get_connection_params()
+            conn = psycopg2.connect(**conn_params)
+            with conn.cursor() as cur:
+                # Get both regular and analyze explain plans
+                # Get EXPLAIN output (without execution)
+                cur.execute("""
+                    EXPLAIN (FORMAT TEXT, VERBOSE, COSTS)
+                    {}
+                """.format(sql))
+                regular_plan = cur.fetchall()
+
+                # Get EXPLAIN ANALYZE output (with actual execution)
+                cur.execute("""
+                    EXPLAIN (ANALYZE, FORMAT TEXT, VERBOSE, COSTS, TIMING)
+                    {}
+                """.format(sql))
+                analyze_plan = cur.fetchall()
+
+                output = [
+                    "Query Execution Plan:",
+                    "==================",
+                    "\nEstimated Plan:",
+                    "----------------"
+                ]
+                output.extend(line[0] for line in regular_plan)
+
+                output.extend([
+                    "\nActual Plan (ANALYZE):",
+                    "----------------------"
+                ])
+                output.extend(line[0] for line in analyze_plan)
+
+                return "\n".join(output)
+
+        except psycopg2.Error as e:
+            error_msg = f"Failed to explain query: [Code: {e.pgcode}] {e.pgerror or str(e)}"
+            self.stats.record_error(e.__class__.__name__)
+            raise ConnectionHandlerError(error_msg)
         finally:
             if conn:
                 conn.close()
```
mcp_dbutils/postgres/server.py
CHANGED
```diff
@@ -4,14 +4,14 @@ from psycopg2.pool import SimpleConnectionPool
 from typing import Optional, List
 import mcp.types as types
 from importlib.metadata import metadata
-from ..base import
+from ..base import ConnectionServer
 from ..log import create_logger
-from .config import
+from .config import PostgreSQLConfig
 
 # 获取包信息用于日志命名
 pkg_meta = metadata("mcp-dbutils")
-class
-    def __init__(self, config:
+class PostgreSQLServer(ConnectionServer):
+    def __init__(self, config: PostgreSQLConfig, config_path: Optional[str] = None):
         """初始化PostgreSQL服务器
         Args:
             config: 数据库配置
@@ -32,9 +32,9 @@ class PostgresServer(DatabaseServer):
             self.log("info", "测试连接成功")
             # 创建连接池
             self.pool = SimpleConnectionPool(1, 5, **conn_params)
-            self.log("info", "
+            self.log("info", "连接池创建成功")
         except psycopg2.Error as e:
-            self.log("error", f"
+            self.log("error", f"连接失败: [Code: {e.pgcode}] {e.pgerror or str(e)}")
             raise
     async def list_resources(self) -> list[types.Resource]:
         """列出所有表资源"""
@@ -124,9 +124,9 @@ class PostgresServer(DatabaseServer):
                 inputSchema={
                     "type": "object",
                     "properties": {
-                        "
+                        "connection": {
                             "type": "string",
-                            "description": "
+                            "description": "数据库连接名称(可选)"
                         },
                         "sql": {
                             "type": "string",
@@ -147,16 +147,16 @@ class PostgresServer(DatabaseServer):
         # 仅允许SELECT语句
         if not sql.lower().startswith("select"):
             raise ValueError("仅支持SELECT查询")
-
+        connection = arguments.get("connection")
         use_pool = True
         conn = None
         try:
-            if
-                #
-                config =
+            if connection and self.config_path:
+                # 使用指定的数据库连接
+                config = PostgreSQLConfig.from_yaml(self.config_path, connection)
                 conn_params = config.get_connection_params()
                 masked_params = config.get_masked_connection_info()
-                self.log("info", f"使用配置 {
+                self.log("info", f"使用配置 {connection} 连接数据库: {masked_params}")
                 conn = psycopg2.connect(**conn_params)
                 use_pool = False
             else:
@@ -173,7 +173,7 @@ class PostgresServer(DatabaseServer):
             formatted_results = [dict(zip(columns, row)) for row in results]
             result_text = str({
                 'type': 'postgres',
-                'config_name':
+                'config_name': connection or 'default',
                 'query_result': {
                     'columns': columns,
                     'rows': formatted_results,
@@ -191,7 +191,7 @@ class PostgresServer(DatabaseServer):
             error = f"查询执行失败: {str(e)}"
             error_msg = str({
                 'type': 'postgres',
-                'config_name':
+                'config_name': connection or 'default',
                 'error': error
             })
             self.log("error", error_msg)
@@ -205,5 +205,5 @@ class PostgresServer(DatabaseServer):
     async def cleanup(self):
         """清理资源"""
         if hasattr(self, 'pool'):
-            self.log("info", "
+            self.log("info", "关闭连接池")
             self.pool.closeall()
```
mcp_dbutils/sqlite/__init__.py
CHANGED