adbpg-mcp-server 1.0.4__py3-none-any.whl → 1.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adbpg_mcp_server-1.0.6.dist-info/METADATA +10 -0
- adbpg_mcp_server-1.0.6.dist-info/RECORD +6 -0
- {adbpg_mcp_server-1.0.4.dist-info → adbpg_mcp_server-1.0.6.dist-info}/WHEEL +1 -2
- adbpg_mcp_server-1.0.6.dist-info/entry_points.txt +2 -0
- adbpg_mcp_server-1.0.6.dist-info/licenses/LICENSE +201 -0
- adbpg_mcp_server.py +1110 -0
- adbpg_mcp_server/__init__.py +0 -4
- adbpg_mcp_server/adbpg_mcp_server.py +0 -422
- adbpg_mcp_server-1.0.4.dist-info/METADATA +0 -128
- adbpg_mcp_server-1.0.4.dist-info/RECORD +0 -8
- adbpg_mcp_server-1.0.4.dist-info/entry_points.txt +0 -2
- adbpg_mcp_server-1.0.4.dist-info/licenses/LICENSE +0 -19
- adbpg_mcp_server-1.0.4.dist-info/top_level.txt +0 -1
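To reproduce this comparison locally, both wheels can be downloaded and their archives listed; this is a minimal sketch, assuming the PyPI project name `adbpg-mcp-server`, a scratch directory of your choosing, and the normalized wheel filenames implied by the title above:

```shell
# Download the two published wheels (dependencies are not needed for inspection)
pip download adbpg-mcp-server==1.0.4 --no-deps -d /tmp/adbpg-wheels
pip download adbpg-mcp-server==1.0.6 --no-deps -d /tmp/adbpg-wheels

# Wheels are zip archives; list their members to see the layout change
python -m zipfile -l /tmp/adbpg-wheels/adbpg_mcp_server-1.0.4-py3-none-any.whl
python -m zipfile -l /tmp/adbpg-wheels/adbpg_mcp_server-1.0.6-py3-none-any.whl
```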
adbpg_mcp_server/adbpg_mcp_server.py
DELETED
@@ -1,422 +0,0 @@
import asyncio
import logging
import os
import sys
import psycopg
from psycopg import OperationalError as Error
from mcp.server import Server
from mcp.types import Resource, Tool, TextContent, ResourceTemplate
from pydantic import AnyUrl
from dotenv import load_dotenv
from mcp.server.stdio import stdio_server

# Configure logging, writing to standard error
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    stream=sys.stderr
)
logger = logging.getLogger("adbpg_mcp_server")

# Load environment variables
try:
    load_dotenv()
    logger.info("Environment variables loaded")

    # Check the required environment variables
    required_vars = ["ADBPG_HOST", "ADBPG_PORT", "ADBPG_USER", "ADBPG_PASSWORD", "ADBPG_DATABASE"]
    missing_vars = [var for var in required_vars if not os.getenv(var)]
    if missing_vars:
        error_msg = f"Missing required environment variables: {', '.join(missing_vars)}"
        logger.error(error_msg)
        raise ValueError(error_msg)

    logger.info("All required environment variables are set")
except Exception as e:
    logger.error(f"Error loading environment variables: {e}")
    sys.exit(1)

SERVER_VERSION = "0.1.0"

def get_db_config():
    """Get database connection settings from environment variables."""
    try:
        config = {
            "host": os.getenv("ADBPG_HOST", "localhost"),
            "port": os.getenv("ADBPG_PORT"),
            "user": os.getenv("ADBPG_USER"),
            "password": os.getenv("ADBPG_PASSWORD"),
            "dbname": os.getenv("ADBPG_DATABASE"),
            "application_name": f"adbpg-mcp-server-{SERVER_VERSION}"
        }

        # Log the configuration (password excluded)
        logger.info(f"Database config: host={config['host']}, port={config['port']}, user={config['user']}, dbname={config['dbname']}")

        return config
    except Exception as e:
        logger.error(f"Error getting database config: {str(e)}")
        raise

# Initialize the server
try:
    app = Server("adbpg-mcp-server")
    logger.info("MCP server initialized")
except Exception as e:
    logger.error(f"Error initializing MCP server: {e}")
    sys.exit(1)

@app.list_resources()
async def list_resources() -> list[Resource]:
    """List the available base resources."""
    try:
        return [
            Resource(
                uri="adbpg:///schemas",
                name="All Schemas",
                description="AnalyticDB PostgreSQL schemas. List all schemas in the database",
                mimeType="text/plain"
            )
        ]
    except Exception as e:
        logger.error(f"Error listing resources: {str(e)}")
        raise

@app.list_resource_templates()
async def list_resource_templates() -> list[ResourceTemplate]:
    """
    Define dynamic resource templates.

    Returns:
        list[ResourceTemplate]: list of resource templates,
        including templates for:
        - listing the tables in a schema
        - getting a table's DDL
        - getting a table's statistics
    """
    return [
        ResourceTemplate(
            uriTemplate="adbpg:///{schema}/tables",  # table list template
            name="Schema Tables",
            description="List all tables in a specific schema",
            mimeType="text/plain"
        ),
        ResourceTemplate(
            uriTemplate="adbpg:///{schema}/{table}/ddl",  # table DDL template
            name="Table DDL",
            description="Get the DDL script of a table in a specific schema",
            mimeType="text/plain"
        ),
        ResourceTemplate(
            uriTemplate="adbpg:///{schema}/{table}/statistics",  # table statistics template
            name="Table Statistics",
            description="Get statistics information of a table",
            mimeType="text/plain"
        )
    ]

@app.read_resource()
async def read_resource(uri: AnyUrl) -> str:
    """
    Read resource content.

    Args:
        uri (AnyUrl): resource URI

    Returns:
        str: resource content

    Supported URI formats:
    - adbpg:///schemas: list all schemas
    - adbpg:///{schema}/tables: list the tables in the given schema
    - adbpg:///{schema}/{table}/ddl: get the table's DDL
    - adbpg:///{schema}/{table}/statistics: get the table's statistics
    """
    config = get_db_config()
    uri_str = str(uri)

    if not uri_str.startswith("adbpg:///"):
        raise ValueError(f"Invalid URI scheme: {uri_str}")

    try:
        with psycopg.connect(**config) as conn:  # open the database connection
            conn.autocommit = True  # enable autocommit
            with conn.cursor() as cursor:  # create a cursor
                path_parts = uri_str[9:].split('/')  # parse the URI path

                if path_parts[0] == "schemas":
                    # List all schemas
                    query = """
                        SELECT schema_name
                        FROM information_schema.schemata
                        WHERE schema_name NOT IN ('pg_catalog', 'information_schema')
                        ORDER BY schema_name;
                    """
                    cursor.execute(query)
                    schemas = cursor.fetchall()
                    return "\n".join([schema[0] for schema in schemas])

                elif len(path_parts) == 2 and path_parts[1] == "tables":
                    # List the tables in the given schema
                    schema = path_parts[0]
                    query = f"""
                        SELECT table_name, table_type
                        FROM information_schema.tables
                        WHERE table_schema = %s
                        ORDER BY table_name;
                    """
                    cursor.execute(query, (schema,))
                    tables = cursor.fetchall()
                    return "\n".join([f"{table[0]} ({table[1]})" for table in tables])

                elif len(path_parts) == 3 and path_parts[2] == "ddl":
                    # Get the table's DDL
                    schema = path_parts[0]
                    table = path_parts[1]
                    query = f"""
                        SELECT pg_get_ddl('{schema}.{table}'::regclass);
                    """
                    cursor.execute(query)
                    ddl = cursor.fetchone()
                    return ddl[0] if ddl else f"No DDL found for {schema}.{table}"

                elif len(path_parts) == 3 and path_parts[2] == "statistics":
                    # Get the table's statistics
                    schema = path_parts[0]
                    table = path_parts[1]
                    query = """
                        SELECT
                            schemaname,
                            tablename,
                            attname,
                            null_frac,
                            avg_width,
                            n_distinct,
                            most_common_vals,
                            most_common_freqs
                        FROM pg_stats
                        WHERE schemaname = %s AND tablename = %s
                        ORDER BY attname;
                    """
                    cursor.execute(query, (schema, table))
                    rows = cursor.fetchall()
                    if not rows:
                        return f"No statistics found for {schema}.{table}"

                    result = []
                    for row in rows:
                        result.append(f"Column: {row[2]}")
                        result.append(f" Null fraction: {row[3]}")
                        result.append(f" Average width: {row[4]}")
                        result.append(f" Distinct values: {row[5]}")
                        if row[6]:
                            result.append(f" Most common values: {row[6]}")
                            result.append(f" Most common frequencies: {row[7]}")
                        result.append("")
                    return "\n".join(result)

                raise ValueError(f"Invalid resource URI format: {uri_str}")

    except Error as e:
        raise RuntimeError(f"Database error: {str(e)}")

@app.list_tools()
async def list_tools() -> list[Tool]:
    """
    List the available tools.

    Returns:
        list[Tool]: list of tools, including:
        - execute_select_sql: run SELECT queries
        - execute_dml_sql: run DML statements
        - execute_ddl_sql: run DDL statements
        - analyze_table: collect table statistics
        - explain_query: get query execution plans
    """
    return [
        Tool(
            name="execute_select_sql",
            description="Execute SELECT SQL to query data from ADBPG database.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The (SELECT) SQL query to execute"
                    }
                },
                "required": ["query"]
            }
        ),
        Tool(
            name="execute_dml_sql",
            description="Execute (INSERT, UPDATE, DELETE) SQL to modify data in ADBPG database.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The DML SQL query to execute"
                    }
                },
                "required": ["query"]
            }
        ),
        Tool(
            name="execute_ddl_sql",
            description="Execute (CREATE, ALTER, DROP) SQL statements to manage database objects.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The DDL SQL query to execute"
                    }
                },
                "required": ["query"]
            }
        ),
        Tool(
            name="analyze_table",
            description="Execute ANALYZE command to collect table statistics.",
            inputSchema={
                "type": "object",
                "properties": {
                    "schema": {
                        "type": "string",
                        "description": "Schema name"
                    },
                    "table": {
                        "type": "string",
                        "description": "Table name"
                    }
                },
                "required": ["schema", "table"]
            }
        ),
        Tool(
            name="explain_query",
            description="Get query execution plan.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The SQL query to analyze"
                    }
                },
                "required": ["query"]
            }
        )
    ]

@app.call_tool()
async def call_tool(name: str, arguments: dict) -> list[TextContent]:
    """
    Execute a tool operation.

    Args:
        name (str): tool name
        arguments (dict): tool arguments

    Returns:
        list[TextContent]: execution result

    Supported tools:
    - execute_select_sql: run SELECT queries
    - execute_dml_sql: run DML statements
    - execute_ddl_sql: run DDL statements
    - analyze_table: collect table statistics
    - explain_query: get query execution plans
    """
    config = get_db_config()

    # Dispatch on the tool name
    if name == "execute_select_sql":
        query = arguments.get("query")
        if not query:
            raise ValueError("Query is required")
        if not query.strip().upper().startswith("SELECT"):
            raise ValueError("Query must be a SELECT statement")
    elif name == "execute_dml_sql":
        query = arguments.get("query")
        if not query:
            raise ValueError("Query is required")
        if not any(query.strip().upper().startswith(keyword) for keyword in ["INSERT", "UPDATE", "DELETE"]):
            raise ValueError("Query must be a DML statement (INSERT, UPDATE, DELETE)")
    elif name == "execute_ddl_sql":
        query = arguments.get("query")
        if not query:
            raise ValueError("Query is required")
        if not any(query.strip().upper().startswith(keyword) for keyword in ["CREATE", "ALTER", "DROP"]):
            raise ValueError("Query must be a DDL statement (CREATE, ALTER, DROP)")
    elif name == "analyze_table":
        schema = arguments.get("schema")
        table = arguments.get("table")
        if not all([schema, table]):
            raise ValueError("Schema and table are required")
        query = f"ANALYZE {schema}.{table}"
    elif name == "explain_query":
        query = arguments.get("query")
        if not query:
            raise ValueError("Query is required")
        query = f"EXPLAIN {query}"
    else:
        raise ValueError(f"Unknown tool: {name}")

    try:
        with psycopg.connect(**config) as conn:
            conn.autocommit = True
            with conn.cursor() as cursor:
                cursor.execute(query)

                if name == "analyze_table":
                    return [TextContent(type="text", text=f"Successfully analyzed table {schema}.{table}")]

                if cursor.description:
                    columns = [desc[0] for desc in cursor.description]
                    rows = cursor.fetchall()
                    result = [",".join(map(str, row)) for row in rows]
                    return [TextContent(type="text", text="\n".join([",".join(columns)] + result))]
                else:
                    return [TextContent(type="text", text="Query executed successfully")]
    except Exception as e:
        return [TextContent(type="text", text=f"Error executing query: {str(e)}")]

async def main():
    """Main entry point for the server."""
    try:
        config = get_db_config()
        logger.info("Starting ADBPG MCP server...")

        # Test the database connection
        try:
            with psycopg.connect(**config) as conn:
                logger.info("Successfully connected to database")
        except Exception as e:
            logger.error(f"Failed to connect to database: {e}")
            sys.exit(1)

        # Use stdio transport
        async with stdio_server() as (read_stream, write_stream):
            try:
                logger.info("Running MCP server with stdio transport...")
                await app.run(
                    read_stream=read_stream,
                    write_stream=write_stream,
                    initialization_options=app.create_initialization_options()
                )
            except Exception as e:
                logger.error(f"Error running server: {str(e)}")
                raise
    except Exception as e:
        logger.error(f"Server initialization error: {str(e)}")
        raise

def run():
    """Run the MCP server."""
    asyncio.run(main())

if __name__ == "__main__":
    run()
adbpg_mcp_server-1.0.4.dist-info/METADATA
DELETED
@@ -1,128 +0,0 @@
Metadata-Version: 2.4
Name: adbpg_mcp_server
Version: 1.0.4
Summary: ADBPG MCP Server
Home-page: https://github.com/aliyun/alibabacloud-adbpg-mcp-server
Author: Yutian Qiu
Author-email: qiuytian@gmail.com
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: psycopg>=3.1.0
Requires-Dist: psycopg-binary>=3.1.0
Requires-Dist: mcp>=1.4.0
Requires-Dist: pydantic>=2.0.0
Requires-Dist: python-dotenv>=1.0.0
Dynamic: author
Dynamic: author-email
Dynamic: classifier
Dynamic: description
Dynamic: description-content-type
Dynamic: home-page
Dynamic: license-file
Dynamic: requires-dist
Dynamic: requires-python
Dynamic: summary

# AnalyticDB PostgreSQL MCP Server

AnalyticDB PostgreSQL MCP Server serves as a universal interface between AI Agents and AnalyticDB PostgreSQL databases. It enables seamless communication between AI Agents and AnalyticDB PostgreSQL, helping AI Agents retrieve database metadata and execute SQL operations.

## Configuration

#### Download

Download from GitHub:

```shell
git clone https://github.com/aliyun/alibabacloud-adbpg-mcp-server.git
```

#### MCP Integration

Add the following configuration to the MCP client configuration file:

```json
"mcpServers": {
  "adbpg-mcp-server": {
    "command": "uv",
    "args": [
      "--directory",
      "/path/to/adbpg-mcp-server",
      "run",
      "adbpg-mcp-server"
    ],
    "env": {
      "ADBPG_HOST": "host",
      "ADBPG_PORT": "port",
      "ADBPG_USER": "username",
      "ADBPG_PASSWORD": "password",
      "ADBPG_DATABASE": "database"
    }
  }
}
```

## Components

### Tools

* `execute_select_sql`: Execute SELECT SQL queries on the AnalyticDB PostgreSQL server
* `execute_dml_sql`: Execute DML (INSERT, UPDATE, DELETE) SQL queries on the AnalyticDB PostgreSQL server
* `execute_ddl_sql`: Execute DDL (CREATE, ALTER, DROP) SQL queries on the AnalyticDB PostgreSQL server
* `analyze_table`: Collect table statistics
* `explain_query`: Get query execution plan

### Resources

#### Built-in Resources

* `adbpg:///schemas`: Get all schemas in the database

#### Resource Templates

* `adbpg:///{schema}/tables`: List all tables in a specific schema
* `adbpg:///{schema}/{table}/ddl`: Get table DDL
* `adbpg:///{schema}/{table}/statistics`: Show table statistics

## Environment Variables

The MCP server requires the following environment variables to connect to an AnalyticDB PostgreSQL instance:

- `ADBPG_HOST`: Database host address
- `ADBPG_PORT`: Database port
- `ADBPG_USER`: Database username
- `ADBPG_PASSWORD`: Database password
- `ADBPG_DATABASE`: Database name

## Dependencies

- Python 3.10 or higher
- Required packages:
  - mcp >= 1.4.0
  - psycopg >= 3.1.0
  - python-dotenv >= 1.0.0
  - pydantic >= 2.0.0

## Running

```bash
# Create and activate a virtual environment
uv venv .venv
source .venv/bin/activate  # Linux/Mac
# or
.venv\Scripts\activate  # Windows

# Install dependencies
uv pip install -e .

# Run the server
uv run adbpg-mcp-server
```
adbpg_mcp_server-1.0.4.dist-info/RECORD
DELETED
@@ -1,8 +0,0 @@
adbpg_mcp_server/__init__.py,sha256=7emeTRXWecoiR32c5Jr_KFnO94NAlTP5FOJOHRfgMjo,82
adbpg_mcp_server/adbpg_mcp_server.py,sha256=PP42ZblxSY68dUUpElywnoJ-2DHfoVrxChH9GeTnGnU,15246
adbpg_mcp_server-1.0.4.dist-info/licenses/LICENSE,sha256=KfhlNIQJu0to_CCmMo4whKiv23yAI_yKJSufqkJN1nw,1073
adbpg_mcp_server-1.0.4.dist-info/METADATA,sha256=ROtUqARP6-LLfMy0lv7MTkxnI_O5EEAvZohdy18TWD0,3144
adbpg_mcp_server-1.0.4.dist-info/WHEEL,sha256=ooBFpIzZCPdw3uqIQsOo4qqbA4ZRPxHnOH7peeONza0,91
adbpg_mcp_server-1.0.4.dist-info/entry_points.txt,sha256=hAsJBdSxCnAe8Y1D8O_vf1PclV_HDbVXAn3TZ6fSNmc,75
adbpg_mcp_server-1.0.4.dist-info/top_level.txt,sha256=krAI593mc5XIlEZEMGe8qBvn-uqkJtMAr2tBld6ea3M,17
adbpg_mcp_server-1.0.4.dist-info/RECORD,,
adbpg_mcp_server-1.0.4.dist-info/licenses/LICENSE
DELETED
@@ -1,19 +0,0 @@
Copyright (c) 2025 The Python Packaging Authority

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
adbpg_mcp_server-1.0.4.dist-info/top_level.txt
DELETED
@@ -1 +0,0 @@
adbpg_mcp_server