adbpg-mcp-server 1.0.4__py3-none-any.whl → 1.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
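For orientation while reading the added module below: the server loads its settings with load_dotenv() and refuses to start unless the ADBPG_* variables are present, while the GRAPHRAG_* and LLMEMORY_* groups are optional and only gate the corresponding tools. A minimal .env sketch follows; the variable names are taken from the code below, and every value shown is an illustrative placeholder, not a documented default.

ADBPG_HOST=localhost
ADBPG_PORT=5432
ADBPG_USER=<db-user>
ADBPG_PASSWORD=<db-password>
ADBPG_DATABASE=<db-name>
# Optional: enables the adbpg_graphrag_* tools
GRAPHRAG_LLM_MODEL=<model-name>
GRAPHRAG_API_KEY=<api-key>
GRAPHRAG_BASE_URL=<llm-endpoint>
GRAPHRAG_EMBEDDING_MODEL=<embedding-model>
GRAPHRAG_EMBEDDING_BASE_URL=<embedding-endpoint>
GRAPHRAG_EMBEDDING_API_KEY=<api-key>
# Optional: enables the adbpg_llm_memory_* tools
LLMEMORY_LLM_MODEL=<model-name>
LLMEMORY_API_KEY=<api-key>
LLMEMORY_BASE_URL=<llm-endpoint>
LLMEMORY_EMBEDDING_MODEL=<embedding-model>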
adbpg_mcp_server.py ADDED
@@ -0,0 +1,1143 @@
1
+ import asyncio
2
+ import logging
3
+ import os
4
+ import sys
5
+ import json
6
+ import psycopg
7
+ import re
8
+ import ast
9
+ from psycopg import OperationalError as Error
10
+ from psycopg import Connection
11
+ from mcp.server import Server
12
+ from mcp.types import Resource, Tool, TextContent, ResourceTemplate
13
+ from pydantic import AnyUrl
14
+ from dotenv import load_dotenv
15
+ from mcp.server.stdio import stdio_server
16
+
17
+ # Configure logging; write to standard error
18
+ logging.basicConfig(
19
+ level=logging.DEBUG,
20
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
21
+ stream=sys.stderr
22
+ )
23
+ logger = logging.getLogger("adbpg-mcp-server")
24
+
25
+ # Flags recording whether the optional environment variable sets are complete
26
+ GRAPHRAG_ENV_IS_READY = True
27
+ LLMEMORY_ENV_IS_READY = True
28
+ # Load environment variables
29
+ try:
30
+ load_dotenv()
31
+ logger.info("Environment variables loaded")
32
+
33
+ # Check the required environment variables
34
+ required_vars = ["ADBPG_HOST", "ADBPG_PORT", "ADBPG_USER", "ADBPG_PASSWORD", "ADBPG_DATABASE"]
35
+ missing_vars = [var for var in required_vars if not os.getenv(var)]
36
+ if missing_vars:
37
+ error_msg = f"Missing required environment variables: {', '.join(missing_vars)}"
38
+ logger.error(error_msg)
39
+ raise ValueError(error_msg)
40
+ logger.info("All ADBPG required environment variables are set")
41
+
42
+ # Check the graphrag / llmemory environment variables
43
+ required_graphrag_vars = [
44
+ "GRAPHRAG_LLM_MODEL",
45
+ "GRAPHRAG_API_KEY",
46
+ "GRAPHRAG_BASE_URL",
47
+ "GRAPHRAG_EMBEDDING_MODEL",
48
+ "GRAPHRAG_EMBEDDING_BASE_URL",
49
+ "GRAPHRAG_EMBEDDING_API_KEY"
50
+ ]
51
+ missing_graphrag_vars = [var for var in required_graphrag_vars if not os.getenv(var)]
52
+ if missing_graphrag_vars:
53
+ GRAPHRAG_ENV_IS_READY = False
54
+ error_msg = f"Missing required graphrag environment variables:{', '.join(missing_graphrag_vars)}"
55
+ logger.error(error_msg)
56
+ else:
57
+ logger.info("All graphRAG required environment variables are set")
58
+
59
+ required_llmemory_vars = ["LLMEMORY_LLM_MODEL", "LLMEMORY_API_KEY", "LLMEMORY_BASE_URL", "LLMEMORY_EMBEDDING_MODEL"]
60
+ missing_llmemory_vars = [var for var in required_llmemory_vars if not os.getenv(var)]
61
+ if missing_llmemory_vars:
62
+ LLMEMORY_ENV_IS_READY = False
63
+ error_msg = f"Missing required llm memory environment variables:{', '.join(missing_llmemory_vars)}"
64
+ logger.error(error_msg)
65
+ else:
66
+ logger.info("All llm memory required environment variables are set")
67
+
68
+
69
+ except Exception as e:
70
+ logger.error(f"Error loading environment variables: {e}")
71
+ sys.exit(1)
72
+
73
+ SERVER_VERSION = "0.1.0"
74
+
75
+
76
+ # Build the graphrag initialization config
77
+ def get_graphrag_config():
78
+ graphrag_config = {
79
+ "llm_model": os.getenv("GRAPHRAG_LLM_MODEL"),
80
+ "llm_api_key": os.getenv("GRAPHRAG_API_KEY"),
81
+ "llm_url": os.getenv("GRAPHRAG_BASE_URL"),
82
+ "embedding_model": os.getenv("GRAPHRAG_EMBEDDING_MODEL"),
83
+ "embedding_api_key": os.getenv("GRAPHRAG_EMBEDDING_API_KEY"),
84
+ "embedding_url": os.getenv("GRAPHRAG_EMBEDDING_BASE_URL"),
85
+ "language": os.getenv("GRAPHRAG_LANGUAGE", "English"),
86
+ "entity_types": os.getenv("GRAPHRAG_ENTITY_TYPES"),
87
+ "relationship_types": os.getenv("GRAPHRAG_RELATIONSHIP_TYPES"),
88
+ "postgres_password": os.getenv("ADBPG_PASSWORD")
89
+ }
90
+ return graphrag_config
91
+
92
+ # Build the llmemory initialization config
93
+ def get_llmemory_config():
94
+ llm_memory_config = {
95
+ "llm": {
96
+ "provider": "openai",
97
+ "config": {
98
+ "model": os.getenv("LLMEMORY_LLM_MODEL"),
99
+ "openai_base_url": os.getenv("LLMEMORY_BASE_URL"),
100
+ "api_key": os.getenv("LLMEMORY_API_KEY")
101
+ }
102
+ },
103
+ "embedder": {
104
+ "provider": "openai",
105
+ "config": {
106
+ "model": os.getenv("LLMEMORY_EMBEDDING_MODEL"),
107
+ "embedding_dims": os.getenv("LLMEMORY_EMBEDDING_DIMS", 1024),
108
+ "api_key": os.getenv("LLMEMORY_API_KEY"),
109
+ "openai_base_url": os.getenv("LLMEMORY_BASE_URL")
110
+ }
111
+ },
112
+ "vector_store": {
113
+ "provider": "adbpg",
114
+ "config": {
115
+ "user": os.getenv("ADBPG_USER"),
116
+ "password": os.getenv("ADBPG_PASSWORD"),
117
+ "dbname": os.getenv("ADBPG_DATABASE"),
118
+ "hnsw": "True",
119
+ "embedding_model_dims": os.getenv("LLMEMORY_EMBEDDING_DIMS", 1024)
120
+ }
121
+ }
122
+ }
123
+ return llm_memory_config
124
+
125
+ def get_db_config():
126
+ """从环境变量获取数据库配置信息"""
127
+ try:
128
+ config = {
129
+ "host": os.getenv("ADBPG_HOST", "localhost"),
130
+ "port": os.getenv("ADBPG_PORT"),
131
+ "user": os.getenv("ADBPG_USER"),
132
+ "password": os.getenv("ADBPG_PASSWORD"),
133
+ "dbname": os.getenv("ADBPG_DATABASE"),
134
+ "application_name": f"adbpg-mcp-server-{SERVER_VERSION}"
135
+ }
136
+
137
+ # Log the config (without the password)
138
+ logger.info(f"Database config: host={config['host']}, port={config['port']}, user={config['user']}, dbname={config['dbname']}")
139
+ return config
140
+ except Exception as e:
141
+ logger.error(f"Error getting database config: {str(e)}")
142
+ raise
143
+
144
+ # Global long-lived graphrag connection and its initialization
145
+ GRAPHRAG_CONN: Connection | None = None
146
+ def get_graphrag_tool_connection() -> Connection:
147
+ global GRAPHRAG_CONN
148
+ global GRAPHRAG_ENV_IS_READY
149
+ config = get_db_config()
150
+ # Reconnect if there is no connection or it has been closed
151
+ if GRAPHRAG_CONN is None or GRAPHRAG_CONN.closed:
152
+ GRAPHRAG_CONN = psycopg.connect(**config)
153
+ GRAPHRAG_CONN.autocommit = True
154
+ try:
155
+ graphrag_conn = GRAPHRAG_CONN
156
+ with graphrag_conn.cursor() as cursor:
157
+ cursor.execute("SELECT adbpg_graphrag.initialize(%s::json);", (graphrag_config_str,))
158
+ logger.info(f"[GraphRAG] Use the connection {id(graphrag_conn)} when executing the graphrag init")
159
+ logger.info("Successfully initialize the graphrag server\n")
160
+ except Exception as e:
161
+ GRAPHRAG_ENV_IS_READY = False
162
+ logger.error(f"Failed to initialize the graphrag server: {e}")
163
+ # Re-run initialization
164
+ else:
165
+ # Run a lightweight query to check that the connection is healthy
166
+ try:
167
+ with GRAPHRAG_CONN.cursor() as cur:
168
+ cur.execute("SELECT 1;")
169
+ _ = cur.fetchone()
170
+ except Exception:
171
+ # Reconnect
172
+ GRAPHRAG_CONN.close()
173
+ GRAPHRAG_CONN = psycopg.connect(**config)
174
+ GRAPHRAG_CONN.autocommit = True
175
+ # Re-run initialization
176
+ try:
177
+ graphrag_conn = GRAPHRAG_CONN
178
+ with graphrag_conn.cursor() as cursor:
179
+ cursor.execute("SELECT adbpg_graphrag.initialize(%s::json);", (graphrag_config_str,))
180
+ logger.info(f"[GraphRAG] Use the connection {id(graphrag_conn)} when executing the graphrag init")
181
+ logger.info("Successfully initialize the graphrag server\n")
182
+ except Exception as e:
183
+ GRAPHRAG_ENV_IS_READY = False
184
+ logger.error(f"Failed to initialize the graphrag server: {e}")
185
+
186
+ return GRAPHRAG_CONN
187
+
188
+ LLM_MEMORY_CONN: Connection | None = None
189
+ def get_llm_memory_tool_connection() -> Connection:
190
+ global LLMEMORY_ENV_IS_READY
191
+ global LLM_MEMORY_CONN
192
+ config = get_db_config()
193
+ # Reconnect if there is no connection or it has been closed
194
+ if LLM_MEMORY_CONN is None or LLM_MEMORY_CONN.closed:
195
+ LLM_MEMORY_CONN = psycopg.connect(**config)
196
+ LLM_MEMORY_CONN.autocommit = True
197
+ try:
198
+ llm_memory_conn = LLM_MEMORY_CONN
199
+ with llm_memory_conn.cursor() as cursor:
200
+ cursor.execute("SELECT adbpg_llm_memory.config(%s::json)", (llm_memory_config_str,))
201
+ logger.info(f"[LLM Memory] Use the connection {id(llm_memory_conn)} when executing the llm_memory init")
202
+ logger.info("Successfully initialize the llm server\n")
203
+ except Exception as e:
204
+ LLMEMORY_ENV_IS_READY = False
205
+ logger.error(f"Failed to initialize the llm_memory server: {e}")
206
+ else:
207
+ # Run a lightweight query to check that the connection is healthy
208
+ try:
209
+ with LLM_MEMORY_CONN.cursor() as cur:
210
+ cur.execute("SELECT 1;")
211
+ _ = cur.fetchone()
212
+ except Exception:
213
+ # Reconnect
214
+ LLM_MEMORY_CONN.close()
215
+ LLM_MEMORY_CONN = psycopg.connect(**config)
216
+ LLM_MEMORY_CONN.autocommit = True
217
+ try:
218
+ llm_memory_conn = LLM_MEMORY_CONN
219
+ with llm_memory_conn.cursor() as cursor:
220
+ cursor.execute("SELECT adbpg_llm_memory.config(%s::json)", (llm_memory_config_str,))
221
+ logger.info(f"[LLM Memory] Use the connection {id(llm_memory_conn)} when executing the llm_memory init")
222
+ logger.info("Successfully initialize the llm server\n")
223
+ except Exception as e:
224
+ LLMEMORY_ENV_IS_READY = False
225
+ logger.error(f"Failed to initialize the llm_memory server: {e}")
226
+
227
+ return LLM_MEMORY_CONN
228
+
229
+ #### Initialization
230
+ if GRAPHRAG_ENV_IS_READY == True:
231
+ # Initialize graphrag
232
+ logger.info("Starting graphRAG server...")
233
+ graphrag_config = get_graphrag_config()
234
+ graphrag_config_str = json.dumps(graphrag_config)
235
+ # Establish the long-lived connection and initialize
236
+ get_graphrag_tool_connection()
237
+ if LLMEMORY_ENV_IS_READY == True:
238
+ # Initialize llmemory
239
+ logger.info("Starting llmemory server...")
240
+ llm_memory_config = get_llmemory_config()
241
+ llm_memory_config_str = json.dumps(llm_memory_config)
242
+ # Establish the long-lived connection and initialize
243
+ get_llm_memory_tool_connection()
244
+
245
+ # Initialize the MCP server
246
+ try:
247
+ app = Server("adbpg-mcp-server")
248
+ logger.info("MCP server initialized")
249
+ except Exception as e:
250
+ logger.error(f"Error initializing MCP server: {e}")
251
+ sys.exit(1)
252
+
253
+ @app.list_resources()
254
+ async def list_resources() -> list[Resource]:
255
+ """列出可用的基本资源"""
256
+ try:
257
+ return [
258
+ Resource(
259
+ uri="adbpg:///schemas",
260
+ name="All Schemas",
261
+ description="AnalyticDB PostgreSQL schemas. List all schemas in the database",
262
+ mimeType="text/plain"
263
+ )
264
+ ]
265
+ except Exception as e:
266
+ logger.error(f"Error listing resources: {str(e)}")
267
+ raise
268
+
269
+ @app.list_resource_templates()
270
+ async def list_resource_templates() -> list[ResourceTemplate]:
271
+ """
272
+ Define dynamic resource templates
273
+
274
+ Returns:
275
+ list[ResourceTemplate]: list of resource templates
276
+ Includes the following templates:
277
+ - List the tables in a schema
278
+ - Get the DDL of a table
279
+ - Get table statistics
280
+ """
281
+ return [
282
+ ResourceTemplate(
283
+ uriTemplate="adbpg:///{schema}/tables", # 表列表模板
284
+ name="Schema Tables",
285
+ description="List all tables in a specific schema",
286
+ mimeType="text/plain"
287
+ ),
288
+ ResourceTemplate(
289
+ uriTemplate="adbpg:///{schema}/{table}/ddl", # 表DDL模板
290
+ name="Table DDL",
291
+ description="Get the DDL script of a table in a specific schema",
292
+ mimeType="text/plain"
293
+ ),
294
+ ResourceTemplate(
295
+ uriTemplate="adbpg:///{schema}/{table}/statistics", # 表统计信息模板
296
+ name="Table Statistics",
297
+ description="Get statistics information of a table",
298
+ mimeType="text/plain"
299
+ )
300
+ ]
301
+
302
+ @app.read_resource()
303
+ async def read_resource(uri: AnyUrl) -> str:
304
+ """
305
+ Read resource content
306
+
307
+ Args:
308
+ uri (AnyUrl): resource URI
309
+
310
+ Returns:
311
+ str: resource content
312
+
313
+ Supported URI formats:
314
+ - adbpg:///schemas: list all schemas
315
+ - adbpg:///{schema}/tables: list the tables in the given schema
316
+ - adbpg:///{schema}/{table}/ddl: get the DDL of a table
317
+ - adbpg:///{schema}/{table}/statistics: get the statistics of a table
318
+ """
319
+ config = get_db_config()
320
+ uri_str = str(uri)
321
+
322
+ if not uri_str.startswith("adbpg:///"):
323
+ raise ValueError(f"Invalid URI scheme: {uri_str}")
324
+
325
+ try:
326
+ with psycopg.connect(**config) as conn: # open a database connection
327
+ conn.autocommit = True # enable autocommit
328
+ with conn.cursor() as cursor: # create a cursor
329
+ path_parts = uri_str[9:].split('/') # parse the URI path
330
+
331
+ if path_parts[0] == "schemas":
332
+ # List all schemas
333
+ query = """
334
+ SELECT schema_name
335
+ FROM information_schema.schemata
336
+ WHERE schema_name NOT IN ('pg_catalog', 'information_schema')
337
+ ORDER BY schema_name;
338
+ """
339
+ cursor.execute(query)
340
+ schemas = cursor.fetchall()
341
+ return "\n".join([schema[0] for schema in schemas])
342
+
343
+ elif len(path_parts) == 2 and path_parts[1] == "tables":
344
+ # List the tables in the given schema
345
+ schema = path_parts[0]
346
+ query = f"""
347
+ SELECT table_name, table_type
348
+ FROM information_schema.tables
349
+ WHERE table_schema = %s
350
+ ORDER BY table_name;
351
+ """
352
+ cursor.execute(query, (schema,))
353
+ tables = cursor.fetchall()
354
+ return "\n".join([f"{table[0]} ({table[1]})" for table in tables])
355
+
356
+ elif len(path_parts) == 3 and path_parts[2] == "ddl":
357
+ # Get the DDL of the table
358
+ schema = path_parts[0]
359
+ table = path_parts[1]
360
+ query = f"""
361
+ SELECT pg_get_ddl('{schema}.{table}'::regclass);
362
+ """
363
+ cursor.execute(query)
364
+ ddl = cursor.fetchone()
365
+ return ddl[0] if ddl else f"No DDL found for {schema}.{table}"
366
+
367
+ elif len(path_parts) == 3 and path_parts[2] == "statistics":
368
+ # Get the statistics of the table
369
+ schema = path_parts[0]
370
+ table = path_parts[1]
371
+ query = """
372
+ SELECT
373
+ schemaname,
374
+ tablename,
375
+ attname,
376
+ null_frac,
377
+ avg_width,
378
+ n_distinct,
379
+ most_common_vals,
380
+ most_common_freqs
381
+ FROM pg_stats
382
+ WHERE schemaname = %s AND tablename = %s
383
+ ORDER BY attname;
384
+ """
385
+ cursor.execute(query, (schema, table))
386
+ rows = cursor.fetchall()
387
+ if not rows:
388
+ return f"No statistics found for {schema}.{table}"
389
+
390
+ result = []
391
+ for row in rows:
392
+ result.append(f"Column: {row[2]}")
393
+ result.append(f" Null fraction: {row[3]}")
394
+ result.append(f" Average width: {row[4]}")
395
+ result.append(f" Distinct values: {row[5]}")
396
+ if row[6]:
397
+ result.append(f" Most common values: {row[6]}")
398
+ result.append(f" Most common frequencies: {row[7]}")
399
+ result.append("")
400
+ return "\n".join(result)
401
+
402
+ raise ValueError(f"Invalid resource URI format: {uri_str}")
403
+
404
+ except Error as e:
405
+ raise RuntimeError(f"Database error: {str(e)}")
406
+
407
+ @app.list_tools()
408
+ async def list_tools() -> list[Tool]:
409
+ """
410
+ List the available tools
411
+
412
+ Returns:
413
+ list[Tool]: list of tools
414
+ Includes the following tools:
415
+ - execute_select_sql: execute a SELECT query
416
+ - execute_dml_sql: execute a DML statement
417
+ - execute_ddl_sql: execute a DDL statement
418
+ - analyze_table: collect table statistics
419
+ - explain_query: get the query execution plan
420
+
421
+ - adbpg_graphrag_upload: run a graphRAG upload, i.e. upload a text document
422
+ - adbpg_graphrag_query: run a graphRAG query
423
+ - adbpg_graphrag_upload_decision_tree: upload a decision tree
424
+ - adbpg_graphrag_append_decision_tree: append a subtree under a given node
425
+ - adbpg_graphrag_delete_decision_tree: delete the subtree below the given node id
426
+
427
+ - adbpg_llm_memory_add: add new memories
428
+ - adbpg_llm_memory_get_all: get all memories
429
+ - adbpg_llm_memory_search: retrieve memories relevant to a query
430
+ - adbpg_llm_memory_delete_all: delete all memories
431
+ """
432
+ return [
433
+ Tool(
434
+ name="execute_select_sql",
435
+ description="Execute SELECT SQL to query data from ADBPG database.",
436
+ inputSchema={
437
+ "type": "object",
438
+ "properties": {
439
+ "query": {
440
+ "type": "string",
441
+ "description": "The (SELECT) SQL query to execute"
442
+ }
443
+ },
444
+ "required": ["query"]
445
+ }
446
+ ),
447
+ Tool(
448
+ name="execute_dml_sql",
449
+ description="Execute (INSERT, UPDATE, DELETE) SQL to modify data in ADBPG database.",
450
+ inputSchema={
451
+ "type": "object",
452
+ "properties": {
453
+ "query": {
454
+ "type": "string",
455
+ "description": "The DML SQL query to execute"
456
+ }
457
+ },
458
+ "required": ["query"]
459
+ }
460
+ ),
461
+ Tool(
462
+ name="execute_ddl_sql",
463
+ description="Execute (CREATE, ALTER, DROP) SQL statements to manage database objects.",
464
+ inputSchema={
465
+ "type": "object",
466
+ "properties": {
467
+ "query": {
468
+ "type": "string",
469
+ "description": "The DDL SQL query to execute"
470
+ }
471
+ },
472
+ "required": ["query"]
473
+ }
474
+ ),
475
+ Tool(
476
+ name="analyze_table",
477
+ description="Execute ANALYZE command to collect table statistics.",
478
+ inputSchema={
479
+ "type": "object",
480
+ "properties": {
481
+ "schema": {
482
+ "type": "string",
483
+ "description": "Schema name"
484
+ },
485
+ "table": {
486
+ "type": "string",
487
+ "description": "Table name"
488
+ }
489
+ },
490
+ "required": ["schema", "table"]
491
+ }
492
+ ),
493
+ Tool(
494
+ name="explain_query",
495
+ description="Get query execution plan.",
496
+ inputSchema={
497
+ "type": "object",
498
+ "properties": {
499
+ "query": {
500
+ "type": "string",
501
+ "description": "The SQL query to analyze"
502
+ }
503
+ },
504
+ "required": ["query"]
505
+ }
506
+ ),
507
+
508
+ #### graphrag & llm_memory tool list
509
+ Tool(
510
+ name = "adbpg_graphrag_upload",
511
+ description = "Execute graphrag upload operation",
512
+ # Args: filename text, context text
513
+ # filename is the file name; context is the file content
514
+ inputSchema = {
515
+ "type": "object",
516
+ "properties": {
517
+ "filename": {
518
+ "type": "string",
519
+ "description": "The file name need to upload"
520
+ },
521
+ "context": {
522
+ "type": "string",
523
+ "description": "the context of your file"
524
+ }
525
+ },
526
+ "required": ["filename", "context"]
527
+ }
528
+ ),
529
+ Tool(
530
+ name = "adbpg_graphrag_query",
531
+ description = "Execute graphrag query operation",
532
+ # Args: query_str text, [query_mode text]
533
+ # query_str is the question to ask; query_mode selects the query mode
534
+ inputSchema = {
535
+ "type": "object",
536
+ "properties": {
537
+ "query_str": {
538
+ "type": "string",
539
+ "description": "The query you want to ask"
540
+ },
541
+ "query_mode": {
542
+ "type": "string",
543
+ "description": "The query mode you need to choose [ bypass,naive, local, global, hybrid, mix[default], tree ]."
544
+ },
545
+ "start_search_node_id": {
546
+ "type": "string",
547
+ "description": "If using 'tree' query mode, set the start node ID of tree."
548
+ }
549
+ },
550
+ "required": ["query_str"]
551
+ }
552
+ ),
553
+ Tool(
554
+ name = "adbpg_graphrag_upload_decision_tree",
555
+ description = " Upload a decision tree with the specified root_node. If the root_node does not exist, a new decision tree will be created. ",
556
+ # context text, root_node text
557
+ inputSchema = {
558
+ "type": "object",
559
+ "properties": {
560
+ "root_node": {
561
+ "type": "string",
562
+ "description": "the root_noot (optional)"
563
+ },
564
+ "context": {
565
+ "type": "string",
566
+ "description": "the context of decision"
567
+ }
568
+ },
569
+ "required": ["context"]
570
+ }
571
+ ),
572
+ Tool(
573
+ name = "adbpg_graphrag_append_decision_tree",
574
+ description = "Append a subtree to an existing decision tree at the node specified by root_node_id. ",
575
+ # para: context text, root_node_id text
576
+ inputSchema = {
577
+ "type": "object",
578
+ "properties": {
579
+ "root_node_id": {
580
+ "type": "string",
581
+ "description": "the root_noot_id"
582
+ },
583
+ "context": {
584
+ "type": "string",
585
+ "description": "the context of decision"
586
+ }
587
+ },
588
+ "required": ["context", "root_node_id"]
589
+ }
590
+ ),
591
+ Tool(
592
+ name = "adbpg_graphrag_delete_decision_tree",
593
+ description = " Delete a sub-decision tree under the node specified by root_node_entity. ",
594
+ # para: root_node_entity text
595
+ inputSchema = {
596
+ "type": "object",
597
+ "properties": {
598
+ "root_node_entity": {
599
+ "type": "string",
600
+ "description": "the root_noot_entity"
601
+
602
+ }
603
+ },
604
+ "required": ["root_node_entity"]
605
+ }
606
+ ),
607
+ Tool(
608
+ name = "adbpg_graphrag_reset_tree_query",
609
+ description = " Reset the decision tree in the tree query mode",
610
+ # para:
611
+ inputSchema = {
612
+ "type": "object",
613
+ "required": []
614
+ }
615
+ ),
616
+ Tool(
617
+ name = "adbpg_llm_memory_add",
618
+ description = "Execute llm_memory add operation",
619
+ # Args: messages json, user_id text, run_id text, agent_id text, metadata json
620
+ # Add new memories
621
+ inputSchema={
622
+ "type": "object",
623
+ "properties": {
624
+ "messages": {
625
+ "type": "array",
626
+ "items": {
627
+ "type": "object",
628
+ "properties": {
629
+ "role": {"type": "string"},
630
+ "content": {"type": "string"}
631
+ },
632
+ "required": ["role", "content"]
633
+ },
634
+ "description": "List of messages objects (e.g., conversation history)"
635
+ },
636
+ "user_id": {
637
+ "type": "string",
638
+ "description": "the user_id"
639
+ },
640
+ "run_id": {
641
+ "type": "string",
642
+ "description": "the run_id"
643
+ },
644
+ "agent_id": {
645
+ "type": "string",
646
+ "description": "the agent_id"
647
+ },
648
+ "metadata": {
649
+ "type": "object",
650
+ "description": "the metatdata json"
651
+ },
652
+ "memory_type": {
653
+ "type": "string",
654
+ "description": "the memory_type text"
655
+ },
656
+ "prompt": {
657
+ "type": "string",
658
+ "description": "the prompt"
659
+ }
660
+ },
661
+ "required": ["messages"]
662
+ }
663
+ ),
664
+ Tool(
665
+ name = "adbpg_llm_memory_get_all",
666
+ description = "Execute llm_memory get_all operation",
667
+ # Args: user_id text, run_id text, agent_id text
668
+ # Get all memories of a given user or agent
669
+ inputSchema={
670
+ "type": "object",
671
+ "properties": {
672
+ "user_id": {
673
+ "type": "string",
674
+ "description": "The user_id"
675
+ },
676
+ "run_id": {
677
+ "type": "string",
678
+ "description": "The run_id"
679
+ },
680
+ "agent_id": {
681
+ "type": "string",
682
+ "description": "The agent_id"
683
+ }
684
+ },
685
+ "required": []
686
+ }
687
+ ),
688
+ Tool(
689
+ name = "adbpg_llm_memory_search",
690
+ description = "Execute llm_memory search operation",
691
+ # Args: query text, user_id text, run_id text, agent_id text, filter json
692
+ # Retrieve memories relevant to the given query
693
+ inputSchema={
694
+ "type": "object",
695
+ "properties": {
696
+ "query": {
697
+ "type": "string",
698
+ "description": "llm_memory relevant query"
699
+ },
700
+ "user_id": {
701
+ "type": "string",
702
+ "description": "The search of user_id"
703
+ },
704
+ "run_id": {
705
+ "type": "string",
706
+ "description": "The search of run_id"
707
+ },
708
+ "agent_id": {
709
+ "type": "string",
710
+ "description": "The search of agent_id"
711
+ },
712
+ "filter": {
713
+ "type": "object",
714
+ "description": "The search of filter"
715
+ }
716
+ },
717
+ "required": ["query"]
718
+ }
719
+ )
720
+ ,
721
+ Tool(
722
+ name = "adbpg_llm_memory_delete_all",
723
+ description = "Execute llm_memory delete_all operation",
724
+ # Args: user_id text, run_id text, agent_id text
725
+ # Delete all memories of a given user or agent
726
+ inputSchema={
727
+ "type": "object",
728
+ "properties": {
729
+ "user_id": {
730
+ "type": "string",
731
+ "description": "The user_id"
732
+ },
733
+ "run_id": {
734
+ "type": "string",
735
+ "description": "The run_id"
736
+ },
737
+ "agent_id": {
738
+ "type": "string",
739
+ "description": "The agent_id"
740
+ }
741
+ },
742
+ "required": []
743
+ }
744
+ )
745
+
746
+ ]
747
+
748
+ def get_graphrag_tool_result(wrapped_sql, params) -> list[TextContent]:
749
+ try:
750
+ conn = get_graphrag_tool_connection()
751
+ with conn.cursor() as cursor:
752
+ cursor.execute(wrapped_sql, params)
753
+ if cursor.description:
754
+ json_result = cursor.fetchone()[0]
755
+ return [TextContent(type="text", text=json_result)]
756
+ else:
757
+ return [TextContent(type="text", text="graphrag command executed successfully")]
758
+ except Exception as e:
759
+ return [TextContent(type="text", text=f"Error executing graphrag command: {str(e)}")]
760
+
761
+ def get_llm_memory_tool_result(wrapped_sql, params) -> list[TextContent]:
762
+ try:
763
+ conn = get_llm_memory_tool_connection()
764
+ with conn.cursor() as cursor:
765
+
766
+ cursor.execute(wrapped_sql, params)
767
+
768
+ if cursor.description:
769
+ json_result = cursor.fetchone()[0]
770
+ return [TextContent(type="text", text=json_result)]
771
+ else:
772
+ return [TextContent(type="text", text="llm_memory command executed successfully")]
773
+ except Exception as e:
774
+ return [TextContent(type="text", text=f"Error executing llm_memory command: {str(e)}")]
775
+
776
+
777
+ @app.call_tool()
778
+ async def call_tool(name: str, arguments: dict) -> list[TextContent]:
779
+ """
780
+ Execute a tool operation
781
+
782
+ Args:
783
+ name (str): tool name
784
+ arguments (dict): tool arguments
785
+
786
+ Returns:
787
+ list[TextContent]: execution result
788
+
789
+ Supported tools:
790
+ - execute_select_sql: execute a SELECT query
791
+ - execute_dml_sql: execute a DML statement
792
+ - execute_ddl_sql: execute a DDL statement
793
+ - analyze_table: collect table statistics
794
+ - explain_query: get the query execution plan
795
+
796
+ - adbpg_graphrag_upload: run a graphRAG upload, i.e. upload a text document
797
+ - adbpg_graphrag_query: run a graphRAG query
798
+
799
+ - adbpg_llm_memory_add: add new memories
800
+ - adbpg_llm_memory_get_all: get all memories
801
+ - adbpg_llm_memory_search: retrieve memories relevant to a query
802
+ - adbpg_llm_memory_delete_all: delete all memories
803
+ """
804
+ config = get_db_config()
805
+ global GRAPHRAG_ENV_IS_READY
806
+ # Handle the operation according to the tool name
807
+ if name == "execute_select_sql":
808
+ query = arguments.get("query")
809
+ if not query:
810
+ raise ValueError("Query is required")
811
+ if not query.strip().upper().startswith("SELECT"):
812
+ raise ValueError("Query must be a SELECT statement")
813
+ query = query.rstrip().rstrip(';')
814
+ query = f"""
815
+ SELECT json_agg(row_to_json(t))
816
+ FROM ({query}) AS t
817
+ """
818
+ elif name == "execute_dml_sql":
819
+ query = arguments.get("query")
820
+ if not query:
821
+ raise ValueError("Query is required")
822
+ if not any(query.strip().upper().startswith(keyword) for keyword in ["INSERT", "UPDATE", "DELETE"]):
823
+ raise ValueError("Query must be a DML statement (INSERT, UPDATE, DELETE)")
824
+ elif name == "execute_ddl_sql":
825
+ query = arguments.get("query")
826
+ if not query:
827
+ raise ValueError("Query is required")
828
+ if not any(query.strip().upper().startswith(keyword) for keyword in ["CREATE", "ALTER", "DROP", "TRUNCATE"]):
829
+ raise ValueError("Query must be a DDL statement (CREATE, ALTER, DROP)")
830
+ elif name == "analyze_table":
831
+ schema = arguments.get("schema")
832
+ table = arguments.get("table")
833
+ if not all([schema, table]):
834
+ raise ValueError("Schema and table are required")
835
+ query = f"ANALYZE {schema}.{table}"
836
+ elif name == "explain_query":
837
+ query = arguments.get("query")
838
+ if not query:
839
+ raise ValueError("Query is required")
840
+ query = f"EXPLAIN {query}"
841
+
842
+ # adbpg_graphrag tool
843
+ elif name == "adbpg_graphrag_upload":
844
+ # The GraphRAG service failed to initialize; this tool is unavailable
845
+ if GRAPHRAG_ENV_IS_READY == False:
846
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
847
+ filename = arguments.get("filename")
848
+ context = arguments.get("context")
849
+ if not filename:
850
+ raise ValueError("Filename is required")
851
+ if not context:
852
+ raise ValueError("Context if required")
853
+ # Build the SQL command
854
+ wrapped_sql = f"""
855
+ SELECT adbpg_graphrag.upload(%s::text, %s::text)
856
+ """
857
+ params = [filename, context]
858
+ return get_graphrag_tool_result(wrapped_sql, params)
859
+
860
+ elif name == "adbpg_graphrag_query":
861
+ # The GraphRAG service failed to initialize; this tool is unavailable
862
+ if GRAPHRAG_ENV_IS_READY == False:
863
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
864
+ query_str = arguments.get("query_str")
865
+ query_mode = arguments.get("query_mode")
866
+ start_search_node_id = arguments.get("start_search_node_id")
867
+
868
+ if not query_str:
869
+ raise ValueError("Query is required")
870
+ if not query_mode:
871
+ # default mode
872
+ query_mode = "mix"
873
+ if not start_search_node_id:
874
+ start_search_node_id = None
875
+
876
+ # Build the SQL command
877
+ wrapped_sql = f"""
878
+ SELECT adbpg_graphrag.query(%s::text, %s::text, %s::text)
879
+ """
880
+ params = [query_str, query_mode, start_search_node_id]
881
+ return get_graphrag_tool_result(wrapped_sql, params)
882
+
883
+ elif name == "adbpg_graphrag_reset_tree_query":
884
+ if GRAPHRAG_ENV_IS_READY == False:
885
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
886
+ wrapped_sql = f"""
887
+ SELECT adbpg_graphrag.reset_tree_query()
888
+ """
889
+ params = []
890
+ return get_graphrag_tool_result(wrapped_sql, params)
891
+
892
+ elif name == "adbpg_graphrag_upload_decision_tree":
893
+ if GRAPHRAG_ENV_IS_READY == False:
894
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
895
+ root_node = arguments.get("root_node")
896
+ context = arguments.get("context")
897
+ if not context:
898
+ raise ValueError("Decision Tree Context is required")
899
+ if not root_node:
900
+ root_node = None
901
+ wrapped_sql = f"""
902
+ SELECT adbpg_graphrag.upload_decision_tree(%s::text, %s::text)
903
+ """
904
+ params = [context, root_node]
905
+ return get_graphrag_tool_result(wrapped_sql, params)
906
+
907
+ elif name == "adbpg_graphrag_append_decision_tree":
908
+ if GRAPHRAG_ENV_IS_READY == False:
909
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
910
+ root_node = arguments.get("root_node_id")
911
+ context = arguments.get("context")
912
+ if not context:
913
+ raise ValueError("Decision Tree Context is required")
914
+ if not root_node:
915
+ raise ValueError("Root node id is required")
916
+ wrapped_sql = f"""
917
+ SELECT adbpg_graphrag.append_decision_tree(%s::text, %s::text)
918
+ """
919
+ params = [context, root_node]
920
+ return get_graphrag_tool_result(wrapped_sql, params)
921
+
922
+ elif name == "adbpg_graphrag_delete_decision_tree":
923
+ if GRAPHRAG_ENV_IS_READY == False:
924
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
925
+ root_node = arguments.get("root_node_entity")
926
+ if not root_node:
927
+ raise ValueError("Root node entity is required")
928
+ wrapped_sql = f"""
929
+ SELECT adbpg_graphrag.delete_decision_tree(%s::text)
930
+ """
931
+ params = [root_node]
932
+ return get_graphrag_tool_result(wrapped_sql, params)
933
+
934
+ # adbpg_llm_memory tool
935
+ elif name == "adbpg_llm_memory_add":
936
+ # The LLMEMORY service failed to initialize; this tool is unavailable
937
+ if LLMEMORY_ENV_IS_READY == False:
938
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
939
+
940
+ messages = arguments.get("messages")
941
+ if not messages:
942
+ raise ValueError("messages is required")
943
+ messages_str = json.dumps(messages, ensure_ascii = False)
944
+
945
+ user_id = arguments.get("user_id")
946
+ if not user_id:
947
+ user_id = None
948
+ run_id = arguments.get("run_id")
949
+ if not run_id:
950
+ run_id = None
951
+ agent_id = arguments.get("agent_id")
952
+ if not agent_id:
953
+ agent_id = None
954
+ if user_id == None and run_id == None and agent_id == None:
955
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
956
+
957
+ metadata = arguments.get("metadata")
958
+ metadata_str = None
959
+ if metadata:
960
+ metadata_str = json.dumps(metadata, ensure_ascii = False)
961
+
962
+ memory_type = arguments.get("memory_type")
963
+ memory_prompt = arguments.get("prompt")
964
+ if not memory_type:
965
+ memory_type = None
966
+ if not memory_prompt:
967
+ memory_prompt = None
968
+
969
+
970
+ wrapped_sql = """
971
+ SELECT adbpg_llm_memory.add(
972
+ %s::json,
973
+ %s::text,
974
+ %s::text,
975
+ %s::text,
976
+ %s::json,
977
+ %s::text,
978
+ %s::text
979
+ )
980
+ """
981
+ params = [messages_str, user_id, run_id, agent_id, metadata_str, memory_type, memory_prompt]
982
+ return get_llm_memory_tool_result(wrapped_sql, params)
983
+
984
+ elif name == "adbpg_llm_memory_get_all":
985
+ # The LLMEMORY service failed to initialize; this tool is unavailable
986
+ if LLMEMORY_ENV_IS_READY == False:
987
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
988
+
989
+ user_id = arguments.get("user_id")
990
+ if not user_id:
991
+ user_id = None
992
+ run_id = arguments.get("run_id")
993
+ if not run_id:
994
+ run_id = None
995
+ agent_id = arguments.get("agent_id")
996
+ if not agent_id:
997
+ agent_id = None
998
+ if user_id == None and run_id == None and agent_id == None:
999
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
1000
+ wrapped_sql = f"""
1001
+ SELECT adbpg_llm_memory.get_all(
1002
+ %s::text,
1003
+ %s::text,
1004
+ %s::text
1005
+ )
1006
+ """
1007
+ params = [user_id, run_id, agent_id]
1008
+ return get_llm_memory_tool_result(wrapped_sql, params)
1009
+
1010
+
1011
+ elif name == "adbpg_llm_memory_search":
1012
+ # The LLMEMORY service failed to initialize; this tool is unavailable
1013
+ if LLMEMORY_ENV_IS_READY == False:
1014
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
1015
+ query = arguments.get("query")
1016
+ if not query:
1017
+ raise ValueError("Query is required")
1018
+
1019
+ user_id = arguments.get("user_id")
1020
+ if not user_id:
1021
+ user_id = None
1022
+ run_id = arguments.get("run_id")
1023
+ if not run_id:
1024
+ run_id = None
1025
+ agent_id = arguments.get("agent_id")
1026
+ if not agent_id:
1027
+ agent_id = None
1028
+ if user_id == None and run_id == None and agent_id == None:
1029
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
1030
+
1031
+ filter_json = arguments.get("filter")
1032
+ filter_json_str = None
1033
+ # Serialize the filter to a JSON string
1034
+ if filter_json:
1035
+ filter_json_str = json.dumps(filter_json, ensure_ascii = False)
1036
+ # Build the SQL command
1037
+ wrapped_sql = f"""
1038
+ SELECT adbpg_llm_memory.search(
1039
+ %s::text,
1040
+ %s::text,
1041
+ %s::text,
1042
+ %s::text,
1043
+ %s::json
1044
+ )
1045
+ """
1046
+ params = [query, user_id, run_id, agent_id, filter_json_str]
1047
+ return get_llm_memory_tool_result(wrapped_sql, params)
1048
+
1049
+ elif name == "adbpg_llm_memory_delete_all":
1050
+ # The LLMEMORY service failed to initialize; this tool is unavailable
1051
+ if LLMEMORY_ENV_IS_READY == False:
1052
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
1053
+
1054
+ user_id = arguments.get("user_id")
1055
+ if not user_id:
1056
+ user_id = None
1057
+ run_id = arguments.get("run_id")
1058
+ if not run_id:
1059
+ run_id = None
1060
+ agent_id = arguments.get("agent_id")
1061
+ if not agent_id:
1062
+ agent_id = None
1063
+ if user_id == None and run_id == None and agent_id == None:
1064
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
1065
+
1066
+ wrapped_sql = f"""
1067
+ SELECT adbpg_llm_memory.delete_all(
1068
+ %s::text,
1069
+ %s::text,
1070
+ %s::text
1071
+ )
1072
+ """
1073
+ params = [user_id, run_id, agent_id]
1074
+ return get_llm_memory_tool_result(wrapped_sql, params)
1075
+
1076
+ else:
1077
+ raise ValueError(f"Unknown tool: {name}")
1078
+
1079
+ try:
1080
+ with psycopg.connect(**config) as conn:
1081
+ conn.autocommit = True
1082
+ with conn.cursor() as cursor:
1083
+
1084
+ cursor.execute(query)
1085
+
1086
+ if name == "analyze_table":
1087
+ return [TextContent(type="text", text=f"Successfully analyzed table {schema}.{table}")]
1088
+
1089
+ if cursor.description:
1090
+ # Store the returned result as JSON
1091
+ json_result = cursor.fetchone()[0]
1092
+ json_str = json.dumps(json_result, ensure_ascii = False, indent = 2)
1093
+ result = [TextContent(type="text", text=json_str)]
1094
+ try:
1095
+ json.loads(result[0].text)
1096
+ except json.JSONDecodeError as e:
1097
+ raise Exception(f"JSON decode error: {e}\nRaw text: {result[0].text}") from e
1098
+ return result
1099
+
1100
+ else:
1101
+ return [TextContent(type="text", text="Query executed successfully")]
1102
+ except Exception as e:
1103
+ return [TextContent(type="text", text=f"Error executing query: {str(e)}")]
1104
+
1105
+ async def main():
1106
+ """服务器主入口点"""
1107
+ try:
1108
+ config = get_db_config()
1109
+ logger.info("Starting ADBPG MCP server...")
1110
+
1111
+ # Test the database connection
1112
+ try:
1113
+ with psycopg.connect(**config) as conn:
1114
+ logger.info("Successfully connected to database")
1115
+ except Exception as e:
1116
+ logger.error(f"Failed to connect to database: {e}")
1117
+ sys.exit(1)
1118
+ # Use stdio transport
1119
+ async with stdio_server() as (read_stream, write_stream):
1120
+ try:
1121
+ logger.info("Running MCP server with stdio transport...")
1122
+ await app.run(
1123
+ read_stream=read_stream,
1124
+ write_stream=write_stream,
1125
+ initialization_options=app.create_initialization_options()
1126
+ )
1127
+ except Exception as e:
1128
+ logger.error(f"Error running server: {str(e)}")
1129
+ raise
1130
+ except Exception as e:
1131
+ logger.error(f"Server initialization error: {str(e)}")
1132
+ raise
1133
+
1134
+ def run():
1135
+ """同步运行入口点"""
1136
+ try:
1137
+ asyncio.run(main())
1138
+ except Exception as e:
1139
+ logger.error(f"Fatal error: {e}")
1140
+ sys.exit(1)
1141
+
1142
+ if __name__ == "__main__":
1143
+ run()
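
For illustration only, not part of the released package: a minimal sketch of driving the added server over stdio with the MCP Python SDK client. It assumes the module is saved as adbpg_mcp_server.py in the working directory and that the required ADBPG_* variables are available in the environment or a local .env file.

import asyncio
import sys

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def demo():
    # Launch the server as a child process; it inherits the current environment.
    server = StdioServerParameters(command=sys.executable, args=["adbpg_mcp_server.py"])
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # List the tools registered via @app.list_tools()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])
            # Call the SELECT tool; the server wraps the query in json_agg(row_to_json(...))
            result = await session.call_tool(
                "execute_select_sql",
                arguments={"query": "SELECT 1 AS one"},
            )
            print(result.content[0].text)

if __name__ == "__main__":
    asyncio.run(demo())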