adbpg-mcp-server 1.0.4__py3-none-any.whl → 1.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
adbpg_mcp_server.py ADDED
@@ -0,0 +1,1110 @@
1
+ import asyncio
2
+ import logging
3
+ import os
4
+ import sys
5
+ import json
6
+ import psycopg
7
+ import re
8
+ import ast
9
+ from psycopg import OperationalError as Error
10
+ from psycopg import Connection
11
+ from mcp.server import Server
12
+ from mcp.types import Resource, Tool, TextContent, ResourceTemplate
13
+ from pydantic import AnyUrl
14
+ from dotenv import load_dotenv
15
+ from mcp.server.stdio import stdio_server
16
+
17
+ # Configure logging, writing to stderr
18
+ logging.basicConfig(
19
+ level=logging.DEBUG,
20
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
21
+ stream=sys.stderr
22
+ )
23
+ logger = logging.getLogger("adbpg-mcp-server")
24
+
25
+ # Environment variable readiness flags
26
+ GRAPHRAG_ENV_IS_READY = True
27
+ LLMEMORY_ENV_IS_READY = True
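+ # These flags gate the graphrag / llm_memory tools below: they are set to False when
+ # the corresponding environment variables are missing or initialization fails, and
+ # the related tools then refuse to run.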
28
+ # Load environment variables
29
+ try:
30
+ load_dotenv()
31
+ logger.info("Environment variables loaded")
32
+
33
+ # Check the required environment variables
34
+ required_vars = ["ADBPG_HOST", "ADBPG_PORT", "ADBPG_USER", "ADBPG_PASSWORD", "ADBPG_DATABASE"]
35
+ missing_vars = [var for var in required_vars if not os.getenv(var)]
36
+ if missing_vars:
37
+ error_msg = f"Missing required environment variables: {', '.join(missing_vars)}"
38
+ logger.error(error_msg)
39
+ raise ValueError(error_msg)
40
+ logger.info("All ADBPG required environment variables are set")
41
+
42
+ # Check the graphrag / llmemory environment variables
43
+ required_graphrag_vars = [
44
+ "GRAPHRAG_LLM_MODEL",
45
+ "GRAPHRAG_API_KEY",
46
+ "GRAPHRAG_BASE_URL",
47
+ "GRAPHRAG_EMBEDDING_MODEL",
48
+ "GRAPHRAG_EMBEDDING_BASE_URL",
49
+ "GRAPHRAG_EMBEDDING_API_KEY"
50
+ ]
51
+ missing_graphrag_vars = [var for var in required_graphrag_vars if not os.getenv(var)]
52
+ if missing_graphrag_vars:
53
+ GRAPHRAG_ENV_IS_READY = False
54
+ error_msg = f"Missing required graphrag environment variables:{', '.join(missing_graphrag_vars)}"
55
+ logger.error(error_msg)
56
+ else:
57
+ logger.info("All graphRAG required environment variables are set")
58
+
59
+ required_llmemory_vars = ["LLMEMORY_LLM_MODEL", "LLMEMORY_API_KEY", "LLMEMORY_BASE_URL", "LLMEMORY_EMBEDDING_MODEL"]
60
+ missing_llmemory_vars = [var for var in required_llmemory_vars if not os.getenv(var)]
61
+ if missing_llmemory_vars:
62
+ LLMEMORY_ENV_IS_READY = False
63
+ error_msg = f"Missing required llm memory environment variables:{', '.join(missing_llmemory_vars)}"
64
+ logger.error(error_msg)
65
+ else:
66
+ logger.info("All llm memory required environment variables are set")
67
+
68
+
69
+ except Exception as e:
70
+ logger.error(f"Error loading environment variables: {e}")
71
+ sys.exit(1)
72
+
73
+ SERVER_VERSION = "0.1.0"
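+ # NOTE: used only to tag the connection's application_name in get_db_config();
+ # it differs from the wheel version (1.0.6) in this release.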
74
+
75
+
76
+ # Build the graphrag initialization config
77
+ def get_graphrag_config():
78
+ graphrag_config = {
79
+ "llm_model": os.getenv("GRAPHRAG_LLM_MODEL"),
80
+ "llm_api_key": os.getenv("GRAPHRAG_API_KEY"),
81
+ "llm_url": os.getenv("GRAPHRAG_BASE_URL"),
82
+ "embedding_model": os.getenv("GRAPHRAG_EMBEDDING_MODEL"),
83
+ "embedding_api_key": os.getenv("GRAPHRAG_EMBEDDING_API_KEY"),
84
+ "embedding_url": os.getenv("GRAPHRAG_EMBEDDING_BASE_URL"),
85
+ "language": os.getenv("GRAPHRAG_LANGUAGE", "English"),
86
+ "entity_types": os.getenv("GRAPHRAG_ENTITY_TYPES"),
87
+ "relationship_types": os.getenv("GRAPHRAG_RELATIONSHIP_TYPES")
88
+ }
89
+ return graphrag_config
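+ # This dict is JSON-encoded and passed to adbpg_graphrag.initialize() in
+ # get_graphrag_tool_connection(); entity_types and relationship_types may be None
+ # when the corresponding optional environment variables are unset.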
90
+
91
+ # Build the llmemory initialization config
92
+ def get_llmemory_config():
93
+ llm_memory_config = {
94
+ "llm": {
95
+ "provider": "openai",
96
+ "config": {
97
+ "model": os.getenv("LLMEMORY_LLM_MODEL"),
98
+ "openai_base_url": os.getenv("LLMEMORY_BASE_URL"),
99
+ "api_key": os.getenv("LLMEMORY_API_KEY")
100
+ }
101
+ },
102
+ "embedder": {
103
+ "provider": "openai",
104
+ "config": {
105
+ "model": os.getenv("LLMEMORY_EMBEDDING_MODEL"),
106
+ "embedding_dims": os.getenv("LLMEMORY_EMBEDDING_DIMS", 1024),
107
+ "api_key": os.getenv("LLMEMORY_API_KEY"),
108
+ "openai_base_url": os.getenv("LLMEMORY_BASE_URL")
109
+ }
110
+ },
111
+ "vector_store": {
112
+ "provider": "adbpg",
113
+ "config": {
114
+ "user": os.getenv("ADBPG_USER"),
115
+ "password": os.getenv("ADBPG_PASSWORD"),
116
+ "dbname": os.getenv("ADBPG_DATABASE"),
117
+ "hnsw": "True",
118
+ "embedding_model_dims": os.getenv("LLMEMORY_EMBEDDING_DIMS", 1024)
119
+ }
120
+ }
121
+ }
122
+ return llm_memory_config
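+ # This dict is JSON-encoded and passed to adbpg_llm_memory.config() in
+ # get_llm_memory_tool_connection(). NOTE: the vector_store section only carries
+ # user/password/dbname, presumably because the extension stores vectors in the
+ # same AnalyticDB instance.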
123
+
124
+ def get_db_config():
125
+ """从环境变量获取数据库配置信息"""
126
+ try:
127
+ config = {
128
+ "host": os.getenv("ADBPG_HOST", "localhost"),
129
+ "port": os.getenv("ADBPG_PORT"),
130
+ "user": os.getenv("ADBPG_USER"),
131
+ "password": os.getenv("ADBPG_PASSWORD"),
132
+ "dbname": os.getenv("ADBPG_DATABASE"),
133
+ "application_name": f"adbpg-mcp-server-{SERVER_VERSION}"
134
+ }
135
+
136
+ # Log the configuration (excluding the password)
137
+ logger.info(f"Database config: host={config['host']}, port={config['port']}, user={config['user']}, dbname={config['dbname']}")
138
+ return config
139
+ except Exception as e:
140
+ logger.error(f"Error getting database config: {str(e)}")
141
+ raise
142
+
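+ # The two helpers below each keep one long-lived, autocommit connection for the
+ # graphrag and llm_memory extensions: they reuse the module-level connection,
+ # probe it with "SELECT 1", reconnect when the probe fails, and re-run the
+ # extension initialization call after every (re)connect.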
143
+ # Global long-lived graphrag connection and its initialization
144
+ GRAPHRAG_CONN: Connection | None = None
145
+ def get_graphrag_tool_connection() -> Connection:
146
+ global GRAPHRAG_CONN
147
+ global GRAPHRAG_ENV_IS_READY
148
+ config = get_db_config()
149
+ # Reconnect if there is no connection yet or the connection has been closed
150
+ if GRAPHRAG_CONN is None or GRAPHRAG_CONN.closed:
151
+ GRAPHRAG_CONN = psycopg.connect(**config)
152
+ GRAPHRAG_CONN.autocommit = True
153
+ try:
154
+ graphrag_conn = GRAPHRAG_CONN
155
+ with graphrag_conn.cursor() as cursor:
156
+ cursor.execute("SELECT adbpg_graphrag.initialize(%s::json);", (graphrag_config_str,))
157
+ logger.info(f"[GraphRAG] Use the connection {id(graphrag_conn)} when executing the graphrag init")
158
+ logger.info("Successfully initialize the graphrag server\n")
159
+ except Exception as e:
160
+ GRAPHRAG_ENV_IS_READY = False
161
+ logger.error(f"Failed to initialize the graphrag server: {e}")
162
+ # Re-run initialization
163
+ else:
164
+ # Send a lightweight query to check that the connection is healthy
165
+ try:
166
+ with GRAPHRAG_CONN.cursor() as cur:
167
+ cur.execute("SELECT 1;")
168
+ _ = cur.fetchone()
169
+ except Exception:
170
+ # Reconnect
171
+ GRAPHRAG_CONN.close()
172
+ GRAPHRAG_CONN = psycopg.connect(**config)
173
+ GRAPHRAG_CONN.autocommit = True
174
+ # Re-run initialization
175
+ try:
176
+ graphrag_conn = GRAPHRAG_CONN
177
+ with graphrag_conn.cursor() as cursor:
178
+ cursor.execute("SELECT adbpg_graphrag.initialize(%s::json);", (graphrag_config_str,))
179
+ logger.info(f"[GraphRAG] Use the connection {id(graphrag_conn)} when executing the graphrag init")
180
+ logger.info("Successfully initialize the graphrag server\n")
181
+ except Exception as e:
182
+ GRAPHRAG_ENV_IS_READY = False
183
+ logger.error(f"Failed to initialize the graphrag server: {e}")
184
+
185
+ return GRAPHRAG_CONN
186
+
187
+ LLM_MEMORY_CONN: Connection | None = None
188
+ def get_llm_memory_tool_connection() -> Connection:
189
+ global LLM_MEMORY_CONN
190
+ global LLMEMORY_ENV_IS_READY
191
+ config = get_db_config()
192
+ # Reconnect if there is no connection yet or the connection has been closed
193
+ if LLM_MEMORY_CONN is None or LLM_MEMORY_CONN.closed:
194
+ LLM_MEMORY_CONN = psycopg.connect(**config)
195
+ LLM_MEMORY_CONN.autocommit = True
196
+ try:
197
+ llm_memory_conn = LLM_MEMORY_CONN
198
+ with llm_memory_conn.cursor() as cursor:
199
+ cursor.execute("SELECT adbpg_llm_memory.config(%s::json)", (llm_memory_config_str,))
200
+ logger.info(f"[LLM Memory] Use the connection {id(llm_memory_conn)} when executing the llm_memory init")
201
+ logger.info("Successfully initialize the llm server\n")
202
+ except Exception as e:
203
+ LLMEMORY_ENV_IS_READY = False
204
+ logger.error(f"Failed to initialize the llm_memory server: {e}")
205
+ else:
206
+ # Send a lightweight query to check that the connection is healthy
207
+ try:
208
+ with LLM_MEMORY_CONN.cursor() as cur:
209
+ cur.execute("SELECT 1;")
210
+ _ = cur.fetchone()
211
+ except Exception:
212
+ # Reconnect
213
+ LLM_MEMORY_CONN.close()
214
+ LLM_MEMORY_CONN = psycopg.connect(**config)
215
+ LLM_MEMORY_CONN.autocommit = True
216
+ try:
217
+ llm_memory_conn = LLM_MEMORY_CONN
218
+ with llm_memory_conn.cursor() as cursor:
219
+ cursor.execute("SELECT adbpg_llm_memory.config(%s::json)", (llm_memory_config_str,))
220
+ logger.info(f"[LLM Memory] Use the connection {id(llm_memory_conn)} when executing the llm_memory init")
221
+ logger.info("Successfully initialize the llm server\n")
222
+ except Exception as e:
223
+ LLMEMORY_ENV_IS_READY = False
224
+ logger.error(f"Failed to initialize the llm_memory server: {e}")
225
+
226
+ return LLM_MEMORY_CONN
227
+
228
+ #### Initialization
229
+ if GRAPHRAG_ENV_IS_READY:
230
+ # Initialize graphrag
231
+ logger.info("Starting graphRAG server...")
232
+ graphrag_config = get_graphrag_config()
233
+ graphrag_config_str = json.dumps(graphrag_config)
234
+ # Establish the long-lived connection and initialize
235
+ get_graphrag_tool_connection()
236
+ if LLMEMORY_ENV_IS_READY:
237
+ # Initialize llmemory
238
+ logger.info("Starting llmemory server...")
239
+ llm_memory_config = get_llmemory_config()
240
+ llm_memory_config_str = json.dumps(llm_memory_config)
241
+ # Establish the long-lived connection and initialize
242
+ get_llm_memory_tool_connection()
243
+
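+ # graphrag / llm_memory are initialized at import time, before the MCP server is
+ # created; failures here only disable the corresponding tools instead of aborting startup.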
244
+ # Initialize the MCP server
245
+ try:
246
+ app = Server("adbpg-mcp-server")
247
+ logger.info("MCP server initialized")
248
+ except Exception as e:
249
+ logger.error(f"Error initializing MCP server: {e}")
250
+ sys.exit(1)
251
+
252
+ @app.list_resources()
253
+ async def list_resources() -> list[Resource]:
254
+ """列出可用的基本资源"""
255
+ try:
256
+ return [
257
+ Resource(
258
+ uri="adbpg:///schemas",
259
+ name="All Schemas",
260
+ description="AnalyticDB PostgreSQL schemas. List all schemas in the database",
261
+ mimeType="text/plain"
262
+ )
263
+ ]
264
+ except Exception as e:
265
+ logger.error(f"Error listing resources: {str(e)}")
266
+ raise
267
+
268
+ @app.list_resource_templates()
269
+ async def list_resource_templates() -> list[ResourceTemplate]:
270
+ """
271
+ Define dynamic resource templates.
272
+
273
+ Returns:
274
+ list[ResourceTemplate]: list of resource templates,
275
+ including the following templates:
276
+ - List tables in a schema
277
+ - Get a table's DDL
278
+ - Get table statistics
279
+ """
280
+ return [
281
+ ResourceTemplate(
282
+ uriTemplate="adbpg:///{schema}/tables", # 表列表模板
283
+ name="Schema Tables",
284
+ description="List all tables in a specific schema",
285
+ mimeType="text/plain"
286
+ ),
287
+ ResourceTemplate(
288
+ uriTemplate="adbpg:///{schema}/{table}/ddl", # 表DDL模板
289
+ name="Table DDL",
290
+ description="Get the DDL script of a table in a specific schema",
291
+ mimeType="text/plain"
292
+ ),
293
+ ResourceTemplate(
294
+ uriTemplate="adbpg:///{schema}/{table}/statistics", # 表统计信息模板
295
+ name="Table Statistics",
296
+ description="Get statistics information of a table",
297
+ mimeType="text/plain"
298
+ )
299
+ ]
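+ # Each template above maps to one of the URI formats handled by read_resource() below.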
300
+
301
+ @app.read_resource()
302
+ async def read_resource(uri: AnyUrl) -> str:
303
+ """
304
+ Read resource content.
305
+
306
+ Args:
307
+ uri (AnyUrl): resource URI
308
+
309
+ Returns:
310
+ str: resource content
311
+
312
+ Supported URI formats:
313
+ - adbpg:///schemas: list all schemas
314
+ - adbpg:///{schema}/tables: list tables in the given schema
315
+ - adbpg:///{schema}/{table}/ddl: get a table's DDL
316
+ - adbpg:///{schema}/{table}/statistics: get a table's statistics
317
+ """
318
+ config = get_db_config()
319
+ uri_str = str(uri)
320
+
321
+ if not uri_str.startswith("adbpg:///"):
322
+ raise ValueError(f"Invalid URI scheme: {uri_str}")
323
+
324
+ try:
325
+ with psycopg.connect(**config) as conn: # open a database connection
326
+ conn.autocommit = True # enable autocommit
327
+ with conn.cursor() as cursor: # create a cursor
328
+ path_parts = uri_str[9:].split('/') # parse the URI path
329
+
330
+ if path_parts[0] == "schemas":
331
+ # List all schemas
332
+ query = """
333
+ SELECT schema_name
334
+ FROM information_schema.schemata
335
+ WHERE schema_name NOT IN ('pg_catalog', 'information_schema')
336
+ ORDER BY schema_name;
337
+ """
338
+ cursor.execute(query)
339
+ schemas = cursor.fetchall()
340
+ return "\n".join([schema[0] for schema in schemas])
341
+
342
+ elif len(path_parts) == 2 and path_parts[1] == "tables":
343
+ # List the tables in the given schema
344
+ schema = path_parts[0]
345
+ query = f"""
346
+ SELECT table_name, table_type
347
+ FROM information_schema.tables
348
+ WHERE table_schema = %s
349
+ ORDER BY table_name;
350
+ """
351
+ cursor.execute(query, (schema,))
352
+ tables = cursor.fetchall()
353
+ return "\n".join([f"{table[0]} ({table[1]})" for table in tables])
354
+
355
+ elif len(path_parts) == 3 and path_parts[2] == "ddl":
356
+ # Get the table's DDL
357
+ schema = path_parts[0]
358
+ table = path_parts[1]
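+ # NOTE: pg_get_ddl is assumed to be provided by AnalyticDB PostgreSQL;
+ # it is not a built-in function in stock PostgreSQL.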
359
+ query = f"""
360
+ SELECT pg_get_ddl('{schema}.{table}'::regclass);
361
+ """
362
+ cursor.execute(query)
363
+ ddl = cursor.fetchone()
364
+ return ddl[0] if ddl else f"No DDL found for {schema}.{table}"
365
+
366
+ elif len(path_parts) == 3 and path_parts[2] == "statistics":
367
+ # Get the table's statistics
368
+ schema = path_parts[0]
369
+ table = path_parts[1]
370
+ query = """
371
+ SELECT
372
+ schemaname,
373
+ tablename,
374
+ attname,
375
+ null_frac,
376
+ avg_width,
377
+ n_distinct,
378
+ most_common_vals,
379
+ most_common_freqs
380
+ FROM pg_stats
381
+ WHERE schemaname = %s AND tablename = %s
382
+ ORDER BY attname;
383
+ """
384
+ cursor.execute(query, (schema, table))
385
+ rows = cursor.fetchall()
386
+ if not rows:
387
+ return f"No statistics found for {schema}.{table}"
388
+
389
+ result = []
390
+ for row in rows:
391
+ result.append(f"Column: {row[2]}")
392
+ result.append(f" Null fraction: {row[3]}")
393
+ result.append(f" Average width: {row[4]}")
394
+ result.append(f" Distinct values: {row[5]}")
395
+ if row[6]:
396
+ result.append(f" Most common values: {row[6]}")
397
+ result.append(f" Most common frequencies: {row[7]}")
398
+ result.append("")
399
+ return "\n".join(result)
400
+
401
+ raise ValueError(f"Invalid resource URI format: {uri_str}")
402
+
403
+ except Error as e:
404
+ raise RuntimeError(f"Database error: {str(e)}")
405
+
406
+ @app.list_tools()
407
+ async def list_tools() -> list[Tool]:
408
+ """
409
+ List the available tools.
410
+
411
+ Returns:
412
+ list[Tool]: list of tools,
413
+ including the following tools:
414
+ - execute_select_sql: execute SELECT queries
415
+ - execute_dml_sql: execute DML statements
416
+ - execute_ddl_sql: execute DDL statements
417
+ - analyze_table: collect table statistics
418
+ - explain_query: get a query execution plan
419
+
420
+ - adbpg_graphrag_upload: perform a graphRAG upload of text
421
+ - adbpg_graphrag_query: perform a graphRAG query
422
+ - adbpg_graphrag_upload_decision_tree: upload a decision tree
423
+ - adbpg_graphrag_append_decision_tree: append a subtree under a given node
424
+ - adbpg_graphrag_delete_decision_tree: delete the subtree below the given node id
425
+
426
+ - adbpg_llm_memory_add: add new memories
427
+ - adbpg_llm_memory_get_all: get all memories
428
+ - adbpg_llm_memory_search: search memories by query
429
+ - adbpg_llm_memory_delete_all: delete all memories
430
+ """
431
+ return [
432
+ Tool(
433
+ name="execute_select_sql",
434
+ description="Execute SELECT SQL to query data from ADBPG database.",
435
+ inputSchema={
436
+ "type": "object",
437
+ "properties": {
438
+ "query": {
439
+ "type": "string",
440
+ "description": "The (SELECT) SQL query to execute"
441
+ }
442
+ },
443
+ "required": ["query"]
444
+ }
445
+ ),
446
+ Tool(
447
+ name="execute_dml_sql",
448
+ description="Execute (INSERT, UPDATE, DELETE) SQL to modify data in ADBPG database.",
449
+ inputSchema={
450
+ "type": "object",
451
+ "properties": {
452
+ "query": {
453
+ "type": "string",
454
+ "description": "The DML SQL query to execute"
455
+ }
456
+ },
457
+ "required": ["query"]
458
+ }
459
+ ),
460
+ Tool(
461
+ name="execute_ddl_sql",
462
+ description="Execute (CREATE, ALTER, DROP) SQL statements to manage database objects.",
463
+ inputSchema={
464
+ "type": "object",
465
+ "properties": {
466
+ "query": {
467
+ "type": "string",
468
+ "description": "The DDL SQL query to execute"
469
+ }
470
+ },
471
+ "required": ["query"]
472
+ }
473
+ ),
474
+ Tool(
475
+ name="analyze_table",
476
+ description="Execute ANALYZE command to collect table statistics.",
477
+ inputSchema={
478
+ "type": "object",
479
+ "properties": {
480
+ "schema": {
481
+ "type": "string",
482
+ "description": "Schema name"
483
+ },
484
+ "table": {
485
+ "type": "string",
486
+ "description": "Table name"
487
+ }
488
+ },
489
+ "required": ["schema", "table"]
490
+ }
491
+ ),
492
+ Tool(
493
+ name="explain_query",
494
+ description="Get query execution plan.",
495
+ inputSchema={
496
+ "type": "object",
497
+ "properties": {
498
+ "query": {
499
+ "type": "string",
500
+ "description": "The SQL query to analyze"
501
+ }
502
+ },
503
+ "required": ["query"]
504
+ }
505
+ ),
506
+
507
+ #### graphrag & llm_memory tool list
508
+ Tool(
509
+ name = "adbpg_graphrag_upload",
510
+ description = "Upload a text file (with its name) and file content to graphrag to generate a knowledge graph.",
511
+ # Args: filename text, context text
512
+ # filename is the file name, context is the file content
513
+ inputSchema = {
514
+ "type": "object",
515
+ "properties": {
516
+ "filename": {
517
+ "type": "string",
518
+ "description": "The name of the file to be uploaded"
519
+ },
520
+ "context": {
521
+ "type": "string",
522
+ "description": "The textual content of the file."
523
+ }
524
+ },
525
+ "required": ["filename", "context"]
526
+ }
527
+ ),
528
+ Tool(
529
+ name = "adbpg_graphrag_query",
530
+ description = "Query the graphrag using the specified query string and mode.",
531
+ # Args: query_str text, [query_mode text]
532
+ # query_str is the question to ask, query_mode selects the query mode
533
+ inputSchema = {
534
+ "type": "object",
535
+ "properties": {
536
+ "query_str": {
537
+ "type": "string",
538
+ "description": "The query content."
539
+ },
540
+ "query_mode": {
541
+ "type": "string",
542
+ "description": "The query mode, choose from [bypass, naive, local, global, hybrid, mix]. If null, defaults to mix."
543
+ }
544
+ },
545
+ "required": ["query_str"]
546
+ }
547
+ ),
548
+ Tool(
549
+ name = "adbpg_graphrag_upload_decision_tree",
550
+ description = " Upload a decision tree with the specified root_node. If the root_node does not exist, a new decision tree will be created. ",
551
+ # context text, root_node text
552
+ inputSchema = {
553
+ "type": "object",
554
+ "properties": {
555
+ "root_node": {
556
+ "type": "string",
557
+ "description": "the root_noot (optional)"
558
+ },
559
+ "context": {
560
+ "type": "string",
561
+ "description": "the context of decision"
562
+ }
563
+ },
564
+ "required": ["context"]
565
+ }
566
+ ),
567
+ Tool(
568
+ name = "adbpg_graphrag_append_decision_tree",
569
+ description = "Append a subtree to an existing decision tree at the node specified by root_node_id. ",
570
+ # Args: context text, root_node_id text
571
+ inputSchema = {
572
+ "type": "object",
573
+ "properties": {
574
+ "root_node_id": {
575
+ "type": "string",
576
+ "description": "the root_noot_id"
577
+ },
578
+ "context": {
579
+ "type": "string",
580
+ "description": "the context of decision"
581
+ }
582
+ },
583
+ "required": ["context", "root_node_id"]
584
+ }
585
+ ),
586
+ Tool(
587
+ name = "adbpg_graphrag_delete_decision_tree",
588
+ description = " Delete a sub-decision tree under the node specified by root_node_entity. ",
589
+ # Args: root_node_entity text
590
+ inputSchema = {
591
+ "type": "object",
592
+ "properties": {
593
+ "root_node_entity": {
594
+ "type": "string",
595
+ "description": "the root_noot_entity"
596
+ }
597
+ },
598
+ "required": ["root_node_entity"]
599
+ }
600
+ ),
601
+
602
+
603
+ Tool(
604
+ name = "adbpg_llm_memory_add",
605
+ description = "Add LLM long memory with a specific user, run or agent.",
606
+ # Args: messages json, user_id text, run_id text, agent_id text, metadata json
607
+ # Add new memories
608
+ inputSchema={
609
+ "type": "object",
610
+ "properties": {
611
+ "messages": {
612
+ "type": "object",
613
+ "description": "llm_memory messages"
614
+ },
615
+ "user_id": {
616
+ "type": "string",
617
+ "description": "the user_id"
618
+ },
619
+ "run_id": {
620
+ "type": "string",
621
+ "description": "the run_id"
622
+ },
623
+ "agent_id": {
624
+ "type": "string",
625
+ "description": "the agent_id"
626
+ },
627
+ "metadata": {
628
+ "type": "object",
629
+ "description": "the metatdata json"
630
+ },
631
+ "memory_type": {
632
+ "type": "string",
633
+ "description": "the memory_type text"
634
+ },
635
+ "prompt": {
636
+ "type": "string",
637
+ "description": "the prompt"
638
+ }
639
+ },
640
+ "required": ["messages"]
641
+ }
642
+ ),
643
+ Tool(
644
+ name = "adbpg_llm_memory_get_all",
645
+ description = "Retrieves all memory records associated with a specific user, run or agent.",
646
+ # Args: user_id text, run_id text, agent_id text
647
+ # Get all memories for a given user, run, or agent
648
+ inputSchema={
649
+ "type": "object",
650
+ "properties": {
651
+ "user_id": {
652
+ "type": "string",
653
+ "description": "The user_id"
654
+ },
655
+ "run_id": {
656
+ "type": "string",
657
+ "description": "The run_id"
658
+ },
659
+ "agent_id": {
660
+ "type": "string",
661
+ "description": "The agent_id"
662
+ }
663
+ },
664
+ "required": []
665
+ }
666
+ ),
667
+ Tool(
668
+ name = "adbpg_llm_memory_search",
669
+ description = "Retrieves memories relevant to the given query for a specific user, run, or agent.",
670
+ # Args: query text, user_id text, run_id text, agent_id text, filter json
671
+ # Retrieve memories relevant to the given query
672
+ inputSchema={
673
+ "type": "object",
674
+ "properties": {
675
+ "query": {
676
+ "type": "string",
677
+ "description": "llm_memory relevant query"
678
+ },
679
+ "user_id": {
680
+ "type": "string",
681
+ "description": "The search of user_id"
682
+ },
683
+ "run_id": {
684
+ "type": "string",
685
+ "description": "The search of run_id"
686
+ },
687
+ "agent_id": {
688
+ "type": "string",
689
+ "description": "The search of agent_id"
690
+ },
691
+ "filter": {
692
+ "type": "object",
693
+ "description": "The search of filter"
694
+ }
695
+ },
696
+ "required": ["query"]
697
+ }
698
+ )
699
+ ,
700
+ Tool(
701
+ name = "adbpg_llm_memory_delete_all",
702
+ description = "Delete all memory records associated with a specific user, run or agent.",
703
+ # Args: user_id text, run_id text, agent_id text
704
+ # Delete all memories for a given user, run, or agent
705
+ inputSchema={
706
+ "type": "object",
707
+ "properties": {
708
+ "user_id": {
709
+ "type": "string",
710
+ "description": "The user_id"
711
+ },
712
+ "run_id": {
713
+ "type": "string",
714
+ "description": "The run_id"
715
+ },
716
+ "agent_id": {
717
+ "type": "string",
718
+ "description": "The agent_id"
719
+ }
720
+ },
721
+ "required": []
722
+ }
723
+ )
724
+
725
+ ]
726
+
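+ # The two helpers below run the given SQL on the corresponding long-lived connection
+ # and, when the statement returns rows, pass the first column of the first row back
+ # to the client as text.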
727
+ def get_graphrag_tool_result(wrapped_sql, params) -> list[TextContent]:
728
+ try:
729
+ conn = get_graphrag_tool_connection()
730
+ with conn.cursor() as cursor:
731
+ cursor.execute(wrapped_sql, params)
732
+ if cursor.description:
733
+ json_result = cursor.fetchone()[0]
734
+ return [TextContent(type="text", text=json_result)]
735
+ else:
736
+ return [TextContent(type="text", text="graphrag command executed successfully")]
737
+ except Exception as e:
738
+ return [TextContent(type="text", text=f"Error executing graphrag command: {str(e)}")]
739
+
740
+ def get_llm_memory_tool_result(wrapped_sql, params) -> list[TextContent]:
741
+ try:
742
+ conn = get_llm_memory_tool_connection()
743
+ with conn.cursor() as cursor:
744
+ cursor.execute(wrapped_sql, params)
745
+
746
+ if cursor.description:
747
+ json_result = cursor.fetchone()[0]
748
+ return [TextContent(type="text", text=json_result)]
749
+ else:
750
+ return [TextContent(type="text", text="llm_memory command executed successfully")]
751
+ except Exception as e:
752
+ return [TextContent(type="text", text=f"Error executing llm_memory command: {str(e)}")]
753
+
754
+
755
+ @app.call_tool()
756
+ async def call_tool(name: str, arguments: dict) -> list[TextContent]:
757
+ """
758
+ Dispatch a tool invocation.
759
+
760
+ Args:
761
+ name (str): tool name
762
+ arguments (dict): tool arguments
763
+
764
+ Returns:
765
+ list[TextContent]: execution result
766
+
767
+ Supported tools:
768
+ - execute_select_sql: execute SELECT queries
769
+ - execute_dml_sql: execute DML statements
770
+ - execute_ddl_sql: execute DDL statements
771
+ - analyze_table: collect table statistics
772
+ - explain_query: get a query execution plan
773
+
774
+ - adbpg_graphrag_upload: perform a graphRAG upload of text
775
+ - adbpg_graphrag_query: perform a graphRAG query
776
+
777
+ - adbpg_llm_memory_add: add new memories
778
+ - adbpg_llm_memory_get_all: get all memories
779
+ - adbpg_llm_memory_search: search memories by query
780
+ - adbpg_llm_memory_delete_all: delete all memories
781
+ """
782
+ config = get_db_config()
783
+ global GRAPHRAG_ENV_IS_READY
784
+ # Dispatch on the tool name
785
+ if name == "execute_select_sql":
786
+ query = arguments.get("query")
787
+ if not query:
788
+ raise ValueError("Query is required")
789
+ if not query.strip().upper().startswith("SELECT"):
790
+ raise ValueError("Query must be a SELECT statement")
791
+ elif name == "execute_dml_sql":
792
+ query = arguments.get("query")
793
+ if not query:
794
+ raise ValueError("Query is required")
795
+ if not any(query.strip().upper().startswith(keyword) for keyword in ["INSERT", "UPDATE", "DELETE"]):
796
+ raise ValueError("Query must be a DML statement (INSERT, UPDATE, DELETE)")
797
+ elif name == "execute_ddl_sql":
798
+ query = arguments.get("query")
799
+ if not query:
800
+ raise ValueError("Query is required")
801
+ if not any(query.strip().upper().startswith(keyword) for keyword in ["CREATE", "ALTER", "DROP"]):
802
+ raise ValueError("Query must be a DDL statement (CREATE, ALTER, DROP)")
803
+ elif name == "analyze_table":
804
+ schema = arguments.get("schema")
805
+ table = arguments.get("table")
806
+ if not all([schema, table]):
807
+ raise ValueError("Schema and table are required")
808
+ query = f"ANALYZE {schema}.{table}"
809
+ elif name == "explain_query":
810
+ query = arguments.get("query")
811
+ if not query:
812
+ raise ValueError("Query is required")
813
+ query = f"EXPLAIN {query}"
814
+
815
+ # adbpg_graphrag tool
816
+ elif name == "adbpg_graphrag_upload":
817
+ # The tool is unavailable if the GraphRAG service failed to initialize
818
+ if not GRAPHRAG_ENV_IS_READY:
819
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
820
+ filename = arguments.get("filename")
821
+ context = arguments.get("context")
822
+ if not filename:
823
+ raise ValueError("Filename is required")
824
+ if not context:
825
+ raise ValueError("Context if required")
826
+ # Build the SQL command
827
+ wrapped_sql = f"""
828
+ SELECT adbpg_graphrag.upload(%s::text, %s::text)
829
+ """
830
+ params = [filename, context]
831
+ return get_graphrag_tool_result(wrapped_sql, params)
832
+
833
+ elif name == "adbpg_graphrag_query":
834
+ # The tool is unavailable if the GraphRAG service failed to initialize
835
+ if not GRAPHRAG_ENV_IS_READY:
836
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
837
+ query_str = arguments.get("query_str")
838
+ query_mode = arguments.get("query_mode")
839
+ if not query_str:
840
+ raise ValueError("Query is required")
841
+ if not query_mode:
842
+ # default mode
843
+ query_mode = "mix"
844
+ # Build the SQL command
845
+ wrapped_sql = f"""
846
+ SELECT adbpg_graphrag.query(%s::text, %s::text)
847
+ """
848
+ params = [query_str, query_mode]
849
+ return get_graphrag_tool_result(wrapped_sql, params)
850
+
851
+ elif name == "adbpg_graphrag_upload_decision_tree":
852
+ if not GRAPHRAG_ENV_IS_READY:
853
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
854
+ root_node = arguments.get("root_node")
855
+ context = arguments.get("context")
856
+ if not context:
857
+ raise ValueError("Decision Tree Context is required")
858
+ if not root_node:
859
+ root_node = None
860
+ wrapped_sql = f"""
861
+ SELECT adbpg_graphrag.upload_decision_tree(%s::text, %s::text)
862
+ """
863
+ params = [context, root_node]
864
+ return get_graphrag_tool_result(wrapped_sql, params)
865
+
866
+ elif name == "adbpg_graphrag_append_decision_tree":
867
+ if not GRAPHRAG_ENV_IS_READY:
868
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
869
+ root_node = arguments.get("root_node_id")
870
+ context = arguments.get("context")
871
+ if not context:
872
+ raise ValueError("Decision Tree Context is required")
873
+ if not root_node:
874
+ raise ValueError("Root node id is required")
875
+ wrapped_sql = f"""
876
+ SELECT adbpg_graphrag.append_decision_tree(%s::text, %s::text)
877
+ """
878
+ params = [context, root_node]
879
+ return get_graphrag_tool_result(wrapped_sql, params)
880
+
881
+ elif name == "adbpg_graphrag_delete_decision_tree":
882
+ if not GRAPHRAG_ENV_IS_READY:
883
+ raise ValueError("GraphRAG Server initialization failed. This tool cannot be used.")
884
+ root_node = arguments.get("root_node_entity")
885
+ if not root_node:
886
+ raise ValueError("Root node entity is required")
887
+ wrapped_sql = f"""
888
+ SELECT adbpg_graphrag.delete_decision_tree(%s::text)
889
+ """
890
+ params = [root_node]
891
+ return get_graphrag_tool_result(wrapped_sql, params)
892
+
893
+ # adbpg_llm_memory tool
894
+ elif name == "adbpg_llm_memory_add":
895
+ # The tool is unavailable if the LLM memory service failed to initialize
896
+ if not LLMEMORY_ENV_IS_READY:
897
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
898
+
899
+ messages = arguments.get("messages")
900
+ if not messages:
901
+ raise ValueError("messages is required")
902
+ messages_str = json.dumps(messages, ensure_ascii = False)
903
+
904
+ user_id = arguments.get("user_id")
905
+ if not user_id:
906
+ user_id = None
907
+ run_id = arguments.get("run_id")
908
+ if not run_id:
909
+ run_id = None
910
+ agent_id = arguments.get("agent_id")
911
+ if not agent_id:
912
+ agent_id = None
913
+ if user_id is None and run_id is None and agent_id is None:
914
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
915
+
916
+ metadata = arguments.get("metadata")
917
+ metadata_str = None
918
+ if metadata:
919
+ metadata_str = json.dumps(metadata, ensure_ascii = False)
920
+
921
+ memory_type = arguments.get("memory_type")
922
+ memory_prompt = arguments.get("prompt")
923
+ if not memory_type:
924
+ memory_type = None
925
+ if not memory_prompt:
926
+ memory_prompt = None
927
+
928
+
929
+ wrapped_sql = """
930
+ SELECT adbpg_llm_memory.add(
931
+ %s::json,
932
+ %s::text,
933
+ %s::text,
934
+ %s::text,
935
+ %s::json,
936
+ %s::text,
937
+ %s::text
938
+ )
939
+ """
940
+ params = [messages_str, user_id, run_id, agent_id, metadata_str, memory_type, memory_prompt]
941
+ return get_llm_memory_tool_result(wrapped_sql, params)
942
+
943
+ elif name == "adbpg_llm_memory_get_all":
944
+ # The tool is unavailable if the LLM memory service failed to initialize
945
+ if not LLMEMORY_ENV_IS_READY:
946
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
947
+
948
+ user_id = arguments.get("user_id")
949
+ if not user_id:
950
+ user_id = None
951
+ run_id = arguments.get("run_id")
952
+ if not run_id:
953
+ run_id = None
954
+ agent_id = arguments.get("agent_id")
955
+ if not agent_id:
956
+ agent_id = None
957
+ if user_id is None and run_id is None and agent_id is None:
958
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
959
+ wrapped_sql = f"""
960
+ SELECT adbpg_llm_memory.get_all(
961
+ %s::text,
962
+ %s::text,
963
+ %s::text
964
+ )
965
+ """
966
+ params = [user_id, run_id, agent_id]
967
+ return get_llm_memory_tool_result(wrapped_sql, params)
968
+
969
+
970
+ elif name == "adbpg_llm_memory_search":
971
+ # The tool is unavailable if the LLM memory service failed to initialize
972
+ if not LLMEMORY_ENV_IS_READY:
973
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
974
+ query = arguments.get("query")
975
+ if not query:
976
+ raise ValueError("Query is required")
977
+
978
+ user_id = arguments.get("user_id")
979
+ if not user_id:
980
+ user_id = None
981
+ run_id = arguments.get("run_id")
982
+ if not run_id:
983
+ run_id = None
984
+ agent_id = arguments.get("agent_id")
985
+ if not agent_id:
986
+ agent_id = None
987
+ if user_id is None and run_id is None and agent_id is None:
988
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
989
+
990
+ filter_json = arguments.get("filter")
991
+ filter_json_str = None
992
+ # Serialize the filter to a JSON string
993
+ if filter_json:
994
+ filter_json_str = json.dumps(filter_json, ensure_ascii = False)
995
+ # Build the SQL command
996
+ wrapped_sql = f"""
997
+ SELECT adbpg_llm_memory.search(
998
+ %s::text,
999
+ %s::text,
1000
+ %s::text,
1001
+ %s::text,
1002
+ %s::json
1003
+ )
1004
+ """
1005
+ params = [query, user_id, run_id, agent_id, filter_json_str]
1006
+ return get_llm_memory_tool_result(wrapped_sql, params)
1007
+
1008
+ elif name == "adbpg_llm_memory_delete_all":
1009
+ # The tool is unavailable if the LLM memory service failed to initialize
1010
+ if not LLMEMORY_ENV_IS_READY:
1011
+ raise ValueError("LLMEMORY Server initialization failed. This tool cannot be used.")
1012
+
1013
+ user_id = arguments.get("user_id")
1014
+ if not user_id:
1015
+ user_id = None
1016
+ run_id = arguments.get("run_id")
1017
+ if not run_id:
1018
+ run_id = None
1019
+ agent_id = arguments.get("agent_id")
1020
+ if not agent_id:
1021
+ agent_id = None
1022
+ if user_id is None and run_id is None and agent_id is None:
1023
+ raise ValueError("At least one of user_id, run_id, or agent_id must be provided.")
1024
+
1025
+ wrapped_sql = f"""
1026
+ SELECT adbpg_llm_memory.delete_all(
1027
+ %s::text,
1028
+ %s::text,
1029
+ %s::text
1030
+ )
1031
+ """
1032
+ params = [user_id, run_id, agent_id]
1033
+ return get_llm_memory_tool_result(wrapped_sql, params)
1034
+
1035
+ else:
1036
+ raise ValueError(f"Unknown tool: {name}")
1037
+
1038
+ try:
1039
+ with psycopg.connect(**config) as conn:
1040
+ conn.autocommit = True
1041
+ with conn.cursor() as cursor:
1042
+
1043
+ #cursor.execute("SET statement_timeout = 300000")
1044
+ # Strip the trailing semicolon so the query can be nested in a subquery
1045
+ query = query.rstrip().rstrip(';')
1046
+ wrapped_query = f"""
1047
+ SELECT json_agg(row_to_json(t))
1048
+ FROM ({query}) AS t
1049
+ """
1050
+ cursor.execute(wrapped_query)
1051
+
1052
+ if name == "analyze_table":
1053
+ return [TextContent(type="text", text=f"Successfully analyzed table {schema}.{table}")]
1054
+
1055
+ if cursor.description:
1056
+ # Store the returned result as JSON
1057
+ json_result = cursor.fetchone()[0]
1058
+ json_str = json.dumps(json_result, ensure_ascii = False, indent = 2)
1059
+ result = [TextContent(type="text", text=json_str)]
1060
+
1061
+ try:
1062
+ json.loads(result[0].text)
1063
+ except json.JSONDecodeError as e:
1064
+ raise Exception(f"JSON decode error: {e}\nRaw text: {result[0].text}") from e
1065
+ return result
1066
+
1067
+ else:
1068
+ return [TextContent(type="text", text="Query executed successfully")]
1069
+ except Exception as e:
1070
+ return [TextContent(type="text", text=f"Error executing query: {str(e)}")]
1071
+
1072
+ async def main():
1073
+ """服务器主入口点"""
1074
+ try:
1075
+ config = get_db_config()
1076
+ logger.info("Starting ADBPG MCP server...")
1077
+
1078
+ # Test the database connection
1079
+ try:
1080
+ with psycopg.connect(**config) as conn:
1081
+ logger.info("Successfully connected to database")
1082
+ except Exception as e:
1083
+ logger.error(f"Failed to connect to database: {e}")
1084
+ sys.exit(1)
1085
+ # Use stdio transport
1086
+ async with stdio_server() as (read_stream, write_stream):
1087
+ try:
1088
+ logger.info("Running MCP server with stdio transport...")
1089
+ await app.run(
1090
+ read_stream=read_stream,
1091
+ write_stream=write_stream,
1092
+ initialization_options=app.create_initialization_options()
1093
+ )
1094
+ except Exception as e:
1095
+ logger.error(f"Error running server: {str(e)}")
1096
+ raise
1097
+ except Exception as e:
1098
+ logger.error(f"Server initialization error: {str(e)}")
1099
+ raise
1100
+
1101
+ def run():
1102
+ """同步运行入口点"""
1103
+ try:
1104
+ asyncio.run(main())
1105
+ except Exception as e:
1106
+ logger.error(f"Fatal error: {e}")
1107
+ sys.exit(1)
1108
+
1109
+ if __name__ == "__main__":
1110
+ run()