adbpg-mcp-server 1.0.7__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- adbpg_mcp_server/__init__.py +0 -0
- adbpg_mcp_server/__main__.py +45 -0
- adbpg_mcp_server/adbpg.py +113 -0
- adbpg_mcp_server/adbpg_basic_operation.py +160 -0
- adbpg_mcp_server/adbpg_config.py +155 -0
- adbpg_mcp_server/adbpg_graphrag.py +197 -0
- adbpg_mcp_server/adbpg_memory.py +201 -0
- adbpg_mcp_server/server_http.py +206 -0
- adbpg_mcp_server/server_stdio.py +216 -0
- adbpg_mcp_server-2.0.0.dist-info/METADATA +278 -0
- adbpg_mcp_server-2.0.0.dist-info/RECORD +14 -0
- {adbpg_mcp_server-1.0.7.dist-info → adbpg_mcp_server-2.0.0.dist-info}/WHEEL +2 -1
- adbpg_mcp_server-2.0.0.dist-info/entry_points.txt +2 -0
- adbpg_mcp_server-2.0.0.dist-info/top_level.txt +1 -0
- adbpg_mcp_server-1.0.7.dist-info/METADATA +0 -10
- adbpg_mcp_server-1.0.7.dist-info/RECORD +0 -6
- adbpg_mcp_server-1.0.7.dist-info/entry_points.txt +0 -2
- adbpg_mcp_server-1.0.7.dist-info/licenses/LICENSE +0 -201
- adbpg_mcp_server.py +0 -1143
adbpg_mcp_server/__init__.py
File without changes
adbpg_mcp_server/__main__.py
@@ -0,0 +1,45 @@
+import argparse
+import sys
+
+from .server_http import run_http_server
+from .server_stdio import run_stdio_server
+
+def main():
+    """
+    Select the stdio or HTTP server via command-line arguments.
+    """
+    parser = argparse.ArgumentParser(
+        description="ADBPG Management & Control Plane MCP Server"
+    )
+    parser.add_argument(
+        "--transport",
+        choices=["stdio", "http"],
+        default="stdio",
+        help="Transport protocol to use (stdio or http)"
+    )
+    http_group = parser.add_argument_group("HTTP Server Options")
+    http_group.add_argument(
+        "--host",
+        default="127.0.0.1",
+        help="Host to bind the HTTP server to (default: 127.0.0.1)."
+    )
+    http_group.add_argument(
+        "--port",
+        type=int,
+        default=3000,
+        help="Port to bind the HTTP server to (default: 3000)."
+    )
+
+    args = parser.parse_args()
+    print(f"Starting ADBPG MCP Server with transport {args.transport}")
+
+    if args.transport == "http":
+        run_http_server(host=args.host, port=args.port)
+    elif args.transport == "stdio":
+        run_stdio_server()
+    else:
+        raise ValueError(f"Invalid transport: {args.transport}")
+
+if __name__ == "__main__":
+    main()
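Since the package now ships a `__main__.py`, the entry point can be run with `python -m adbpg_mcp_server` or driven programmatically. A minimal sketch (hypothetical driver code, not part of the package):

    import sys
    from adbpg_mcp_server.__main__ import main

    # Equivalent to: python -m adbpg_mcp_server --transport http --port 3000
    sys.argv = ["adbpg-mcp-server", "--transport", "http", "--host", "127.0.0.1", "--port", "3000"]
    main()  # blocks, serving MCP over HTTP on 127.0.0.1:3000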
adbpg_mcp_server/adbpg.py
@@ -0,0 +1,113 @@
+# adbpg_mcp_server/adbpg.py
+import psycopg
+import logging
+import json
+from psycopg import Connection
+from typing import Optional, Callable
+from .adbpg_config import AppConfig, settings
+
+logger = logging.getLogger(__name__)
+
+class DatabaseManager:
+    def __init__(self, config: AppConfig):
+        self._config_info = config.get_db_connection_info()
+        self._conn = None
+        self._graphrag_conn = None
+        self._llm_memory_conn = None
+        self.db_master_port = self.get_master_port()
+
+    def _get_connection(self, conn_attr: str, initializer: Optional[Callable[[Connection], None]] = None) -> Connection:
+        """Shared logic for acquiring a connection and checking its health."""
+
+        def _create_and_initialize() -> Connection:
+            new_conn = psycopg.connect(**self._config_info)
+            new_conn.autocommit = True
+
+            if initializer:
+                logger.info(f"Running initializer for {conn_attr} on new connection...")
+                try:
+                    initializer(new_conn)
+                except Exception as e:
+                    logger.error(f"Error initializing {conn_attr}: {e}")
+                    new_conn.close()
+                    raise
+            setattr(self, conn_attr, new_conn)
+            return new_conn
+
+        conn = getattr(self, conn_attr)
+
+        if conn is None or conn.closed:
+            logger.info(f"Connecting to database for {conn_attr}...")
+            conn = _create_and_initialize()
+            logger.info(f"New database connection established and initialized for {conn_attr} (id: {id(conn)})")
+        else:
+            try:
+                with conn.cursor() as cur:
+                    cur.execute("SELECT 1;")
+            except psycopg.Error:
+                logger.warning(f"Connection for {conn_attr} is stale. Reconnecting...")
+                conn.close()
+                # Reconnect through _create_and_initialize so extension state
+                # (GraphRAG / LLM Memory config) is restored on the new connection.
+                conn = _create_and_initialize()
+                logger.info(f"Reconnected for {conn_attr} (id: {id(conn)})")
+        return conn
+
+    def get_basic_connection(self) -> Connection:
+        """Get the connection used for basic operations."""
+        return self._get_connection('_conn')
+
+    def get_graphrag_connection(self) -> Connection:
+        """
+        Get the long-lived connection dedicated to GraphRAG.
+        The extension is initialized every time the connection is (re-)established.
+        """
+        def initializer(conn: Connection):
+            try:
+                config_json = json.dumps(settings.get_graphrag_init_config())
+                with conn.cursor() as cursor:
+                    cursor.execute("SELECT adbpg_graphrag.initialize(%s::json);", (config_json,))
+            except (psycopg.Error, json.JSONDecodeError, AttributeError) as e:
+                logger.error(f"ADBPG GraphRAG initialization failed: {e}")
+                raise RuntimeError(f"ADBPG GraphRAG initialization failed: {e}") from e
+        return self._get_connection('_graphrag_conn', initializer=initializer)
+
+    def get_llm_memory_connection(self) -> Connection:
+        """
+        Get the long-lived connection dedicated to LLM Memory.
+        The extension is configured every time the connection is (re-)established.
+        """
+        def initializer(conn: Connection):
+            try:
+                config_json = json.dumps(settings.get_memory_init_config(self.db_master_port))
+                with conn.cursor() as cursor:
+                    cursor.execute("SELECT adbpg_llm_memory.config(%s::json);", (config_json,))
+            except (psycopg.Error, json.JSONDecodeError, AttributeError) as e:
+                logger.error(f"ADBPG LLM Memory initialization failed: {e}")
+                raise RuntimeError(f"ADBPG LLM Memory initialization failed: {e}") from e
+
+        return self._get_connection('_llm_memory_conn', initializer=initializer)
+
+    def get_master_port(self) -> int:
+        """Get the master node's port, needed for the llm_memory configuration."""
+        sql = "SELECT port FROM gp_segment_configuration WHERE content = -1 AND role = 'p';"
+        try:
+            with self.get_basic_connection().cursor() as cursor:
+                cursor.execute(sql)
+                port = cursor.fetchone()[0]
+                return port
+        except psycopg.Error as e:
+            logger.error(f"Database error while getting master port: {e}")
+            raise RuntimeError(f"Database error: {str(e)}") from e
+
+    def close_all(self):
+        """Close all connections."""
+        if self._conn and not self._conn.closed:
+            self._conn.close()
+        if self._graphrag_conn and not self._graphrag_conn.closed:
+            self._graphrag_conn.close()
+        if self._llm_memory_conn and not self._llm_memory_conn.closed:
+            self._llm_memory_conn.close()
+        logger.info("All database connections closed.")
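A minimal usage sketch for the manager, assuming the ADBPG_* environment variables are set (see adbpg_config.py below); not part of the package:

    from adbpg_mcp_server.adbpg_config import settings
    from adbpg_mcp_server.adbpg import DatabaseManager

    db = DatabaseManager(settings)   # connects and resolves the master port up front
    with db.get_basic_connection().cursor() as cur:
        cur.execute("SELECT version();")
        print(cur.fetchone()[0])
    db.close_all()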
adbpg_mcp_server/adbpg_basic_operation.py
@@ -0,0 +1,160 @@
+# adbpg_mcp_server/adbpg_basic_operation.py
+import psycopg
+import logging
+from pydantic import AnyUrl
+from mcp.types import Resource, ResourceTemplate, Tool
+from .adbpg import DatabaseManager
+from typing import Tuple
+
+
+logger = logging.getLogger(__name__)
+
+async def list_resources() -> list[Resource]:
+    """List the available basic resources."""
+    return [
+        Resource(
+            uri="adbpg:///schemas",
+            name="All Schemas",
+            description="AnalyticDB PostgreSQL schemas. List all schemas in the database",
+            mimeType="text/plain"
+        )
+    ]
+
+async def list_resource_templates() -> list[ResourceTemplate]:
+    """Define the dynamic resource templates."""
+    return [
+        ResourceTemplate(
+            uriTemplate="adbpg:///{schema}/tables",
+            name="Schema Tables",
+            description="List all tables in a specific schema",
+            mimeType="text/plain"
+        ),
+        ResourceTemplate(
+            uriTemplate="adbpg:///{schema}/{table}/ddl",
+            name="Table DDL",
+            description="Get the DDL script of a table in a specific schema",
+            mimeType="text/plain"
+        ),
+        ResourceTemplate(
+            uriTemplate="adbpg:///{schema}/{table}/statistics",
+            name="Table Statistics",
+            description="Get statistics information of a table",
+            mimeType="text/plain"
+        )
+    ]
+
+async def read_resource(uri: AnyUrl, db: DatabaseManager) -> str:
+    """Read the content of a resource."""
+    uri_str = str(uri)
+    if not uri_str.startswith("adbpg:///"):
+        raise ValueError(f"Invalid URI scheme: {uri_str}")
+
+    try:
+        conn = db.get_basic_connection()
+        with conn.cursor() as cursor:
+            path_parts = uri_str[9:].split('/')  # strip the "adbpg:///" prefix
+
+            if path_parts[0] == "schemas":
+                query = "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT IN ('pg_catalog', 'information_schema') ORDER BY schema_name;"
+                cursor.execute(query)
+                return "\n".join([schema[0] for schema in cursor.fetchall()])
+
+            elif len(path_parts) == 2 and path_parts[1] == "tables":
+                schema = path_parts[0]
+                query = "SELECT table_name, table_type FROM information_schema.tables WHERE table_schema = %s ORDER BY table_name;"
+                cursor.execute(query, (schema,))
+                return "\n".join([f"{table[0]} ({table[1]})" for table in cursor.fetchall()])
+
+            elif len(path_parts) == 3 and path_parts[2] == "ddl":
+                schema, table = path_parts[0], path_parts[1]
+                # Parameterized to avoid interpolating identifiers into the SQL string.
+                cursor.execute("SELECT pg_get_ddl(%s::regclass);", (f"{schema}.{table}",))
+                ddl = cursor.fetchone()
+                return ddl[0] if ddl else f"No DDL found for {schema}.{table}"
+
+            elif len(path_parts) == 3 and path_parts[2] == "statistics":
+                schema, table = path_parts[0], path_parts[1]
+                query = "SELECT attname, null_frac, avg_width, n_distinct, most_common_vals, most_common_freqs FROM pg_stats WHERE schemaname = %s AND tablename = %s ORDER BY attname;"
+                cursor.execute(query, (schema, table))
+                rows = cursor.fetchall()
+                if not rows:
+                    return f"No statistics found for {schema}.{table}"
+
+                result = [f"Statistics for {schema}.{table}:\n"]
+                for row in rows:
+                    result.append(f"Column: {row[0]}, Null fraction: {row[1]}, Avg width: {row[2]}, Distinct values: {row[3]}")
+                return "\n".join(result)
+
+            raise ValueError(f"Invalid resource URI format: {uri_str}")
+
+    except psycopg.Error as e:
+        logger.error(f"Database error in read_resource: {e}")
+        raise RuntimeError(f"Database error: {str(e)}") from e
+
+def get_basic_tools() -> list[Tool]:
+    """Return the list of basic database operation tools."""
+    return [
+        Tool(
+            name="execute_select_sql",
+            description="Execute SELECT SQL to query data from ADBPG database. Returns data in JSON format.",
+            inputSchema={"type": "object", "properties": {"query": {"type": "string", "description": "The (SELECT) SQL query to execute"}}, "required": ["query"]}
+        ),
+        Tool(
+            name="execute_dml_sql",
+            description="Execute (INSERT, UPDATE, DELETE) SQL to modify data in ADBPG database.",
+            inputSchema={"type": "object", "properties": {"query": {"type": "string", "description": "The DML SQL query to execute"}}, "required": ["query"]}
+        ),
+        Tool(
+            name="execute_ddl_sql",
+            description="Execute (CREATE, ALTER, DROP) SQL statements to manage database objects.",
+            inputSchema={"type": "object", "properties": {"query": {"type": "string", "description": "The DDL SQL query to execute"}}, "required": ["query"]}
+        ),
+        Tool(
+            name="analyze_table",
+            description="Execute ANALYZE command to collect table statistics.",
+            inputSchema={"type": "object", "properties": {"schema": {"type": "string"}, "table": {"type": "string"}}, "required": ["schema", "table"]}
+        ),
+        Tool(
+            name="explain_query",
+            description="Get query execution plan.",
+            inputSchema={"type": "object", "properties": {"query": {"type": "string", "description": "The SQL query to analyze"}}, "required": ["query"]}
+        ),
+    ]
+
+async def call_basic_tool(name: str, arguments: dict, db: DatabaseManager) -> Tuple[str, dict, bool]:
+    """
+    Prepare the SQL and parameters for a basic tool call.
+    Returns (query_string, params, needs_json_agg).
+    """
+    query, params, needs_json_agg = None, None, False
+
+    if name == "execute_select_sql":
+        query_text = arguments.get("query")
+        if not query_text or not query_text.strip().upper().startswith("SELECT"):
+            raise ValueError("Query must be a SELECT statement")
+        query = f"SELECT json_agg(row_to_json(t)) FROM ({query_text.rstrip(';')}) AS t"
+        needs_json_agg = True
+    elif name == "execute_dml_sql":
+        query = arguments.get("query")
+        if not query or not any(query.strip().upper().startswith(k) for k in ["INSERT", "UPDATE", "DELETE"]):
+            raise ValueError("Query must be a DML statement (INSERT, UPDATE, DELETE)")
+    elif name == "execute_ddl_sql":
+        query = arguments.get("query")
+        if not query or not any(query.strip().upper().startswith(k) for k in ["CREATE", "ALTER", "DROP", "TRUNCATE"]):
+            raise ValueError("Query must be a DDL statement (CREATE, ALTER, DROP, TRUNCATE)")
+    elif name == "analyze_table":
+        schema, table = arguments.get("schema"), arguments.get("table")
+        if not all([schema, table]):
+            raise ValueError("Schema and table are required")
+        query = f"ANALYZE {schema}.{table}"
+    elif name == "explain_query":
+        query_text = arguments.get("query")
+        if not query_text:
+            raise ValueError("Query is required")
+        query = f"EXPLAIN (FORMAT JSON) {query_text}"
+        needs_json_agg = True  # the plan comes back as a single JSON value in a single row
+
+    if query is None:
+        raise ValueError(f"Unknown basic tool: {name}")
+
+    return (query, params, needs_json_agg)
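A sketch of how a caller might consume call_basic_tool's (query, params, needs_json_agg) tuple; the server modules that do the actual wiring are shown further below, so this driver is hypothetical and assumes the ADBPG_* environment is configured:

    import asyncio
    import json
    from adbpg_mcp_server.adbpg import DatabaseManager
    from adbpg_mcp_server.adbpg_config import settings
    from adbpg_mcp_server.adbpg_basic_operation import call_basic_tool

    async def run() -> None:
        db = DatabaseManager(settings)
        query, params, needs_json_agg = await call_basic_tool(
            "execute_select_sql", {"query": "SELECT 1 AS one"}, db)
        with db.get_basic_connection().cursor() as cur:
            cur.execute(query, params)
            # SELECT results come back as one json_agg value in a single row
            print(json.dumps(cur.fetchone()[0]) if needs_json_agg else cur.rowcount)
        db.close_all()

    asyncio.run(run())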
adbpg_mcp_server/adbpg_config.py
@@ -0,0 +1,155 @@
+import os
+import sys
+import logging
+from dotenv import load_dotenv
+
+logger = logging.getLogger(__name__)
+
+SERVER_VERSION = "0.2.0"
+
+try:
+    load_dotenv()
+    logger.info("Environment variables loaded")
+except Exception as e:
+    logger.error(f"Error loading .env file: {str(e)}")
+    sys.exit(1)
+
+class AppConfig:
+    def __init__(self):
+        # Database Config
+        self.db_host = os.getenv("ADBPG_HOST")
+        self.db_port = os.getenv("ADBPG_PORT")
+        self.db_user = os.getenv("ADBPG_USER")
+        self.db_password = os.getenv("ADBPG_PASSWORD")
+        self.db_name = os.getenv("ADBPG_DATABASE")
+
+        # GraphRAG Config
+        self.graphrag_llm_model = os.getenv("GRAPHRAG_LLM_MODEL")
+        self.graphrag_api_key = os.getenv("GRAPHRAG_API_KEY")
+        self.graphrag_base_url = os.getenv("GRAPHRAG_BASE_URL")
+        self.graphrag_embedding_model = os.getenv("GRAPHRAG_EMBEDDING_MODEL")
+        self.graphrag_embedding_api_key = os.getenv("GRAPHRAG_EMBEDDING_API_KEY")
+        self.graphrag_embedding_base_url = os.getenv("GRAPHRAG_EMBEDDING_BASE_URL")
+        self.graphrag_language = os.getenv("GRAPHRAG_LANGUAGE", "English")
+        self.graphrag_entity_types = os.getenv("GRAPHRAG_ENTITY_TYPES", '["Organization", "Person", "Location", "Event", "Technology", "Equipment", "Product", "Document", "Category"]')
+        self.graphrag_relationship_types = os.getenv("GRAPHRAG_RELATIONSHIP_TYPES", '["Causes", "Used For", "Helps In", "Includes", "Originated From", "Seasonal", "Coreference Of", "Synonym Of", "Conjunction", "Affects"]')
+
+        # LLM Memory Config
+        self.memory_llm_model = os.getenv("LLMEMORY_LLM_MODEL")
+        self.memory_api_key = os.getenv("LLMEMORY_API_KEY")
+        self.memory_base_url = os.getenv("LLMEMORY_BASE_URL")
+        self.memory_embedding_model = os.getenv("LLMEMORY_EMBEDDING_MODEL")
+        self.memory_embedding_dims = int(os.getenv("LLMEMORY_EMBEDDING_DIMS", "1024"))
+        self.memory_enable_graph = os.getenv("LLMEMORY_ENABLE_GRAPH", "False").lower() in ('true', '1', 't')
+
+        self.db_env_ready = self._check_db_env()
+        self.graphrag_env_ready = self._check_graphrag_env()
+        self.memory_env_ready = self._check_memory_env()
+
+    def _check_db_env(self):
+        # Explicit mapping so the error message names the real variable
+        # (db_name corresponds to ADBPG_DATABASE, not ADBPG_NAME).
+        required = {
+            "db_host": "ADBPG_HOST",
+            "db_port": "ADBPG_PORT",
+            "db_user": "ADBPG_USER",
+            "db_password": "ADBPG_PASSWORD",
+            "db_name": "ADBPG_DATABASE"
+        }
+        missing = [env for attr, env in required.items() if not getattr(self, attr)]
+        if missing:
+            logger.error(f"Missing required ADBPG environment variables: {', '.join(missing)}")
+            return False
+        logger.info("All ADBPG required environment variables are set")
+        return True
+
+    def _check_graphrag_env(self):
+        required = [
+            "graphrag_llm_model",
+            "graphrag_api_key",
+            "graphrag_base_url",
+            "graphrag_embedding_model",
+            "graphrag_embedding_api_key",
+            "graphrag_embedding_base_url"
+        ]
+        missing = [var.upper() for var in required if not getattr(self, var)]
+        if missing:
+            logger.warning(f"Missing GraphRAG environment variables: {', '.join(missing)}. GraphRAG tools will be disabled.")
+            return False
+        logger.info("All GraphRAG required environment variables are set")
+        return True
+
+    def _check_memory_env(self):
+        required = [
+            "memory_llm_model", "memory_api_key", "memory_base_url", "memory_embedding_model"
+        ]
+        missing = [var.replace('memory_', 'LLMEMORY_').upper() for var in required if not getattr(self, var)]
+        if missing:
+            logger.warning(f"Missing LLM Memory environment variables: {', '.join(missing)}. LLM Memory tools will be disabled.")
+            return False
+        logger.info("All LLM Memory required environment variables are set")
+        return True
+
+    def get_db_connection_info(self):
+        if not self.db_env_ready:
+            raise ValueError("Database environment variables are not set.")
+        return {
+            "host": self.db_host,
+            "port": self.db_port,
+            "user": self.db_user,
+            "password": self.db_password,
+            "dbname": self.db_name,
+            "application_name": f"adbpg-mcp-server-{SERVER_VERSION}"
+        }
+
+    def get_graphrag_init_config(self):
+        if not self.graphrag_env_ready:
+            return None
+        return {
+            "llm_model": self.graphrag_llm_model,
+            "llm_api_key": self.graphrag_api_key,
+            "llm_url": self.graphrag_base_url,
+            "embedding_model": self.graphrag_embedding_model,
+            "embedding_api_key": self.graphrag_embedding_api_key,
+            "embedding_url": self.graphrag_embedding_base_url,
+            "language": self.graphrag_language,
+            "entity_types": self.graphrag_entity_types,
+            "relationship_types": self.graphrag_relationship_types,
+            "postgres_password": self.db_password
+        }
+
+    def get_memory_init_config(self, db_master_port: int):
+        if not self.memory_env_ready:
+            return None
+        config = {
+            "llm": {
+                "provider": "openai",
+                "config": {
+                    "model": self.memory_llm_model,
+                    "openai_base_url": self.memory_base_url,
+                    "api_key": self.memory_api_key
+                }
+            },
+            "embedder": {
+                "provider": "openai",
+                "config": {
+                    "model": self.memory_embedding_model,
+                    "embedding_dims": self.memory_embedding_dims,
+                    "api_key": self.memory_api_key,
+                    "openai_base_url": self.memory_base_url
+                }
+            },
+            "vector_store": {
+                "provider": "adbpg",
+                "config": {
+                    "user": self.db_user,
+                    "password": self.db_password,
+                    "dbname": self.db_name,
+                    "hnsw": "True",
+                    "embedding_model_dims": self.memory_embedding_dims,
+                    "port": db_master_port
+                }
+            }
+        }
+        if self.memory_enable_graph:
+            config["graph_store"] = {
+                "provider": "adbpg",
+                "config": {
+                    "url": "http://localhost",
+                    "username": self.db_user,
+                    "password": self.db_password,
+                    "database": self.db_name,
+                    "port": db_master_port
+                }
+            }
+        return config
+
+settings = AppConfig()
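A short sketch of how this configuration gates features at runtime, assuming the process environment (or a .env file) provides the variables; not part of the package:

    from adbpg_mcp_server.adbpg_config import settings

    # db_env_ready is mandatory; the other two only disable their tool groups.
    print(settings.db_env_ready)        # False -> the server cannot connect at all
    print(settings.graphrag_env_ready)  # False -> GraphRAG tools are disabled
    print(settings.memory_env_ready)    # False -> LLM Memory tools are disabled
    if settings.db_env_ready:
        print(settings.get_db_connection_info()["application_name"])  # adbpg-mcp-server-0.2.0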
adbpg_mcp_server/adbpg_graphrag.py
@@ -0,0 +1,197 @@
+import logging
+from mcp.types import Tool, TextContent
+from .adbpg import DatabaseManager
+
+logger = logging.getLogger(__name__)
+
+def get_graphrag_tools() -> list[Tool]:
+    """
+    Return the list of ADBPG GraphRAG tools.
+    """
+    return [
+        Tool(
+            name="adbpg_graphrag_upload",
+            description="Execute graphrag upload operation",
+            # Parameters: filename text, context text
+            # filename is the file name; context is the file content
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "filename": {
+                        "type": "string",
+                        "description": "The name of the file to upload"
+                    },
+                    "context": {
+                        "type": "string",
+                        "description": "The content of the file"
+                    }
+                },
+                "required": ["filename", "context"]
+            }
+        ),
+        Tool(
+            name="adbpg_graphrag_query",
+            description="Execute graphrag query operation",
+            # Parameters: query_str text, [query_mode text]
+            # query_str is the question to ask; query_mode selects the query mode
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "query_str": {
+                        "type": "string",
+                        "description": "The query you want to ask"
+                    },
+                    "query_mode": {
+                        "type": "string",
+                        "description": "The query mode to use: one of [bypass, naive, local, global, hybrid, mix (default), tree]."
+                    },
+                    "start_search_node_id": {
+                        "type": "string",
+                        "description": "If using 'tree' query mode, the ID of the tree node to start searching from."
+                    }
+                },
+                "required": ["query_str"]
+            }
+        ),
+        Tool(
+            name="adbpg_graphrag_upload_decision_tree",
+            description="Upload a decision tree with the specified root_node. If the root_node does not exist, a new decision tree will be created.",
+            # Parameters: context text, root_node text
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "root_node": {
+                        "type": "string",
+                        "description": "The root node (optional)"
+                    },
+                    "context": {
+                        "type": "string",
+                        "description": "The content of the decision tree"
+                    }
+                },
+                "required": ["context"]
+            }
+        ),
+        Tool(
+            name="adbpg_graphrag_append_decision_tree",
+            description="Append a subtree to an existing decision tree at the node specified by root_node_id.",
+            # Parameters: context text, root_node_id text
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "root_node_id": {
+                        "type": "string",
+                        "description": "The ID of the root node to append under"
+                    },
+                    "context": {
+                        "type": "string",
+                        "description": "The content of the decision subtree"
+                    }
+                },
+                "required": ["context", "root_node_id"]
+            }
+        ),
+        Tool(
+            name="adbpg_graphrag_delete_decision_tree",
+            description="Delete a sub-decision tree under the node specified by root_node_entity.",
+            # Parameters: root_node_entity text
+            inputSchema={
+                "type": "object",
+                "properties": {
+                    "root_node_entity": {
+                        "type": "string",
+                        "description": "The root node entity of the subtree to delete"
+                    }
+                },
+                "required": ["root_node_entity"]
+            }
+        ),
+        Tool(
+            name="adbpg_graphrag_reset_tree_query",
+            description="Reset the decision tree in tree query mode",
+            # No parameters
+            inputSchema={
+                "type": "object",
+                "required": []
+            }
+        ),
+    ]
+
+def _execute_graphrag_tool(sql: str, params: list, db: DatabaseManager) -> list[TextContent]:
+    """
+    Execute an ADBPG GraphRAG tool and return the result.
+    """
+    try:
+        conn = db.get_graphrag_connection()
+        with conn.cursor() as cursor:
+            cursor.execute(sql, params)
+            if cursor.description:
+                json_result = cursor.fetchone()[0]
+                return [TextContent(type="text", text=json_result)]
+            else:
+                return [TextContent(type="text", text="ADBPG GraphRAG Tool executed successfully")]
+    except Exception as e:
+        logger.error(f"Error executing ADBPG GraphRAG Tool: {e}")
+        return [TextContent(type="text", text="Error executing ADBPG GraphRAG Tool")]
+
+async def call_graphrag_tool(name: str, arguments: dict, db: DatabaseManager) -> list[TextContent]:
+    """
+    Dispatch an ADBPG GraphRAG tool call.
+    """
+    if name == "adbpg_graphrag_upload":
+        filename, context = arguments.get("filename"), arguments.get("context")
+        if not all([filename, context]):
+            raise ValueError("Filename and context are required.")
+        sql = "SELECT adbpg_graphrag.upload(%s::text, %s::text)"
+        params = [filename, context]
+
+    elif name == "adbpg_graphrag_query":
+        query_str = arguments.get("query_str")
+        if not query_str:
+            raise ValueError("Query string is required.")
+        query_mode = arguments.get("query_mode", "mix")
+        start_node = arguments.get("start_search_node_id")
+
+        if start_node:
+            sql = "SELECT adbpg_graphrag.query(%s::text, %s::text, %s::text)"
+            params = [query_str, query_mode, start_node]
+        else:
+            sql = "SELECT adbpg_graphrag.query(%s::text, %s::text)"
+            params = [query_str, query_mode]
+
+    elif name == "adbpg_graphrag_reset_tree_query":
+        sql = "SELECT adbpg_graphrag.reset_tree_query()"
+        params = []
+
+    elif name == "adbpg_graphrag_upload_decision_tree":
+        root_node = arguments.get("root_node")
+        context = arguments.get("context")
+        if not context:
+            raise ValueError("Decision tree context is required")
+        if not root_node:
+            root_node = None  # normalize an empty string to SQL NULL
+        sql = "SELECT adbpg_graphrag.upload_decision_tree(%s::text, %s::text)"
+        params = [context, root_node]
+
+    elif name == "adbpg_graphrag_append_decision_tree":
+        root_node = arguments.get("root_node_id")
+        context = arguments.get("context")
+        if not context:
+            raise ValueError("Decision tree context is required")
+        if not root_node:
+            raise ValueError("Root node id is required")
+        sql = "SELECT adbpg_graphrag.append_decision_tree(%s::text, %s::text)"
+        params = [context, root_node]
+
+    elif name == "adbpg_graphrag_delete_decision_tree":
+        root_node = arguments.get("root_node_entity")
+        if not root_node:
+            raise ValueError("Root node entity is required")
+        sql = "SELECT adbpg_graphrag.delete_decision_tree(%s::text)"
+        params = [root_node]
+
+    else:
+        raise ValueError(f"Unknown graphrag tool: {name}")
+
+    return _execute_graphrag_tool(sql, params, db)
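A sketch of invoking a GraphRAG tool directly, assuming both the ADBPG_* and GRAPHRAG_* environment variables are configured so get_graphrag_connection() can initialize the extension; hypothetical driver code, not part of the package:

    import asyncio
    from adbpg_mcp_server.adbpg import DatabaseManager
    from adbpg_mcp_server.adbpg_config import settings
    from adbpg_mcp_server.adbpg_graphrag import call_graphrag_tool

    async def run() -> None:
        db = DatabaseManager(settings)
        result = await call_graphrag_tool(
            "adbpg_graphrag_query",
            {"query_str": "What does the uploaded corpus say?", "query_mode": "mix"},
            db)
        print(result[0].text)  # the JSON answer returned by adbpg_graphrag.query
        db.close_all()

    asyncio.run(run())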