auto-coder 0.1.214__py3-none-any.whl → 0.1.217__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of auto-coder has been flagged as possibly problematic; consult the registry's advisory page for details.

@@ -6,17 +6,90 @@ from dataclasses import dataclass
6
6
  import byzerllm
7
7
  from autocoder.common.mcp_hub import McpHub
8
8
  from autocoder.common.mcp_tools import McpExecutor
9
+ from autocoder.common.mcp_hub import MCP_BUILD_IN_SERVERS
10
+ import json
11
+ import os
12
+ import time
13
+ from pydantic import BaseModel
14
+ import sys
15
+ from loguru import logger
9
16
 
10
- @dataclass
17
+
18
+ @dataclass
11
19
  class McpRequest:
12
20
  query: str
13
21
  model: Optional[str] = None
14
-
22
+
23
+
24
+ @dataclass
25
+ class McpInstallRequest:
26
+ server_name_or_config: Optional[str] = None
27
+
28
+
29
+ @dataclass
30
+ class McpRemoveRequest:
31
+ server_name: str
32
+
33
+
34
+ @dataclass
35
+ class McpListRequest:
36
+ """Request to list all builtin MCP servers"""
37
+ pass
38
+
39
+
40
+ @dataclass
41
+ class McpListRunningRequest:
42
+ """Request to list all running MCP servers"""
43
+ pass
44
+
45
+
15
46
  @dataclass
16
47
  class McpResponse:
17
48
  result: str
18
49
  error: Optional[str] = None
19
50
 
51
+
52
+ class McpExternalServer(BaseModel):
53
+ """Represents an external MCP server configuration"""
54
+ name: str
55
+ description: str
56
+ vendor: str
57
+ sourceUrl: str
58
+ homepage: str
59
+ license: str
60
+ runtime: str
61
+
62
+
63
+ def get_mcp_external_servers() -> List[McpExternalServer]:
64
+ """Get external MCP servers list from GitHub"""
65
+ cache_dir = os.path.join(".auto-coder", "tmp")
66
+ os.makedirs(cache_dir, exist_ok=True)
67
+ cache_file = os.path.join(cache_dir, "mcp_external_servers.json")
68
+
69
+ # Check cache first
70
+ if os.path.exists(cache_file):
71
+ cache_time = os.path.getmtime(cache_file)
72
+ if time.time() - cache_time < 3600: # 1 hour cache
73
+ with open(cache_file, "r") as f:
74
+ raw_data = json.load(f)
75
+ return [McpExternalServer(**item) for item in raw_data]
76
+
77
+ # Fetch from GitHub
78
+ url = "https://raw.githubusercontent.com/michaellatman/mcp-get/refs/heads/main/packages/package-list.json"
79
+ try:
80
+ import requests
81
+ response = requests.get(url)
82
+ if response.status_code == 200:
83
+ raw_data = response.json()
84
+ with open(cache_file, "w") as f:
85
+ json.dump(raw_data, f)
86
+ return [McpExternalServer(**item) for item in raw_data]
87
+ return []
88
+ except Exception as e:
89
+ logger.error(f"Failed to fetch external MCP servers: {e}")
90
+ return []
91
+
92
+
20
93
  class McpServer:
21
94
  def __init__(self):
22
95
  self._request_queue = AsyncQueue()
@@ -24,57 +97,183 @@ class McpServer:
24
97
  self._running = False
25
98
  self._task = None
26
99
  self._loop = None
27
-
100
+
28
101
  def start(self):
29
102
  if self._running:
30
103
  return
31
-
104
+
32
105
  self._running = True
33
106
  self._loop = asyncio.new_event_loop()
34
107
  threading.Thread(target=self._run_event_loop, daemon=True).start()
35
-
108
+
36
109
  def stop(self):
37
110
  if self._running:
38
- self._running = False
111
+ self._running = False
39
112
  if self._loop:
40
113
  self._loop.stop()
41
114
  self._loop.close()
42
-
115
+
43
116
  def _run_event_loop(self):
44
117
  asyncio.set_event_loop(self._loop)
45
118
  self._task = self._loop.create_task(self._process_request())
46
119
  self._loop.run_forever()
47
-
120
+
121
+ async def _install_server(self, request: McpInstallRequest) -> McpResponse:
122
+ """Install an MCP server with module dependency check"""
123
+ name = ""
124
+ config = {}
125
+ try:
126
+ server_name_or_config = request.server_name_or_config
127
+ try:
128
+ raw_config = json.loads(server_name_or_config)
129
+ # 用户给了一个完整的配置
130
+ if "mcpServers" in raw_config:
131
+ raw_config = raw_config["mcpServers"]
132
+
133
+ # 取第一个server 配置
134
+ config = list(raw_config.values())[0]
135
+ name = list(raw_config.keys())[0]
136
+ except json.JSONDecodeError:
137
+ name = server_name_or_config
138
+ if name not in MCP_BUILD_IN_SERVERS:
139
+ # 查找外部server
140
+ external_servers = get_mcp_external_servers()
141
+ for s in external_servers:
142
+ if s.name == name:
143
+ if s.runtime == "python":
144
+ # Check if module exists
145
+ try:
146
+ import importlib
147
+ importlib.import_module(name)
148
+ except ImportError:
149
+ # Install missing module
150
+ import subprocess
151
+ try:
152
+ subprocess.run(
153
+ [sys.executable, "-m", "pip", "install", name], check=True)
154
+ except subprocess.CalledProcessError:
155
+ print(f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
156
+ print(f" pip install {name}\n")
157
+ print(f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
158
+ print(f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
159
+
160
+ config = {
161
+ "command": "python",
162
+ "args": [
163
+ "-m", name.replace("-", "_")
164
+ ],
165
+ }
166
+ elif s.runtime == "node":
167
+ # Check if package exists
168
+ try:
169
+ subprocess.run(
170
+ ["npx", name, "--version"], check=True)
171
+ except:
172
+ # Install missing package
173
+ try:
174
+ subprocess.run(
175
+ ["npm", "install", "-y", "-g", name], check=True)
176
+ except subprocess.CalledProcessError:
177
+ print(f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
178
+ print(f" npm install -g {name}\n")
179
+ print(f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
180
+ print(f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
181
+
182
+ config = {
183
+ "command": "npx",
184
+ "args": [
185
+ "-y",
186
+ name
187
+ ]
188
+ }
189
+ break
190
+ else:
191
+ config = MCP_BUILD_IN_SERVERS[name]
192
+ if not name:
193
+ raise ValueError("MCP server name is not available in MCP_BUILD_IN_SERVERS or external servers")
194
+ hub = McpHub()
195
+ await hub.add_server_config(name, config)
196
+ return McpResponse(result=f"Successfully installed MCP server: {request.server_name_or_config}")
197
+ except Exception as e:
198
+ return McpResponse(result="", error=f"Failed to install MCP server: {str(e)}")
199
+
48
200
  async def _process_request(self):
49
201
  hub = McpHub()
50
202
  await hub.initialize()
51
-
203
+
52
204
  while self._running:
53
205
  try:
54
206
  request = await self._request_queue.get()
55
207
  if request is None:
56
208
  break
57
-
58
- llm = byzerllm.ByzerLLM.from_default_model(model=request.model)
59
- mcp_executor = McpExecutor(hub, llm)
60
- conversations = [{"role": "user", "content": request.query}]
61
- _, results = await mcp_executor.run(conversations)
62
- results_str = "\n\n".join(mcp_executor.format_mcp_result(result) for result in results)
63
- await self._response_queue.put(McpResponse(result=results_str))
209
+
210
+ if isinstance(request, McpInstallRequest):
211
+ response = await self._install_server(request)
212
+ await self._response_queue.put(response)
213
+
214
+ elif isinstance(request, McpRemoveRequest):
215
+ try:
216
+ await hub.remove_server_config(request.server_name)
217
+ await self._response_queue.put(McpResponse(result=f"Successfully removed MCP server: {request.server_name}"))
218
+ except Exception as e:
219
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to remove MCP server: {str(e)}"))
220
+
221
+ elif isinstance(request, McpListRequest):
222
+ try:
223
+ # Get built-in servers
224
+ builtin_servers = [
225
+ f"- Built-in: {name}" for name in MCP_BUILD_IN_SERVERS.keys()]
226
+
227
+ # Get external servers
228
+ external_servers = get_mcp_external_servers()
229
+ external_list = [
230
+ f"- External: {s.name} ({s.description})" for s in external_servers]
231
+
232
+ # Combine results
233
+ all_servers = builtin_servers + external_list
234
+ result = "Available MCP servers:\n" + \
235
+ "\n".join(all_servers)
236
+
237
+ await self._response_queue.put(McpResponse(result=result))
238
+ except Exception as e:
239
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to list servers: {str(e)}"))
240
+
241
+ elif isinstance(request, McpListRunningRequest):
242
+ try:
243
+ running_servers = "\n".join(
244
+ [f"- {server.name}" for server in hub.get_servers()])
245
+ await self._response_queue.put(McpResponse(result=running_servers))
246
+ except Exception as e:
247
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to list running servers: {str(e)}"))
248
+
249
+ else:
250
+ llm = byzerllm.ByzerLLM.from_default_model(
251
+ model=request.model)
252
+ mcp_executor = McpExecutor(hub, llm)
253
+ conversations = [
254
+ {"role": "user", "content": request.query}]
255
+ _, results = await mcp_executor.run(conversations)
256
+ if not results:
257
+ await self._response_queue.put(McpResponse(result="[No Result]", error="No results"))
258
+ results_str = "\n\n".join(
259
+ mcp_executor.format_mcp_result(result) for result in results)
260
+ await self._response_queue.put(McpResponse(result=results_str))
64
261
  except Exception as e:
65
262
  await self._response_queue.put(McpResponse(result="", error=str(e)))
66
-
263
+
67
264
  def send_request(self, request: McpRequest) -> McpResponse:
68
265
  async def _send():
69
266
  await self._request_queue.put(request)
70
267
  return await self._response_queue.get()
71
-
268
+
72
269
  future = asyncio.run_coroutine_threadsafe(_send(), self._loop)
73
270
  return future.result()
74
271
 
272
+
75
273
  # Global MCP server instance
76
274
  _mcp_server = None
77
275
 
276
+
78
277
  def get_mcp_server():
79
278
  global _mcp_server
80
279
  if _mcp_server is None:
File without changes
@@ -0,0 +1,135 @@
1
+ from os import getenv
2
+ from textwrap import dedent
3
+
4
+ import httpx
5
+ import mcp.server.stdio
6
+ import mcp.types as types
7
+ from mcp.server import NotificationOptions, Server
8
+ from mcp.server.models import InitializationOptions
9
+ import json
10
+
11
+ PERPLEXITY_API_KEY = getenv("PERPLEXITY_API_KEY")
12
+ PERPLEXITY_API_BASE_URL = "https://api.perplexity.ai"
13
+
14
+
15
+ server = Server("mcp-server-perplexity")
16
+
17
+
18
+ @server.list_tools()
19
+ async def handle_list_tools() -> list[types.Tool]:
20
+ return [
21
+ types.Tool(
22
+ name="ask_perplexity",
23
+ description=dedent(
24
+ """
25
+ Perplexity equips agents with a specialized tool for efficiently
26
+ gathering source-backed information from the internet, ideal for
27
+ scenarios requiring research, fact-checking, or contextual data to
28
+ inform decisions and responses.
29
+ Each response includes citations, which provide transparent references
30
+ to the sources used for the generated answer, and choices, which
31
+ contain the model's suggested responses, enabling users to access
32
+ reliable information and diverse perspectives.
33
+ This function may encounter timeout errors due to long processing times,
34
+ but retrying the operation can lead to successful completion.
35
+ [Response structure]
36
+ - id: An ID generated uniquely for each response.
37
+ - model: The model used to generate the response.
38
+ - object: The object type, which always equals `chat.completion`.
39
+ - created: The Unix timestamp (in seconds) of when the completion was
40
+ created.
41
+ - citations[]: Citations for the generated answer.
42
+ - choices[]: The list of completion choices the model generated for the
43
+ input prompt.
44
+ - usage: Usage statistics for the completion request.
45
+ """
46
+ ),
47
+ inputSchema={
48
+ "type": "object",
49
+ "properties": {
50
+ "model": {
51
+ "type": "string",
52
+ "description": "The name of the model that will complete your prompt.",
53
+ "enum": [
54
+ "llama-3.1-sonar-small-128k-online",
55
+ # Commenting out larger models,which have higher risks of timing out,
56
+ # until Claude Desktop can handle long-running tasks effectively.
57
+ # "llama-3.1-sonar-large-128k-online",
58
+ # "llama-3.1-sonar-huge-128k-online",
59
+ ],
60
+ },
61
+ "messages": {
62
+ "type": "array",
63
+ "description": "A list of messages comprising the conversation so far.",
64
+ "items": {
65
+ "type": "object",
66
+ "properties": {
67
+ "content": {
68
+ "type": "string",
69
+ "description": "The contents of the message in this turn of conversation.",
70
+ },
71
+ "role": {
72
+ "type": "string",
73
+ "description": "The role of the speaker in this turn of conversation. After the (optional) system message, user and assistant roles should alternate with user then assistant, ending in user.",
74
+ "enum": ["system", "user", "assistant"],
75
+ },
76
+ },
77
+ "required": ["content", "role"],
78
+ },
79
+ },
80
+ },
81
+ "required": ["model", "messages"],
82
+ },
83
+ )
84
+ ]
85
+
86
+
87
+ @server.call_tool()
88
+ async def handle_call_tool(
89
+ name: str, arguments: dict
90
+ ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
91
+ if name != "ask_perplexity":
92
+ raise ValueError(f"Unknown tool: {name}")
93
+
94
+ try:
95
+ async with httpx.AsyncClient() as client:
96
+ response = await client.post(
97
+ f"{PERPLEXITY_API_BASE_URL}/chat/completions",
98
+ headers={
99
+ "Authorization": f"Bearer {PERPLEXITY_API_KEY}",
100
+ "Content-Type": "application/json",
101
+ },
102
+ json=arguments,
103
+ timeout=None,
104
+ )
105
+ response.raise_for_status()
106
+ except httpx.HTTPError as e:
107
+ raise RuntimeError(f"API error: {str(e)}")
108
+
109
+ result = json.loads(response.text)
110
+ c = result["choices"][0]["message"]["content"]
111
+ return [types.TextContent(
112
+ type="text",
113
+ text= c,
114
+ )]
115
+
116
+
117
+ async def main():
118
+ async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
119
+ await server.run(
120
+ read_stream,
121
+ write_stream,
122
+ InitializationOptions(
123
+ server_name="mcp-server-perplexity",
124
+ server_version="0.1.2",
125
+ capabilities=server.get_capabilities(
126
+ notification_options=NotificationOptions(
127
+ tools_changed=True),
128
+ experimental_capabilities={},
129
+ ),
130
+ ),
131
+ )
132
+
133
+ if __name__ == "__main__":
134
+ import asyncio
135
+ asyncio.run(main())