auto-coder 0.1.215__py3-none-any.whl → 0.1.218__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic; see the registry's advisory page for more details.

@@ -6,17 +6,100 @@ from dataclasses import dataclass
6
6
  import byzerllm
7
7
  from autocoder.common.mcp_hub import McpHub
8
8
  from autocoder.common.mcp_tools import McpExecutor
9
+ from autocoder.common.mcp_hub import MCP_BUILD_IN_SERVERS
10
+ import json
11
+ import os
12
+ import time
13
+ from pydantic import BaseModel
14
+ import sys
15
+ from loguru import logger
9
16
 
10
@dataclass
class McpRequest:
    """A user query to be routed through the MCP executor.

    Attributes:
        query: The user's natural-language request.
        model: Optional model name; when None the default model is used.
    """
    query: str
    model: Optional[str] = None
14
-
21
+
22
+
23
@dataclass
class McpInstallRequest:
    """Request to install an MCP server, given either a server name
    or a full JSON configuration string."""
    server_name_or_config: Optional[str] = None
26
+
27
+
28
@dataclass
class McpRemoveRequest:
    """Request to remove a configured MCP server by name."""
    server_name: str
31
+
32
+
33
@dataclass
class McpListRequest:
    """Request to list all builtin MCP servers."""
37
+
38
+
39
@dataclass
class McpListRunningRequest:
    """Request to list all running MCP servers."""
43
+
15
44
@dataclass
class McpResponse:
    """Result envelope returned for every MCP request.

    Attributes:
        result: Human-readable result text ("" on failure).
        error: Error description, or None on success.
    """
    result: str
    error: Optional[str] = None
19
48
 
49
+
50
@dataclass
class McpRefreshRequest:
    """Request to refresh MCP server connections.

    When `name` is None, all connections are refreshed.
    """
    name: Optional[str] = None
54
+
55
+
56
# NOTE: an earlier revision defined McpExternalServer twice — first as a
# @dataclass-decorated BaseModel with result/error fields (dead code,
# immediately shadowed), then with the real schema. Only the real
# definition is kept; @dataclass must not be applied to pydantic models.
class McpExternalServer(BaseModel):
    """Represents an external MCP server configuration entry, as published
    in the mcp-get package list."""
    name: str
    description: str
    vendor: str
    sourceUrl: str
    homepage: str
    license: str
    runtime: str
71
+
72
+
73
def get_mcp_external_servers() -> List[McpExternalServer]:
    """Get the external MCP servers list from GitHub.

    Results are cached on disk for one hour under .auto-coder/tmp.
    Returns an empty list when the list cannot be fetched.
    """
    cache_dir = os.path.join(".auto-coder", "tmp")
    os.makedirs(cache_dir, exist_ok=True)
    cache_file = os.path.join(cache_dir, "mcp_external_servers.json")

    # Check cache first. A corrupt or unreadable cache file must not crash
    # the caller — fall through to a fresh fetch instead.
    if os.path.exists(cache_file):
        cache_time = os.path.getmtime(cache_file)
        if time.time() - cache_time < 3600:  # 1 hour cache
            try:
                with open(cache_file, "r") as f:
                    raw_data = json.load(f)
                return [McpExternalServer(**item) for item in raw_data]
            except (OSError, json.JSONDecodeError, TypeError) as e:
                logger.warning(f"Ignoring unreadable MCP server cache: {e}")

    # Fetch from GitHub
    url = "https://raw.githubusercontent.com/michaellatman/mcp-get/refs/heads/main/packages/package-list.json"
    try:
        import requests
        # A bounded timeout keeps the caller responsive; a timeout error is
        # caught below and degrades to an empty list.
        response = requests.get(url, timeout=30)
        if response.status_code == 200:
            raw_data = response.json()
            with open(cache_file, "w") as f:
                json.dump(raw_data, f)
            return [McpExternalServer(**item) for item in raw_data]
        return []
    except Exception as e:
        logger.error(f"Failed to fetch external MCP servers: {e}")
        return []
101
+
102
+
20
103
  class McpServer:
21
104
  def __init__(self):
22
105
  self._request_queue = AsyncQueue()
@@ -24,57 +107,205 @@ class McpServer:
24
107
  self._running = False
25
108
  self._task = None
26
109
  self._loop = None
27
-
110
+
28
111
  def start(self):
29
112
  if self._running:
30
113
  return
31
-
114
+
32
115
  self._running = True
33
116
  self._loop = asyncio.new_event_loop()
34
117
  threading.Thread(target=self._run_event_loop, daemon=True).start()
35
-
118
+
36
119
  def stop(self):
37
120
  if self._running:
38
- self._running = False
121
+ self._running = False
39
122
  if self._loop:
40
123
  self._loop.stop()
41
124
  self._loop.close()
42
-
125
+
43
126
  def _run_event_loop(self):
44
127
  asyncio.set_event_loop(self._loop)
45
128
  self._task = self._loop.create_task(self._process_request())
46
129
  self._loop.run_forever()
47
-
130
+
131
+ async def _install_server(self, request: McpInstallRequest, hub: McpHub) -> McpResponse:
132
+ """Install an MCP server with module dependency check"""
133
+ name = ""
134
+ config = {}
135
+ try:
136
+ server_name_or_config = request.server_name_or_config
137
+ try:
138
+ raw_config = json.loads(server_name_or_config)
139
+ # 用户给了一个完整的配置
140
+ if "mcpServers" in raw_config:
141
+ raw_config = raw_config["mcpServers"]
142
+
143
+ # 取第一个server 配置
144
+ config = list(raw_config.values())[0]
145
+ name = list(raw_config.keys())[0]
146
+ except json.JSONDecodeError:
147
+ name = server_name_or_config.strip()
148
+ if name not in MCP_BUILD_IN_SERVERS:
149
+ # 查找外部server
150
+ external_servers = get_mcp_external_servers()
151
+ for s in external_servers:
152
+ if s.name == name:
153
+ if s.runtime == "python":
154
+ # Check if module exists
155
+ try:
156
+ import importlib
157
+ importlib.import_module(
158
+ name.replace("-", "_"))
159
+ except ImportError:
160
+ # Install missing module
161
+ import subprocess
162
+ try:
163
+ subprocess.run(
164
+ [sys.executable, "-m", "pip", "install", name], check=True)
165
+ except subprocess.CalledProcessError:
166
+ print(
167
+ f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
168
+ print(f" pip install {name}\n")
169
+ print(
170
+ f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
171
+ print(
172
+ f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
173
+
174
+ config = {
175
+ "command": "python",
176
+ "args": [
177
+ "-m", name.replace("-", "_")
178
+ ],
179
+ }
180
+ elif s.runtime == "node":
181
+ # Check if package exists
182
+ try:
183
+ subprocess.run(
184
+ ["npx", name, "--version"], check=True)
185
+ except:
186
+ # Install missing package
187
+ try:
188
+ subprocess.run(
189
+ ["npm", "install", "-y", "-g", name], check=True)
190
+ except subprocess.CalledProcessError:
191
+ print(
192
+ f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
193
+ print(f" npm install -g {name}\n")
194
+ print(
195
+ f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
196
+ print(
197
+ f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
198
+
199
+ config = {
200
+ "command": "npx",
201
+ "args": [
202
+ "-y",
203
+ name
204
+ ]
205
+ }
206
+ break
207
+ else:
208
+ config = MCP_BUILD_IN_SERVERS[name]
209
+ if not name:
210
+ raise ValueError(
211
+ "MCP server name is not available in MCP_BUILD_IN_SERVERS or external servers")
212
+
213
+ logger.info(f"Installing MCP server: {name} with config: {config}")
214
+ if not config:
215
+ raise ValueError(f"MCP server {name} config is not available")
216
+
217
+ await hub.add_server_config(name, config)
218
+ return McpResponse(result=f"Successfully installed MCP server: {request.server_name_or_config}")
219
+ except Exception as e:
220
+ return McpResponse(result="", error=f"Failed to install MCP server: {str(e)}")
221
+
48
222
  async def _process_request(self):
49
223
  hub = McpHub()
50
224
  await hub.initialize()
51
-
225
+
52
226
  while self._running:
53
227
  try:
54
228
  request = await self._request_queue.get()
55
229
  if request is None:
56
230
  break
57
-
58
- llm = byzerllm.ByzerLLM.from_default_model(model=request.model)
59
- mcp_executor = McpExecutor(hub, llm)
60
- conversations = [{"role": "user", "content": request.query}]
61
- _, results = await mcp_executor.run(conversations)
62
- results_str = "\n\n".join(mcp_executor.format_mcp_result(result) for result in results)
63
- await self._response_queue.put(McpResponse(result=results_str))
231
+
232
+ if isinstance(request, McpInstallRequest):
233
+ response = await self._install_server(request, hub)
234
+ await self._response_queue.put(response)
235
+
236
+ elif isinstance(request, McpRemoveRequest):
237
+ try:
238
+ await hub.remove_server_config(request.server_name)
239
+ await self._response_queue.put(McpResponse(result=f"Successfully removed MCP server: {request.server_name}"))
240
+ except Exception as e:
241
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to remove MCP server: {str(e)}"))
242
+
243
+ elif isinstance(request, McpListRequest):
244
+ try:
245
+ # Get built-in servers
246
+ builtin_servers = [
247
+ f"- Built-in: {name}" for name in MCP_BUILD_IN_SERVERS.keys()]
248
+
249
+ # Get external servers
250
+ external_servers = get_mcp_external_servers()
251
+ external_list = [
252
+ f"- External: {s.name} ({s.description})" for s in external_servers]
253
+
254
+ # Combine results
255
+ all_servers = builtin_servers + external_list
256
+ result = "Available MCP servers:\n" + \
257
+ "\n".join(all_servers)
258
+
259
+ await self._response_queue.put(McpResponse(result=result))
260
+ except Exception as e:
261
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to list servers: {str(e)}"))
262
+
263
+ elif isinstance(request, McpListRunningRequest):
264
+ try:
265
+ running_servers = "\n".join(
266
+ [f"- {server.name}" for server in hub.get_servers()])
267
+ await self._response_queue.put(McpResponse(result=running_servers))
268
+ except Exception as e:
269
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to list running servers: {str(e)}"))
270
+
271
+ elif isinstance(request, McpRefreshRequest):
272
+ try:
273
+ if request.name:
274
+ await hub.refresh_server_connection(request.name)
275
+ else:
276
+ await hub.initialize()
277
+ await self._response_queue.put(McpResponse(result="Successfully refreshed MCP server connections"))
278
+ except Exception as e:
279
+ await self._response_queue.put(McpResponse(result="", error=f"Failed to refresh MCP servers: {str(e)}"))
280
+
281
+ else:
282
+ llm = byzerllm.ByzerLLM.from_default_model(
283
+ model=request.model)
284
+ mcp_executor = McpExecutor(hub, llm)
285
+ conversations = [
286
+ {"role": "user", "content": request.query}]
287
+ _, results = await mcp_executor.run(conversations)
288
+ if not results:
289
+ await self._response_queue.put(McpResponse(result="[No Result]", error="No results"))
290
+ results_str = "\n\n".join(
291
+ mcp_executor.format_mcp_result(result) for result in results)
292
+ await self._response_queue.put(McpResponse(result=results_str))
64
293
  except Exception as e:
65
294
  await self._response_queue.put(McpResponse(result="", error=str(e)))
66
-
295
+
67
296
  def send_request(self, request: McpRequest) -> McpResponse:
68
297
  async def _send():
69
298
  await self._request_queue.put(request)
70
299
  return await self._response_queue.get()
71
-
300
+
72
301
  future = asyncio.run_coroutine_threadsafe(_send(), self._loop)
73
302
  return future.result()
74
303
 
304
+
75
305
  # Global MCP server instance
76
306
  _mcp_server = None
77
307
 
308
+
78
309
  def get_mcp_server():
79
310
  global _mcp_server
80
311
  if _mcp_server is None:
File without changes
@@ -0,0 +1,135 @@
1
+ from os import getenv
2
+ from textwrap import dedent
3
+
4
+ import httpx
5
+ import mcp.server.stdio
6
+ import mcp.types as types
7
+ from mcp.server import NotificationOptions, Server
8
+ from mcp.server.models import InitializationOptions
9
+ import json
10
+
11
+ PERPLEXITY_API_KEY = getenv("PERPLEXITY_API_KEY")
12
+ PERPLEXITY_API_BASE_URL = "https://api.perplexity.ai"
13
+
14
+
15
+ server = Server("mcp-server-perplexity")
16
+
17
+
18
@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
    """Advertise the single ask_perplexity tool and its input schema."""
    # JSON schema for a single chat message in the conversation array.
    message_schema = {
        "type": "object",
        "properties": {
            "content": {
                "type": "string",
                "description": "The contents of the message in this turn of conversation.",
            },
            "role": {
                "type": "string",
                "description": "The role of the speaker in this turn of conversation. After the (optional) system message, user and assistant roles should alternate with user then assistant, ending in user.",
                "enum": ["system", "user", "assistant"],
            },
        },
        "required": ["content", "role"],
    }

    tool_description = dedent(
        """
        Perplexity equips agents with a specialized tool for efficiently
        gathering source-backed information from the internet, ideal for
        scenarios requiring research, fact-checking, or contextual data to
        inform decisions and responses.
        Each response includes citations, which provide transparent references
        to the sources used for the generated answer, and choices, which
        contain the model's suggested responses, enabling users to access
        reliable information and diverse perspectives.
        This function may encounter timeout errors due to long processing times,
        but retrying the operation can lead to successful completion.
        [Response structure]
        - id: An ID generated uniquely for each response.
        - model: The model used to generate the response.
        - object: The object type, which always equals `chat.completion`.
        - created: The Unix timestamp (in seconds) of when the completion was
          created.
        - citations[]: Citations for the generated answer.
        - choices[]: The list of completion choices the model generated for the
          input prompt.
        - usage: Usage statistics for the completion request.
        """
    )

    ask_perplexity = types.Tool(
        name="ask_perplexity",
        description=tool_description,
        inputSchema={
            "type": "object",
            "properties": {
                "model": {
                    "type": "string",
                    "description": "The name of the model that will complete your prompt.",
                    "enum": [
                        "llama-3.1-sonar-small-128k-online",
                        # Commenting out larger models,which have higher risks of timing out,
                        # until Claude Desktop can handle long-running tasks effectively.
                        # "llama-3.1-sonar-large-128k-online",
                        # "llama-3.1-sonar-huge-128k-online",
                    ],
                },
                "messages": {
                    "type": "array",
                    "description": "A list of messages comprising the conversation so far.",
                    "items": message_schema,
                },
            },
            "required": ["model", "messages"],
        },
    )
    return [ask_perplexity]
85
+
86
+
87
@server.call_tool()
async def handle_call_tool(
    name: str, arguments: dict
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    """Forward an ask_perplexity call to the Perplexity chat-completions API.

    Raises ValueError for unknown tool names and RuntimeError (chained to the
    underlying httpx error) on transport or HTTP-status failures.
    """
    if name != "ask_perplexity":
        raise ValueError(f"Unknown tool: {name}")

    try:
        async with httpx.AsyncClient() as client:
            # NOTE(review): PERPLEXITY_API_KEY may be unset, yielding an
            # "Authorization: Bearer None" header that the API will reject —
            # consider failing fast with a clear message; verify desired UX.
            response = await client.post(
                f"{PERPLEXITY_API_BASE_URL}/chat/completions",
                headers={
                    "Authorization": f"Bearer {PERPLEXITY_API_KEY}",
                    "Content-Type": "application/json",
                },
                json=arguments,
                # Perplexity calls can run long; no client-side timeout here.
                timeout=None,
            )
            response.raise_for_status()
    except httpx.HTTPError as e:
        # Chain the cause so the original httpx error is preserved.
        raise RuntimeError(f"API error: {str(e)}") from e

    # Use httpx's own JSON decoding (handles the response charset) instead of
    # json.loads(response.text).
    result = response.json()
    content = result["choices"][0]["message"]["content"]
    return [types.TextContent(
        type="text",
        text=content,
    )]
115
+
116
+
117
async def main():
    """Serve the Perplexity MCP server over stdio until the client disconnects."""
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        init_options = InitializationOptions(
            server_name="mcp-server-perplexity",
            server_version="0.1.2",
            capabilities=server.get_capabilities(
                notification_options=NotificationOptions(tools_changed=True),
                experimental_capabilities={},
            ),
        )
        await server.run(read_stream, write_stream, init_options)


if __name__ == "__main__":
    import asyncio
    asyncio.run(main())