stigmergy 1.0.68 → 1.0.70
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
- package/README.en.md +306 -300
- package/README.md +469 -301
- package/package.json +97 -81
- package/scripts/publish.js +268 -0
- package/scripts/simple-publish.js +59 -0
- package/src/index.js +12 -0
- package/test/enhanced-main-alignment.test.js +298 -0
- package/test/hook-system-integration-test.js +307 -0
- package/test/natural-language-skills-test.js +320 -0
- package/test/nl-integration-test.js +179 -0
- package/test/parameter-parsing-test.js +143 -0
- package/test/real-test.js +435 -0
- package/test/system-compatibility-test.js +447 -0
- package/test/tdd-fixes-test.js +211 -0
- package/test/third-party-skills-test.js +321 -0
- package/test/tool-selection-integration-test.js +157 -0
- package/test/unit/cli-scanner.test.js +291 -0
- package/test/unit/cross-cli-executor.test.js +399 -0
- package/src/adapters/claude/__init__.py +0 -13
- package/src/adapters/claude/claude_skills_integration.py +0 -609
- package/src/adapters/claude/hook_adapter.py +0 -663
- package/src/adapters/claude/install_claude_integration.py +0 -265
- package/src/adapters/claude/skills_hook_adapter.py +0 -841
- package/src/adapters/claude/standalone_claude_adapter.py +0 -384
- package/src/adapters/cline/__init__.py +0 -20
- package/src/adapters/cline/config.py +0 -108
- package/src/adapters/cline/install_cline_integration.py +0 -617
- package/src/adapters/cline/mcp_server.py +0 -713
- package/src/adapters/cline/standalone_cline_adapter.py +0 -459
- package/src/adapters/codebuddy/__init__.py +0 -13
- package/src/adapters/codebuddy/buddy_adapter.py +0 -1125
- package/src/adapters/codebuddy/install_codebuddy_integration.py +0 -279
- package/src/adapters/codebuddy/skills_hook_adapter.py +0 -672
- package/src/adapters/codebuddy/skills_integration.py +0 -395
- package/src/adapters/codebuddy/standalone_codebuddy_adapter.py +0 -403
- package/src/adapters/codex/__init__.py +0 -11
- package/src/adapters/codex/base.py +0 -46
- package/src/adapters/codex/install_codex_integration.py +0 -311
- package/src/adapters/codex/mcp_server.py +0 -493
- package/src/adapters/codex/natural_language_parser.py +0 -82
- package/src/adapters/codex/slash_command_adapter.py +0 -326
- package/src/adapters/codex/standalone_codex_adapter.py +0 -362
- package/src/adapters/copilot/__init__.py +0 -13
- package/src/adapters/copilot/install_copilot_integration.py +0 -564
- package/src/adapters/copilot/mcp_adapter.py +0 -772
- package/src/adapters/copilot/mcp_server.py +0 -168
- package/src/adapters/copilot/standalone_copilot_adapter.py +0 -114
- package/src/adapters/gemini/__init__.py +0 -13
- package/src/adapters/gemini/extension_adapter.py +0 -690
- package/src/adapters/gemini/install_gemini_integration.py +0 -257
- package/src/adapters/gemini/standalone_gemini_adapter.py +0 -366
- package/src/adapters/iflow/__init__.py +0 -7
- package/src/adapters/iflow/hook_adapter.py +0 -1038
- package/src/adapters/iflow/hook_installer.py +0 -536
- package/src/adapters/iflow/install_iflow_integration.py +0 -271
- package/src/adapters/iflow/official_hook_adapter.py +0 -1272
- package/src/adapters/iflow/standalone_iflow_adapter.py +0 -48
- package/src/adapters/iflow/workflow_adapter.py +0 -793
- package/src/adapters/qoder/hook_installer.py +0 -732
- package/src/adapters/qoder/install_qoder_integration.py +0 -265
- package/src/adapters/qoder/notification_hook_adapter.py +0 -863
- package/src/adapters/qoder/standalone_qoder_adapter.py +0 -48
- package/src/adapters/qwen/__init__.py +0 -17
- package/src/adapters/qwencode/__init__.py +0 -13
- package/src/adapters/qwencode/inheritance_adapter.py +0 -818
- package/src/adapters/qwencode/install_qwencode_integration.py +0 -276
- package/src/adapters/qwencode/standalone_qwencode_adapter.py +0 -399
- package/src/atomic_collaboration_handler.py +0 -461
- package/src/cli_collaboration_agent.py +0 -697
- package/src/collaboration/hooks.py +0 -315
- package/src/core/__init__.py +0 -21
- package/src/core/ai_environment_scanner.py +0 -331
- package/src/core/base_adapter.py +0 -220
- package/src/core/cli_hook_integration.py +0 -406
- package/src/core/cross_cli_executor.py +0 -713
- package/src/core/cross_cli_mapping.py +0 -1165
- package/src/core/cross_platform_encoding.py +0 -365
- package/src/core/cross_platform_safe_cli.py +0 -894
- package/src/core/direct_cli_executor.py +0 -805
- package/src/core/direct_cli_hook_system.py +0 -958
- package/src/core/enhanced_init_processor.py +0 -467
- package/src/core/graceful_cli_executor.py +0 -912
- package/src/core/md_enhancer.py +0 -342
- package/src/core/md_generator.py +0 -619
- package/src/core/models.py +0 -218
- package/src/core/parser.py +0 -108
- package/src/core/real_cli_hook_system.py +0 -852
- package/src/core/real_cross_cli_system.py +0 -925
- package/src/core/verified_cross_cli_system.py +0 -961
- package/src/deploy.js +0 -737
- package/src/enhanced-main.js +0 -626
- package/src/enhanced_deploy.js +0 -303
- package/src/enhanced_universal_cli_setup.py +0 -930
- package/src/kimi_wrapper.py +0 -104
- package/src/main.js +0 -1309
- package/src/shell_integration.py +0 -398
- package/src/simple-main.js +0 -315
- package/src/smart_router_creator.py +0 -323
- package/src/universal_cli_setup.py +0 -1289
- package/src/utils/__init__.py +0 -12
- package/src/utils/cli_detector.py +0 -445
- package/src/utils/file_utils.py +0 -246
--- package/src/adapters/cline/mcp_server.py
+++ /dev/null
@@ -1,713 +0,0 @@
-"""
-Stigmergy MCP Server for Cline Integration
-
-This module implements a Model Context Protocol (MCP) server that provides
-tools and resources for the Stigmergy CLI Multi-Agents system integration
-with Cline CLI.
-
-Key Features:
-- Tool exposure for cross-CLI operations
-- Resource management for project context
-- Bidirectional communication with Cline
-- Dynamic tool creation and management
-"""
-
-import asyncio
-import json
-import logging
-import os
-import sys
-from pathlib import Path
-from typing import Dict, Any, List, Optional
-from dataclasses import dataclass, asdict
-
-# Configure logging
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-logger = logging.getLogger(__name__)
-
-
-@dataclass
-class MCPToolDefinition:
-    """MCP Tool definition"""
-    name: str
-    description: str
-    inputSchema: Dict[str, Any]
-
-
-@dataclass
-class MCPResourceDefinition:
-    """MCP Resource definition"""
-    uri: str
-    name: str
-    description: str
-    mimeType: Optional[str] = None
-
-
-class StigmergyMCPServer:
-    """MCP Server implementation for Stigmergy-Cline integration"""
-
-    def __init__(self):
-        self.tools: Dict[str, MCPToolDefinition] = {}
-        self.resources: Dict[str, MCPResourceDefinition] = {}
-        self.project_root = os.environ.get("STIGMERGY_PROJECT_ROOT", os.getcwd())
-        self.collaboration_mode = os.environ.get("STIGMERGY_COLLABORATION_MODE", "enabled")
-        self._setup_default_tools()
-        self._setup_default_resources()
-
-    def _setup_default_tools(self):
-        """Setup default MCP tools"""
-        default_tools = [
-            MCPToolDefinition(
-                name="search_files",
-                description="Search for files in the project directory",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "pattern": {
-                            "type": "string",
-                            "description": "File pattern to search for (glob format)"
-                        },
-                        "directory": {
-                            "type": "string",
-                            "description": "Directory to search in (optional)"
-                        }
-                    },
-                    "required": ["pattern"]
-                }
-            ),
-            MCPToolDefinition(
-                name="read_project_file",
-                description="Read content of a project file",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "path": {
-                            "type": "string",
-                            "description": "Relative path to the file"
-                        },
-                        "max_lines": {
-                            "type": "integer",
-                            "description": "Maximum number of lines to read",
-                            "default": 100
-                        }
-                    },
-                    "required": ["path"]
-                }
-            ),
-            MCPToolDefinition(
-                name="get_project_structure",
-                description="Get the project directory structure",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "max_depth": {
-                            "type": "integer",
-                            "description": "Maximum directory depth to explore",
-                            "default": 3
-                        }
-                    }
-                }
-            ),
-            MCPToolDefinition(
-                name="analyze_codebase",
-                description="Analyze the codebase for patterns and structure",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "analysis_type": {
-                            "type": "string",
-                            "enum": ["dependencies", "structure", "complexity", "patterns"],
-                            "description": "Type of analysis to perform"
-                        }
-                    },
-                    "required": ["analysis_type"]
-                }
-            ),
-            MCPToolDefinition(
-                name="collaborate_with_cli",
-                description="Collaborate with other CLI tools in the system",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "target_cli": {
-                            "type": "string",
-                            "description": "Target CLI tool to collaborate with"
-                        },
-                        "task": {
-                            "type": "string",
-                            "description": "Task to delegate to the target CLI"
-                        },
-                        "context": {
-                            "type": "object",
-                            "description": "Context information for the task"
-                        }
-                    },
-                    "required": ["target_cli", "task"]
-                }
-            ),
-            MCPToolDefinition(
-                name="create_tool",
-                description="Create a new MCP tool dynamically",
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "tool_name": {
-                            "type": "string",
-                            "description": "Name of the new tool"
-                        },
-                        "description": {
-                            "type": "string",
-                            "description": "Description of what the tool does"
-                        },
-                        "function_code": {
-                            "type": "string",
-                            "description": "Python code implementing the tool function"
-                        }
-                    },
-                    "required": ["tool_name", "description", "function_code"]
-                }
-            )
-        ]
-
-        for tool in default_tools:
-            self.tools[tool.name] = tool
-
-    def _setup_default_resources(self):
-        """Setup default MCP resources"""
-        default_resources = [
-            MCPResourceDefinition(
-                uri="file://project_spec.json",
-                name="Project Specification",
-                description="Current project specification and configuration",
-                mimeType="application/json"
-            ),
-            MCPResourceDefinition(
-                uri="file://collaboration_log.md",
-                name="Collaboration Log",
-                description="Log of cross-CLI collaboration activities",
-                mimeType="text/markdown"
-            ),
-            MCPResourceDefinition(
-                uri="file://tasks.md",
-                name="Task Management",
-                description="Current tasks and their status",
-                mimeType="text/markdown"
-            ),
-            MCPResourceDefinition(
-                uri="file://global_memory.json",
-                name="Global Memory",
-                description="Shared memory and learned patterns across CLI tools",
-                mimeType="application/json"
-            )
-        ]
-
-        for resource in default_resources:
-            self.resources[resource.uri] = resource
-
-    async def handle_request(self, request: Dict[str, Any]) -> Dict[str, Any]:
-        """Handle incoming MCP request"""
-        method = request.get("method", "")
-        params = request.get("params", {})
-        request_id = request.get("id")
-
-        logger.info(f"Handling MCP request: {method}")
-
-        try:
-            if method == "tools/list":
-                return await self._handle_tools_list(request_id)
-            elif method == "tools/call":
-                return await self._handle_tools_call(params, request_id)
-            elif method == "resources/list":
-                return await self._handle_resources_list(request_id)
-            elif method == "resources/read":
-                return await self._handle_resources_read(params, request_id)
-            elif method == "initialize":
-                return await self._handle_initialize(params, request_id)
-            else:
-                return self._create_error_response(request_id, -32601, f"Method {method} not found")
-        except Exception as e:
-            logger.error(f"Error handling request {method}: {e}")
-            return self._create_error_response(request_id, -32603, str(e))
-
-    async def _handle_tools_list(self, request_id: Any) -> Dict[str, Any]:
-        """Handle tools/list request"""
-        tools_data = [asdict(tool) for tool in self.tools.values()]
-        return {
-            "jsonrpc": "2.0",
-            "id": request_id,
-            "result": {"tools": tools_data}
-        }
-
-    async def _handle_tools_call(self, params: Dict[str, Any], request_id: Any) -> Dict[str, Any]:
-        """Handle tools/call request"""
-        tool_name = params.get("name")
-        tool_arguments = params.get("arguments", {})
-
-        if not tool_name:
-            return self._create_error_response(request_id, -32602, "Missing tool name")
-
-        if tool_name not in self.tools:
-            return self._create_error_response(request_id, -32602, f"Tool {tool_name} not found")
-
-        try:
-            result = await self._execute_tool(tool_name, tool_arguments)
-            return {
-                "jsonrpc": "2.0",
-                "id": request_id,
-                "result": result
-            }
-        except Exception as e:
-            logger.error(f"Tool execution error: {e}")
-            return self._create_error_response(request_id, -32603, f"Tool execution failed: {str(e)}")
-
-    async def _execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
-        """Execute the specified tool"""
-        if tool_name == "search_files":
-            return await self._tool_search_files(arguments)
-        elif tool_name == "read_project_file":
-            return await self._tool_read_project_file(arguments)
-        elif tool_name == "get_project_structure":
-            return await self._tool_get_project_structure(arguments)
-        elif tool_name == "analyze_codebase":
-            return await self._tool_analyze_codebase(arguments)
-        elif tool_name == "collaborate_with_cli":
-            return await self._tool_collaborate_with_cli(arguments)
-        elif tool_name == "create_tool":
-            return await self._tool_create_tool(arguments)
-        else:
-            raise ValueError(f"Unknown tool: {tool_name}")
-
-    async def _tool_search_files(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Search for files in the project"""
-        pattern = arguments.get("pattern", "*")
-        directory = arguments.get("directory", self.project_root)
-
-        if not os.path.isabs(directory):
-            directory = os.path.join(self.project_root, directory)
-
-        try:
-            import glob
-            search_path = os.path.join(directory, pattern)
-            files = glob.glob(search_path, recursive=True)
-
-            # Filter out common ignore patterns
-            filtered_files = []
-            for file_path in files:
-                rel_path = os.path.relpath(file_path, self.project_root)
-                if not any(part.startswith('.') for part in rel_path.split(os.sep)):
-                    if not any(part in ['node_modules', '__pycache__', '.git'] for part in rel_path.split(os.sep)):
-                        filtered_files.append(rel_path)
-
-            return {
-                "files": filtered_files,
-                "count": len(filtered_files),
-                "pattern": pattern,
-                "directory": directory
-            }
-        except Exception as e:
-            logger.error(f"File search error: {e}")
-            return {"error": str(e), "files": [], "count": 0}
-
-    async def _tool_read_project_file(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Read content of a project file"""
-        file_path = arguments.get("path")
-        max_lines = arguments.get("max_lines", 100)
-
-        if not file_path:
-            return {"error": "File path is required"}
-
-        # Ensure path is within project root
-        full_path = os.path.join(self.project_root, file_path)
-        if not os.path.abspath(full_path).startswith(os.path.abspath(self.project_root)):
-            return {"error": "Path outside project root is not allowed"}
-
-        if not os.path.exists(full_path):
-            return {"error": f"File not found: {file_path}"}
-
-        try:
-            with open(full_path, 'r', encoding='utf-8') as f:
-                lines = f.readlines()
-                content = ''.join(lines[:max_lines])
-
-            return {
-                "path": file_path,
-                "content": content,
-                "total_lines": len(lines),
-                "read_lines": min(max_lines, len(lines)),
-                "truncated": len(lines) > max_lines
-            }
-        except Exception as e:
-            logger.error(f"File read error: {e}")
-            return {"error": str(e)}
-
-    async def _tool_get_project_structure(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Get project directory structure"""
-        max_depth = arguments.get("max_depth", 3)
-
-        def build_tree(path: str, current_depth: int = 0) -> Dict[str, Any]:
-            if current_depth >= max_depth:
-                return {"name": os.path.basename(path), "type": "directory", "children": []}
-
-            try:
-                items = []
-                for item in sorted(os.listdir(path)):
-                    if item.startswith('.'):
-                        continue
-
-                    item_path = os.path.join(path, item)
-                    if os.path.isdir(item_path):
-                        items.append(build_tree(item_path, current_depth + 1))
-                    else:
-                        items.append({
-                            "name": item,
-                            "type": "file",
-                            "size": os.path.getsize(item_path)
-                        })
-
-                return {
-                    "name": os.path.basename(path) or path,
-                    "type": "directory",
-                    "children": items
-                }
-            except PermissionError:
-                return {"name": os.path.basename(path), "type": "directory", "error": "Permission denied"}
-
-        try:
-            structure = build_tree(self.project_root)
-            return {"structure": structure, "root": self.project_root}
-        except Exception as e:
-            logger.error(f"Project structure error: {e}")
-            return {"error": str(e)}
-
-    async def _tool_analyze_codebase(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Analyze codebase for patterns and structure"""
-        analysis_type = arguments.get("analysis_type", "structure")
-
-        try:
-            if analysis_type == "dependencies":
-                return await self._analyze_dependencies()
-            elif analysis_type == "structure":
-                return await self._analyze_structure()
-            elif analysis_type == "complexity":
-                return await self._analyze_complexity()
-            elif analysis_type == "patterns":
-                return await self._analyze_patterns()
-            else:
-                return {"error": f"Unknown analysis type: {analysis_type}"}
-        except Exception as e:
-            logger.error(f"Codebase analysis error: {e}")
-            return {"error": str(e)}
-
-    async def _analyze_dependencies(self) -> Dict[str, Any]:
-        """Analyze project dependencies"""
-        dependencies = {}
-
-        # Check for package.json
-        package_json = os.path.join(self.project_root, "package.json")
-        if os.path.exists(package_json):
-            try:
-                with open(package_json, 'r') as f:
-                    data = json.load(f)
-                    dependencies["npm"] = {
-                        "dependencies": data.get("dependencies", {}),
-                        "devDependencies": data.get("devDependencies", {})
-                    }
-            except Exception as e:
-                dependencies["npm_error"] = str(e)
-
-        # Check for requirements.txt
-        requirements_txt = os.path.join(self.project_root, "requirements.txt")
-        if os.path.exists(requirements_txt):
-            try:
-                with open(requirements_txt, 'r') as f:
-                    deps = [line.strip() for line in f if line.strip() and not line.startswith('#')]
-                    dependencies["python"] = deps
-            except Exception as e:
-                dependencies["python_error"] = str(e)
-
-        return {"analysis_type": "dependencies", "results": dependencies}
-
-    async def _analyze_structure(self) -> Dict[str, Any]:
-        """Analyze project structure"""
-        structure_info = {
-            "total_files": 0,
-            "total_directories": 0,
-            "file_types": {},
-            "languages": {}
-        }
-
-        for root, dirs, files in os.walk(self.project_root):
-            # Skip hidden directories
-            dirs[:] = [d for d in dirs if not d.startswith('.')]
-
-            structure_info["total_directories"] += len(dirs)
-            structure_info["total_files"] += len(files)
-
-            for file in files:
-                ext = os.path.splitext(file)[1].lower()
-                if ext:
-                    structure_info["file_types"][ext] = structure_info["file_types"].get(ext, 0) + 1
-
-                    # Simple language detection
-                    if ext in ['.py']:
-                        structure_info["languages"]["python"] = structure_info["languages"].get("python", 0) + 1
-                    elif ext in ['.js', '.ts']:
-                        structure_info["languages"]["javascript"] = structure_info["languages"].get("javascript", 0) + 1
-                    elif ext in ['.md']:
-                        structure_info["languages"]["markdown"] = structure_info["languages"].get("markdown", 0) + 1
-
-        return {"analysis_type": "structure", "results": structure_info}
-
-    async def _analyze_complexity(self) -> Dict[str, Any]:
-        """Analyze code complexity (simplified)"""
-        complexity_info = {
-            "total_lines": 0,
-            "file_complexity": {}
-        }
-
-        # Simple line counting
-        for root, dirs, files in os.walk(self.project_root):
-            dirs[:] = [d for d in dirs if not d.startswith('.')]
-
-            for file in files:
-                if file.endswith(('.py', '.js', '.ts')):
-                    file_path = os.path.join(root, file)
-                    try:
-                        with open(file_path, 'r', encoding='utf-8') as f:
-                            lines = len(f.readlines())
-                            complexity_info["total_lines"] += lines
-                            rel_path = os.path.relpath(file_path, self.project_root)
-                            complexity_info["file_complexity"][rel_path] = lines
-                    except Exception:
-                        continue
-
-        return {"analysis_type": "complexity", "results": complexity_info}
-
-    async def _analyze_patterns(self) -> Dict[str, Any]:
-        """Analyze code patterns (simplified)"""
-        patterns = {
-            "imports": {},
-            "functions": {},
-            "classes": {}
-        }
-
-        for root, dirs, files in os.walk(self.project_root):
-            dirs[:] = [d for d in dirs if not d.startswith('.')]
-
-            for file in files:
-                if file.endswith('.py'):
-                    file_path = os.path.join(root, file)
-                    try:
-                        with open(file_path, 'r', encoding='utf-8') as f:
-                            content = f.read()
-
-                        # Simple pattern detection
-                        import_lines = [line.strip() for line in content.split('\n') if line.strip().startswith('import ') or line.strip().startswith('from ')]
-                        if import_lines:
-                            rel_path = os.path.relpath(file_path, self.project_root)
-                            patterns["imports"][rel_path] = import_lines[:10]  # Limit to first 10
-                    except Exception:
-                        continue
-
-        return {"analysis_type": "patterns", "results": patterns}
-
-    async def _tool_collaborate_with_cli(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Collaborate with other CLI tools"""
-        target_cli = arguments.get("target_cli")
-        task = arguments.get("task")
-        context = arguments.get("context", {})
-
-        if not target_cli or not task:
-            return {"error": "target_cli and task are required"}
-
-        try:
-            # This would integrate with the broader Stigmergy system
-            # For now, return a mock response
-            return {
-                "target_cli": target_cli,
-                "task": task,
-                "result": f"Collaboration request sent to {target_cli}",
-                "status": "pending"
-            }
-        except Exception as e:
-            logger.error(f"CLI collaboration error: {e}")
-            return {"error": str(e)}
-
-    async def _tool_create_tool(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
-        """Create a new tool dynamically"""
-        tool_name = arguments.get("tool_name")
-        description = arguments.get("description")
-        function_code = arguments.get("function_code")
-
-        if not all([tool_name, description, function_code]):
-            return {"error": "tool_name, description, and function_code are required"}
-
-        try:
-            # Create new tool definition
-            new_tool = MCPToolDefinition(
-                name=tool_name,
-                description=description,
-                inputSchema={
-                    "type": "object",
-                    "properties": {
-                        "input": {"type": "string", "description": "Input parameter"}
-                    },
-                    "required": ["input"]
-                }
-            )
-
-            # Add to tools
-            self.tools[tool_name] = new_tool
-
-            # Store function code for execution (in a real implementation)
-            # This would require a more sophisticated execution environment
-
-            return {
-                "tool_name": tool_name,
-                "description": description,
-                "status": "created",
-                "message": f"Tool '{tool_name}' created successfully"
-            }
-        except Exception as e:
-            logger.error(f"Tool creation error: {e}")
-            return {"error": str(e)}
-
-    async def _handle_resources_list(self, request_id: Any) -> Dict[str, Any]:
-        """Handle resources/list request"""
-        resources_data = [asdict(resource) for resource in self.resources.values()]
-        return {
-            "jsonrpc": "2.0",
-            "id": request_id,
-            "result": {"resources": resources_data}
-        }
-
-    async def _handle_resources_read(self, params: Dict[str, Any], request_id: Any) -> Dict[str, Any]:
-        """Handle resources/read request"""
-        uri = params.get("uri")
-
-        if not uri:
-            return self._create_error_response(request_id, -32602, "Missing resource URI")
-
-        if uri not in self.resources:
-            return self._create_error_response(request_id, -32602, f"Resource {uri} not found")
-
-        try:
-            content = await self._read_resource(uri)
-            return {
-                "jsonrpc": "2.0",
-                "id": request_id,
-                "result": {"content": content}
-            }
-        except Exception as e:
-            logger.error(f"Resource read error: {e}")
-            return self._create_error_response(request_id, -32603, f"Failed to read resource: {str(e)}")
-
-    async def _read_resource(self, uri: str) -> str:
-        """Read resource content"""
-        if uri == "file://project_spec.json":
-            spec_path = os.path.join(self.project_root, "PROJECT_SPEC.json")
-            if os.path.exists(spec_path):
-                with open(spec_path, 'r') as f:
-                    return f.read()
-            return "{}"
-
-        elif uri == "file://collaboration_log.md":
-            log_path = os.path.join(self.project_root, "COLLABORATION_LOG.md")
-            if os.path.exists(log_path):
-                with open(log_path, 'r') as f:
-                    return f.read()
-            return "# Collaboration Log\n\nNo collaboration activities yet."
-
-        elif uri == "file://tasks.md":
-            tasks_path = os.path.join(self.project_root, "TASKS.md")
-            if os.path.exists(tasks_path):
-                with open(tasks_path, 'r') as f:
-                    return f.read()
-            return "# Tasks\n\nNo tasks defined yet."
-
-        elif uri == "file://global_memory.json":
-            memory_path = os.path.join(self.project_root, "global_cli_memory.json")
-            if os.path.exists(memory_path):
-                with open(memory_path, 'r') as f:
-                    return f.read()
-            return "{}"
-
-        else:
-            raise ValueError(f"Unknown resource URI: {uri}")
-
-    async def _handle_initialize(self, params: Dict[str, Any], request_id: Any) -> Dict[str, Any]:
-        """Handle initialize request"""
-        return {
-            "jsonrpc": "2.0",
-            "id": request_id,
-            "result": {
-                "protocolVersion": "2024-11-05",
-                "capabilities": {
-                    "tools": {"listChanged": False},
-                    "resources": {"listChanged": False, "subscribe": False}
-                },
-                "serverInfo": {
-                    "name": "stigmergy-mcp-server",
-                    "version": "1.0.0"
-                }
-            }
-        }
-
-    def _create_error_response(self, request_id: Any, code: int, message: str) -> Dict[str, Any]:
-        """Create error response"""
-        return {
-            "jsonrpc": "2.0",
-            "id": request_id,
-            "error": {
-                "code": code,
-                "message": message
-            }
-        }
-
-
-async def main():
-    """Main entry point for the MCP server"""
-    logger.info("Starting Stigmergy MCP Server")
-
-    server = StigmergyMCPServer()
-
-    # Read from stdin and write to stdout for stdio transport
-    try:
-        while True:
-            line = await asyncio.get_event_loop().run_in_executor(None, sys.stdin.readline)
-            if not line:
-                break
-
-            try:
-                request = json.loads(line.strip())
-                response = await server.handle_request(request)
-                response_json = json.dumps(response) + "\n"
-                sys.stdout.write(response_json)
-                sys.stdout.flush()
-            except json.JSONDecodeError as e:
-                logger.error(f"Invalid JSON received: {e}")
-                error_response = {
-                    "jsonrpc": "2.0",
-                    "error": {"code": -32700, "message": "Parse error"}
-                }
-                sys.stdout.write(json.dumps(error_response) + "\n")
-                sys.stdout.flush()
-            except Exception as e:
-                logger.error(f"Error processing request: {e}")
-                error_response = {
-                    "jsonrpc": "2.0",
-                    "error": {"code": -32603, "message": "Internal error"}
-                }
-                sys.stdout.write(json.dumps(error_response) + "\n")
-                sys.stdout.flush()
-
-    except KeyboardInterrupt:
-        logger.info("MCP server interrupted")
-    except Exception as e:
-        logger.error(f"MCP server error: {e}")
-    finally:
-        logger.info("MCP server shutting down")
-
-
-if __name__ == "__main__":
-    asyncio.run(main())