mcp_code_indexer-1.0.0-py3-none-any.whl

@@ -0,0 +1,699 @@
+ """
+ MCP Server implementation for the Code Indexer.
+ 
+ This module provides the main MCP server that handles JSON-RPC communication
+ for file description management tools.
+ """
+ 
+ import asyncio
+ import hashlib
+ import json
+ import logging
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+ 
+ from mcp import types
+ from mcp.server import Server
+ from mcp.server.stdio import stdio_server
+ 
+ from mcp_code_indexer.database.database import DatabaseManager
+ from mcp_code_indexer.file_scanner import FileScanner
+ from mcp_code_indexer.token_counter import TokenCounter
+ from mcp_code_indexer.database.models import (
+     Project, FileDescription, CodebaseOverview, SearchResult,
+     CodebaseSizeInfo, FolderNode, FileNode
+ )
+ from mcp_code_indexer.error_handler import setup_error_handling, ErrorHandler
+ from mcp_code_indexer.middleware.error_middleware import create_tool_middleware, AsyncTaskManager
+ from mcp_code_indexer.logging_config import get_logger
+ from mcp_code_indexer.merge_handler import MergeHandler
+ 
+ logger = logging.getLogger(__name__)
+ 
+ 
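+ # Example tool invocation as it arrives from an MCP client (illustrative
+ # values only; "my-app" and the paths below are hypothetical):
+ #
+ #   call_tool("get_file_description", {
+ #       "projectName": "my-app",
+ #       "folderPath": "/home/user/my-app",
+ #       "branch": "main",
+ #       "filePath": "src/main.py",
+ #   })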
+ class MCPCodeIndexServer:
+     """
+     MCP Code Index Server.
+ 
+     Provides file description tracking and codebase navigation tools
+     through the Model Context Protocol.
+     """
+ 
+     def __init__(
+         self,
+         token_limit: int = 32000,
+         db_path: Optional[Path] = None,
+         cache_dir: Optional[Path] = None
+     ):
+         """
+         Initialize the MCP Code Index Server.
+ 
+         Args:
+             token_limit: Maximum tokens before recommending search over overview
+             db_path: Path to SQLite database
+             cache_dir: Directory for caching
+         """
+         self.token_limit = token_limit
+         self.db_path = db_path or Path.home() / ".mcp-code-index" / "tracker.db"
+         self.cache_dir = cache_dir or Path.home() / ".mcp-code-index" / "cache"
+ 
+         # Initialize components
+         self.db_manager = DatabaseManager(self.db_path)
+         self.token_counter = TokenCounter(token_limit)
+         self.merge_handler = MergeHandler(self.db_manager)
+ 
+         # Setup error handling
+         self.logger = get_logger(__name__)
+         self.error_handler = setup_error_handling(self.logger)
+         self.middleware = create_tool_middleware(self.error_handler)
+         self.task_manager = AsyncTaskManager(self.error_handler)
+ 
+         # Create MCP server
+         self.server = Server("mcp-code-indexer")
+ 
+         # Register handlers
+         self._register_handlers()
+ 
+         self.logger.info(
+             "MCP Code Index Server initialized",
+             extra={"structured_data": {"initialization": {"token_limit": token_limit}}}
+         )
+ 
+     async def initialize(self) -> None:
+         """Initialize database and other resources."""
+         await self.db_manager.initialize()
+         logger.info("Server initialized successfully")
+ 
+     def _register_handlers(self) -> None:
+         """Register MCP tool and resource handlers."""
+ 
+         @self.server.list_tools()
+         async def list_tools() -> List[types.Tool]:
+             """Return list of available tools."""
+             return [
+                 types.Tool(
+                     name="get_file_description",
+                     description="Retrieves the stored description for a specific file in a codebase. Use this to quickly understand what a file contains without reading its full contents.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {
+                                 "type": "string",
+                                 "description": "The name of the project"
+                             },
+                             "folderPath": {
+                                 "type": "string",
+                                 "description": "Absolute path to the project folder on disk"
+                             },
+                             "branch": {
+                                 "type": "string",
+                                 "description": "Git branch name (e.g., 'main', 'develop')"
+                             },
+                             "remoteOrigin": {
+                                 "type": ["string", "null"],
+                                 "description": "Git remote origin URL if available"
+                             },
+                             "upstreamOrigin": {
+                                 "type": ["string", "null"],
+                                 "description": "Upstream repository URL if this is a fork"
+                             },
+                             "filePath": {
+                                 "type": "string",
+                                 "description": "Relative path to the file from project root"
+                             }
+                         },
+                         "required": ["projectName", "folderPath", "branch", "filePath"]
+                     }
+                 ),
+                 types.Tool(
+                     name="update_file_description",
+                     description="Creates or updates the description for a file. Use this after analyzing a file's contents to store a detailed summary.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch name"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"},
+                             "filePath": {"type": "string", "description": "Relative path to the file from project root"},
+                             "description": {"type": "string", "description": "Detailed description of the file's contents"},
+                             "fileHash": {"type": ["string", "null"], "description": "SHA-256 hash of the file contents (optional)"}
+                         },
+                         "required": ["projectName", "folderPath", "branch", "filePath", "description"]
+                     }
+                 ),
+                 types.Tool(
+                     name="check_codebase_size",
+                     description="Checks the total token count of a codebase's file structure and descriptions. Returns whether the codebase is 'large' and recommends using search instead of the full overview.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch name"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"}
+                         },
+                         "required": ["projectName", "folderPath", "branch"]
+                     }
+                 ),
+                 types.Tool(
+                     name="find_missing_descriptions",
+                     description="Scans the project folder to find files that don't have descriptions yet. This is stage 1 of a two-stage process for updating missing descriptions.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch name"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"}
+                         },
+                         "required": ["projectName", "folderPath", "branch"]
+                     }
+                 ),
+                 types.Tool(
+                     name="update_missing_descriptions",
+                     description="Batch updates descriptions for multiple files at once. This is stage 2 after find_missing_descriptions.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch name"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"},
+                             "descriptions": {
+                                 "type": "array",
+                                 "description": "Array of file paths and their descriptions",
+                                 "items": {
+                                     "type": "object",
+                                     "properties": {
+                                         "filePath": {"type": "string", "description": "Relative path to the file"},
+                                         "description": {"type": "string", "description": "Detailed description of the file"}
+                                     },
+                                     "required": ["filePath", "description"]
+                                 }
+                             }
+                         },
+                         "required": ["projectName", "folderPath", "branch", "descriptions"]
+                     }
+                 ),
+                 types.Tool(
+                     name="search_descriptions",
+                     description="Searches through all file descriptions in a project to find files related to specific functionality. Use this for large codebases instead of loading the entire structure.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch to search in"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"},
+                             "query": {"type": "string", "description": "Search query (e.g., 'authentication middleware', 'database models')"},
+                             "maxResults": {"type": "integer", "default": 20, "description": "Maximum number of results to return"}
+                         },
+                         "required": ["projectName", "folderPath", "branch", "query"]
+                     }
+                 ),
+                 types.Tool(
+                     name="get_codebase_overview",
+                     description="Returns the complete file and folder structure of a codebase with all descriptions. For large codebases, this will recommend using search_descriptions instead.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder on disk"},
+                             "branch": {"type": "string", "description": "Git branch name"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL if available"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"}
+                         },
+                         "required": ["projectName", "folderPath", "branch"]
+                     }
+                 ),
+                 types.Tool(
+                     name="merge_branch_descriptions",
+                     description="Merges file descriptions from one branch to another. This is a two-stage process: first call without resolutions returns conflicts where the same file has different descriptions in each branch. Second call with resolutions completes the merge.",
+                     inputSchema={
+                         "type": "object",
+                         "properties": {
+                             "projectName": {"type": "string", "description": "The name of the project"},
+                             "folderPath": {"type": "string", "description": "Absolute path to the project folder"},
+                             "remoteOrigin": {"type": ["string", "null"], "description": "Git remote origin URL"},
+                             "upstreamOrigin": {"type": ["string", "null"], "description": "Upstream repository URL if this is a fork"},
+                             "sourceBranch": {"type": "string", "description": "Branch to merge from (e.g., 'feature/new-ui')"},
+                             "targetBranch": {"type": "string", "description": "Branch to merge into (e.g., 'main')"},
+                             "conflictResolutions": {
+                                 "type": ["array", "null"],
+                                 "description": "Array of resolved conflicts (only for second stage)",
+                                 "items": {
+                                     "type": "object",
+                                     "properties": {
+                                         "conflictId": {"type": "string", "description": "ID of the conflict to resolve"},
+                                         "resolvedDescription": {"type": "string", "description": "Final description to use after merge"}
+                                     },
+                                     "required": ["conflictId", "resolvedDescription"]
+                                 }
+                             }
+                         },
+                         "required": ["projectName", "folderPath", "sourceBranch", "targetBranch"]
+                     }
+                 )
+             ]
+ 
+         @self.server.call_tool()
+         async def call_tool(name: str, arguments: Dict[str, Any]) -> List[types.TextContent]:
+             """Handle tool calls with middleware."""
+             # Map tool names to handler methods
+             tool_handlers = {
+                 "get_file_description": self._handle_get_file_description,
+                 "update_file_description": self._handle_update_file_description,
+                 "check_codebase_size": self._handle_check_codebase_size,
+                 "find_missing_descriptions": self._handle_find_missing_descriptions,
+                 "update_missing_descriptions": self._handle_update_missing_descriptions,
+                 "search_descriptions": self._handle_search_descriptions,
+                 "get_codebase_overview": self._handle_get_codebase_overview,
+                 "merge_branch_descriptions": self._handle_merge_branch_descriptions,
+             }
+ 
+             if name not in tool_handlers:
+                 from ..error_handler import ValidationError
+                 raise ValidationError(f"Unknown tool: {name}")
+ 
+             # Wrap handler with middleware
+             wrapped_handler = self.middleware.wrap_tool_handler(name)(
+                 lambda args: self._execute_tool_handler(tool_handlers[name], args)
+             )
+ 
+             return await wrapped_handler(arguments)
+ 
+     async def _execute_tool_handler(self, handler, arguments: Dict[str, Any]) -> List[types.TextContent]:
+         """Execute a tool handler and format the result."""
+         result = await handler(arguments)
+ 
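+         # Serialize with default=str so datetime and other non-JSON values
+         # are rendered as strings.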
+         return [types.TextContent(
+             type="text",
+             text=json.dumps(result, indent=2, default=str)
+         )]
+ 
+     async def _get_or_create_project_id(self, arguments: Dict[str, Any]) -> str:
+         """Get or create a project ID from tool arguments."""
+         project_name = arguments["projectName"]
+         remote_origin = arguments.get("remoteOrigin")
+         upstream_origin = arguments.get("upstreamOrigin")
+         folder_path = arguments["folderPath"]
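+         # "branch" is optional here: merge_branch_descriptions does not send
+         # it, so the upstream-inheritance calls below default to "main".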
+         branch = arguments.get("branch", "main")
+ 
+         # Create project ID from identifiers
+         id_source = f"{project_name}:{remote_origin}:{upstream_origin}:{folder_path}"
+         project_id = hashlib.sha256(id_source.encode()).hexdigest()[:16]
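+         # The ID is deterministic: the same name/origin/path combination
+         # always maps to the same 16-hex-character project ID across runs.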
+ 
+         # Check if project exists, create if not
+         project = await self.db_manager.get_project(project_id)
+         if not project:
+             project = Project(
+                 id=project_id,
+                 name=project_name,
+                 remote_origin=remote_origin,
+                 upstream_origin=upstream_origin,
+                 aliases=[folder_path],
+                 created=datetime.utcnow(),
+                 last_accessed=datetime.utcnow()
+             )
+             await self.db_manager.create_project(project)
+ 
+             # Auto-inherit from upstream if needed
+             if upstream_origin:
+                 try:
+                     inherited_count = await self.db_manager.inherit_from_upstream(project, branch)
+                     if inherited_count > 0:
+                         logger.info(f"Auto-inherited {inherited_count} descriptions from upstream for {project_name}")
+                 except Exception as e:
+                     logger.warning(f"Failed to inherit from upstream: {e}")
+         else:
+             # Update last accessed time
+             await self.db_manager.update_project_access_time(project_id)
+ 
+             # Check if upstream inheritance is needed for existing project
+             if upstream_origin and await self.db_manager.check_upstream_inheritance_needed(project):
+                 try:
+                     inherited_count = await self.db_manager.inherit_from_upstream(project, branch)
+                     if inherited_count > 0:
+                         logger.info(f"Auto-inherited {inherited_count} descriptions from upstream for {project_name}")
+                 except Exception as e:
+                     logger.warning(f"Failed to inherit from upstream: {e}")
+ 
+         return project_id
+ 
+     async def _handle_get_file_description(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle get_file_description tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+ 
+         file_desc = await self.db_manager.get_file_description(
+             project_id=project_id,
+             branch=arguments["branch"],
+             file_path=arguments["filePath"]
+         )
+ 
+         if file_desc:
+             return {
+                 "exists": True,
+                 "description": file_desc.description,
+                 "lastModified": file_desc.last_modified.isoformat(),
+                 "fileHash": file_desc.file_hash,
+                 "version": file_desc.version
+             }
+         else:
+             return {
+                 "exists": False,
+                 "message": f"No description found for {arguments['filePath']}"
+             }
+ 
+     async def _handle_update_file_description(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle update_file_description tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+ 
+         file_desc = FileDescription(
+             project_id=project_id,
+             branch=arguments["branch"],
+             file_path=arguments["filePath"],
+             description=arguments["description"],
+             file_hash=arguments.get("fileHash"),
+             last_modified=datetime.utcnow(),
+             version=1
+         )
+ 
+         await self.db_manager.create_file_description(file_desc)
+ 
+         return {
+             "success": True,
+             "message": f"Description updated for {arguments['filePath']}",
+             "filePath": arguments["filePath"],
+             "lastModified": file_desc.last_modified.isoformat()
+         }
+ 
+     async def _handle_check_codebase_size(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle check_codebase_size tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+ 
+         # Get all file descriptions for this project/branch
+         file_descriptions = await self.db_manager.get_all_file_descriptions(
+             project_id=project_id,
+             branch=arguments["branch"]
+         )
+ 
+         # Calculate total tokens
+         total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+         is_large = self.token_counter.is_large_codebase(total_tokens)
+         recommendation = self.token_counter.get_recommendation(total_tokens)
+ 
+         return {
+             "totalTokens": total_tokens,
+             "isLarge": is_large,
+             "recommendation": recommendation,
+             "tokenLimit": self.token_counter.token_limit,
+             "totalFiles": len(file_descriptions)
+         }
+ 
+     async def _handle_find_missing_descriptions(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle find_missing_descriptions tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+         folder_path = Path(arguments["folderPath"])
+ 
+         # Get existing file descriptions
+         existing_descriptions = await self.db_manager.get_all_file_descriptions(
+             project_id=project_id,
+             branch=arguments["branch"]
+         )
+         existing_paths = {desc.file_path for desc in existing_descriptions}
+ 
+         # Scan directory for files
+         scanner = FileScanner(folder_path)
+         if not scanner.is_valid_project_directory():
+             return {
+                 "error": f"Invalid or inaccessible project directory: {folder_path}"
+             }
+ 
+         missing_files = scanner.find_missing_files(existing_paths)
+         missing_paths = [scanner.get_relative_path(f) for f in missing_files]
+ 
+         # Get project stats
+         stats = scanner.get_project_stats()
+ 
+         return {
+             "missingFiles": missing_paths,
+             "totalMissing": len(missing_paths),
+             "existingDescriptions": len(existing_paths),
+             "projectStats": stats
+         }
+ 
+     async def _handle_update_missing_descriptions(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle update_missing_descriptions tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+         descriptions_data = arguments["descriptions"]
+ 
+         # Create FileDescription objects
+         file_descriptions = []
+         for desc_data in descriptions_data:
+             file_desc = FileDescription(
+                 project_id=project_id,
+                 branch=arguments["branch"],
+                 file_path=desc_data["filePath"],
+                 description=desc_data["description"],
+                 file_hash=None,  # Hash not provided in batch operations
+                 last_modified=datetime.utcnow(),
+                 version=1
+             )
+             file_descriptions.append(file_desc)
+ 
+         # Batch create descriptions
+         await self.db_manager.batch_create_file_descriptions(file_descriptions)
+ 
+         return {
+             "success": True,
+             "updatedFiles": len(file_descriptions),
+             "files": [desc["filePath"] for desc in descriptions_data],
+             "message": f"Successfully updated descriptions for {len(file_descriptions)} files"
+         }
+ 
+     async def _handle_search_descriptions(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle search_descriptions tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+         max_results = arguments.get("maxResults", 20)
+ 
+         # Perform search
+         search_results = await self.db_manager.search_file_descriptions(
+             project_id=project_id,
+             branch=arguments["branch"],
+             query=arguments["query"],
+             max_results=max_results
+         )
+ 
+         # Format results
+         formatted_results = []
+         for result in search_results:
+             formatted_results.append({
+                 "filePath": result.file_path,
+                 "description": result.description,
+                 "relevanceScore": result.relevance_score
+             })
+ 
+         return {
+             "results": formatted_results,
+             "totalResults": len(formatted_results),
+             "query": arguments["query"],
+             "maxResults": max_results
+         }
+ 
+     async def _handle_get_codebase_overview(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle get_codebase_overview tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+ 
+         # Get all file descriptions
+         file_descriptions = await self.db_manager.get_all_file_descriptions(
+             project_id=project_id,
+             branch=arguments["branch"]
+         )
+ 
+         # Calculate total tokens
+         total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+         is_large = self.token_counter.is_large_codebase(total_tokens)
+ 
+         # If large, recommend search instead
+         if is_large:
+             return {
+                 "isLarge": True,
+                 "totalTokens": total_tokens,
+                 "tokenLimit": self.token_counter.token_limit,
+                 "totalFiles": len(file_descriptions),
+                 "recommendation": "use_search",
+                 "message": f"Codebase has {total_tokens} tokens (limit: {self.token_counter.token_limit}). Use search_descriptions instead for better performance."
+             }
+ 
+         # Build folder structure
+         structure = self._build_folder_structure(file_descriptions)
+ 
+         return {
+             "projectName": arguments["projectName"],
+             "branch": arguments["branch"],
+             "totalFiles": len(file_descriptions),
+             "totalTokens": total_tokens,
+             "isLarge": is_large,
+             "tokenLimit": self.token_counter.token_limit,
+             "structure": structure
+         }
+ 
+     def _build_folder_structure(self, file_descriptions: List[FileDescription]) -> Dict[str, Any]:
+         """Build hierarchical folder structure from file descriptions."""
+         root = {"name": "", "path": "", "files": [], "folders": {}}
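+         # Folders are keyed by name so files sharing a parent directory reuse
+         # the same node; convert_structure() below flattens the dicts to lists.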
+ 
+         for file_desc in file_descriptions:
+             path_parts = Path(file_desc.file_path).parts
+             current = root
+ 
+             # Navigate/create folder structure
+             for i, part in enumerate(path_parts[:-1]):
+                 folder_path = "/".join(path_parts[:i+1])
+                 if part not in current["folders"]:
+                     current["folders"][part] = {
+                         "name": part,
+                         "path": folder_path,
+                         "files": [],
+                         "folders": {}
+                     }
+                 current = current["folders"][part]
+ 
+             # Add file to current folder
+             if path_parts:  # Handle empty paths
+                 current["files"].append({
+                     "name": path_parts[-1],
+                     "path": file_desc.file_path,
+                     "description": file_desc.description
+                 })
+ 
+         # Convert nested dict structure to list format
+         def convert_structure(node):
+             return {
+                 "name": node["name"],
+                 "path": node["path"],
+                 "files": node["files"],
+                 "folders": [convert_structure(folder) for folder in node["folders"].values()]
+             }
+ 
+         return convert_structure(root)
+ 
+     async def _handle_merge_branch_descriptions(self, arguments: Dict[str, Any]) -> Dict[str, Any]:
+         """Handle merge_branch_descriptions tool calls."""
+         project_id = await self._get_or_create_project_id(arguments)
+         source_branch = arguments["sourceBranch"]
+         target_branch = arguments["targetBranch"]
+         conflict_resolutions = arguments.get("conflictResolutions")
+ 
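+         # A missing conflictResolutions argument selects phase 1 (conflict
+         # detection); a provided list selects phase 2 (applying resolutions).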
+         if conflict_resolutions is None:
+             # Phase 1: Detect conflicts
+             session = await self.merge_handler.start_merge_phase1(
+                 project_id, source_branch, target_branch
+             )
+ 
+             if session.get_conflict_count() == 0:
+                 # No conflicts, can merge immediately
+                 return {
+                     "phase": "completed",
+                     "conflicts": [],
+                     "message": f"No conflicts detected. Merge from {source_branch} to {target_branch} can proceed automatically.",
+                     "sourceBranch": source_branch,
+                     "targetBranch": target_branch,
+                     "conflictCount": 0
+                 }
+             else:
+                 # Return conflicts for resolution
+                 return {
+                     "phase": "conflicts_detected",
+                     "sessionId": session.session_id,
+                     "conflicts": [conflict.to_dict() for conflict in session.conflicts],
+                     "conflictCount": session.get_conflict_count(),
+                     "sourceBranch": source_branch,
+                     "targetBranch": target_branch,
+                     "message": f"Found {session.get_conflict_count()} conflicts that need resolution."
+                 }
+         else:
+             # Phase 2: Apply resolutions
+             # An empty resolution list cannot complete a merge
+             if not conflict_resolutions:
+                 from ..error_handler import ValidationError
+                 raise ValidationError("Conflict resolutions required for phase 2")
+ 
+             # For simplicity, create a new session and resolve immediately
+             # In a production system, you'd want to track session IDs properly
+             session = await self.merge_handler.start_merge_phase1(
+                 project_id, source_branch, target_branch
+             )
+ 
+             if session.get_conflict_count() == 0:
+                 return {
+                     "phase": "completed",
+                     "message": "No conflicts to resolve",
+                     "sourceBranch": source_branch,
+                     "targetBranch": target_branch
+                 }
+ 
+             result = await self.merge_handler.complete_merge_phase2(
+                 session.session_id, conflict_resolutions
+             )
+ 
+             return {
+                 "phase": "completed",
+                 **result
+             }
+ 
+     async def run(self) -> None:
+         """Run the MCP server."""
+         await self.initialize()
+ 
+         try:
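+             # stdio_server speaks MCP over stdin/stdout, which is why main()
+             # routes all logging to stderr.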
+             async with stdio_server() as (read_stream, write_stream):
+                 initialization_options = self.server.create_initialization_options()
+                 await self.server.run(
+                     read_stream,
+                     write_stream,
+                     initialization_options
+                 )
+         finally:
+             # Clean shutdown
+             await self.shutdown()
+ 
+     async def shutdown(self) -> None:
+         """Clean shutdown of server resources."""
+         try:
+             # Cancel any running tasks
+             self.task_manager.cancel_all()
+ 
+             # Close database connections
+             await self.db_manager.close_pool()
+ 
+             self.logger.info("Server shutdown completed successfully")
+ 
+         except Exception as e:
+             self.error_handler.log_error(e, context={"phase": "shutdown"})
+ 
+ 
+ async def main():
+     """Main entry point for the MCP server."""
+     import sys
+ 
+     # Setup logging to stderr (stdout is used for MCP communication)
+     logging.basicConfig(
+         level=logging.INFO,
+         format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+         handlers=[logging.StreamHandler(sys.stderr)]
+     )
+ 
+     # Create and run server
+     server = MCPCodeIndexServer()
+     await server.run()
+ 
+ 
+ if __name__ == "__main__":
+     asyncio.run(main())