mcp-code-indexer 3.4.2__py3-none-any.whl → 3.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mcp_code_indexer/__init__.py +6 -2
- mcp_code_indexer/commands/__init__.py +1 -0
- mcp_code_indexer/commands/makelocal.py +202 -0
- mcp_code_indexer/database/connection_health.py +2 -7
- mcp_code_indexer/database/database_factory.py +134 -0
- mcp_code_indexer/database/path_resolver.py +122 -0
- mcp_code_indexer/main.py +2 -0
- mcp_code_indexer/server/mcp_server.py +36 -24
- {mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/METADATA +3 -3
- {mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/RECORD +13 -9
- {mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/LICENSE +0 -0
- {mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/WHEEL +0 -0
- {mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/entry_points.txt +0 -0
mcp_code_indexer/__init__.py
CHANGED
@@ -6,7 +6,11 @@ intelligent codebase navigation through searchable file descriptions,
 token-aware overviews, and advanced merge capabilities.
 """
 
-
+# Delay import to avoid dependency issues during testing
+def get_server():
+    """Get MCPCodeIndexServer (lazy import)."""
+    from .server.mcp_server import MCPCodeIndexServer
+    return MCPCodeIndexServer
 
 
 def _get_version() -> str:
@@ -54,4 +58,4 @@ __author__ = "MCP Code Indexer Contributors"
 __email__ = ""
 __license__ = "MIT"
 
-__all__ = ["
+__all__ = ["get_server", "__version__"]
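With the lazy accessor above, importing the top-level package no longer pulls in the server module (and its dependencies) at import time. A minimal usage sketch, assuming the wheel is installed:

    import mcp_code_indexer

    # The heavy server import happens only when get_server() is called.
    MCPCodeIndexServer = mcp_code_indexer.get_server()
    print(MCPCodeIndexServer.__name__)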
mcp_code_indexer/commands/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Commands package for MCP Code Indexer."""
mcp_code_indexer/commands/makelocal.py
ADDED
@@ -0,0 +1,202 @@
+"""
+Command to migrate project data from global to local database.
+
+This module provides functionality to extract project data from the global database
+and create a local database in a project folder.
+"""
+
+import logging
+from pathlib import Path
+from typing import List, Optional
+
+from mcp_code_indexer.database.database import DatabaseManager
+from mcp_code_indexer.database.database_factory import DatabaseFactory
+from mcp_code_indexer.database.models import Project, FileDescription, ProjectOverview
+
+logger = logging.getLogger(__name__)
+
+
+class MakeLocalCommand:
+    """
+    Command to migrate project data from global to local database.
+
+    Extracts all project data, file descriptions, and project overviews
+    from the global database and creates a local database in the specified folder.
+    """
+
+    def __init__(self, db_factory: DatabaseFactory):
+        """
+        Initialize the make local command.
+
+        Args:
+            db_factory: Database factory for creating database managers
+        """
+        self.db_factory = db_factory
+
+    async def execute(self, folder_path: str, project_name: Optional[str] = None) -> dict:
+        """
+        Execute the make local command.
+
+        Args:
+            folder_path: Path to the project folder where local DB will be created
+            project_name: Optional project name to migrate (if None, tries to find by folder)
+
+        Returns:
+            Dictionary with operation results
+        """
+        folder_path_obj = Path(folder_path).resolve()
+
+        if not folder_path_obj.exists():
+            raise ValueError(f"Folder path does not exist: {folder_path}")
+
+        if not folder_path_obj.is_dir():
+            raise ValueError(f"Path is not a directory: {folder_path}")
+
+        # Check if local database already exists
+        local_db_path = folder_path_obj / ".code-index.db"
+        if local_db_path.exists() and local_db_path.stat().st_size > 0:
+            raise ValueError(f"Local database already exists: {local_db_path}")
+
+        # Get global database manager
+        global_db_manager = await self.db_factory.get_database_manager()
+
+        # Find the project to migrate
+        project = await self._find_project_to_migrate(global_db_manager, folder_path, project_name)
+        if not project:
+            if project_name:
+                raise ValueError(f"Project '{project_name}' not found in global database")
+            else:
+                raise ValueError(f"No project found for folder path: {folder_path}")
+
+        logger.info(f"Found project to migrate: {project.name} (ID: {project.id})")
+
+        # Get all project data
+        file_descriptions = await global_db_manager.get_all_file_descriptions(project.id)
+        project_overview = await global_db_manager.get_project_overview(project.id)
+
+        logger.info(f"Found {len(file_descriptions)} file descriptions to migrate")
+        if project_overview:
+            logger.info("Found project overview to migrate")
+
+        # Create local database (remove empty file if it exists)
+        if local_db_path.exists():
+            local_db_path.unlink()
+
+        # Create local database manager
+        local_db_manager = await self.db_factory.get_database_manager(str(folder_path_obj))
+
+        # Migrate data
+        await self._migrate_project_data(
+            local_db_manager, project, file_descriptions, project_overview
+        )
+
+        # Remove data from global database
+        await self._remove_from_global_database(global_db_manager, project.id)
+
+        return {
+            "success": True,
+            "project_name": project.name,
+            "project_id": project.id,
+            "local_database_path": str(local_db_path),
+            "migrated_files": len(file_descriptions),
+            "migrated_overview": project_overview is not None,
+        }
+
+    async def _find_project_to_migrate(
+        self,
+        global_db_manager: DatabaseManager,
+        folder_path: str,
+        project_name: Optional[str]
+    ) -> Optional[Project]:
+        """
+        Find the project to migrate from the global database.
+
+        Args:
+            global_db_manager: Global database manager
+            folder_path: Project folder path
+            project_name: Optional project name
+
+        Returns:
+            Project to migrate or None if not found
+        """
+        all_projects = await global_db_manager.get_all_projects()
+
+        if project_name:
+            # Search by name
+            normalized_name = project_name.lower()
+            for project in all_projects:
+                if project.name.lower() == normalized_name:
+                    return project
+        else:
+            # Search by folder path in aliases
+            for project in all_projects:
+                if folder_path in project.aliases:
+                    return project
+
+        return None
+
+    async def _migrate_project_data(
+        self,
+        local_db_manager: DatabaseManager,
+        project: Project,
+        file_descriptions: List[FileDescription],
+        project_overview: Optional[ProjectOverview]
+    ) -> None:
+        """
+        Migrate project data to the local database.
+
+        Args:
+            local_db_manager: Local database manager
+            project: Project to migrate
+            file_descriptions: File descriptions to migrate
+            project_overview: Project overview to migrate (if any)
+        """
+        # Create project in local database
+        await local_db_manager.create_project(project)
+        logger.info(f"Created project in local database: {project.name}")
+
+        # Migrate file descriptions
+        if file_descriptions:
+            await local_db_manager.batch_create_file_descriptions(file_descriptions)
+            logger.info(f"Migrated {len(file_descriptions)} file descriptions")
+
+        # Migrate project overview
+        if project_overview:
+            await local_db_manager.create_project_overview(project_overview)
+            logger.info("Migrated project overview")
+
+    async def _remove_from_global_database(
+        self, global_db_manager: DatabaseManager, project_id: str
+    ) -> None:
+        """
+        Remove project data from the global database.
+
+        Args:
+            global_db_manager: Global database manager
+            project_id: Project ID to remove
+        """
+        # Remove file descriptions
+        async with global_db_manager.get_write_connection_with_retry("remove_project_files") as db:
+            await db.execute(
+                "DELETE FROM file_descriptions WHERE project_id = ?",
+                (project_id,)
+            )
+            await db.commit()
+
+        # Remove project overview
+        async with global_db_manager.get_write_connection_with_retry("remove_project_overview") as db:
+            await db.execute(
+                "DELETE FROM project_overviews WHERE project_id = ?",
+                (project_id,)
+            )
+            await db.commit()
+
+        # Remove project
+        async with global_db_manager.get_write_connection_with_retry("remove_project") as db:
+            await db.execute(
+                "DELETE FROM projects WHERE id = ?",
+                (project_id,)
+            )
+            await db.commit()
+
+        logger.info(f"Removed project data from global database: {project_id}")
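A rough usage sketch of the new command, assuming a DatabaseFactory has already been constructed for the global database; the global path below is a placeholder, not a default defined in this diff:

    import asyncio
    from pathlib import Path

    from mcp_code_indexer.database.database_factory import DatabaseFactory
    from mcp_code_indexer.commands.makelocal import MakeLocalCommand

    async def main() -> None:
        # Placeholder global database location; the real default is not part of this diff.
        factory = DatabaseFactory(global_db_path=Path.home() / ".mcp-code-index" / "tracker.db")
        command = MakeLocalCommand(factory)
        # Copies the project's rows into <project>/.code-index.db and removes them
        # from the global database.
        result = await command.execute("/path/to/project", project_name="my-project")
        print(result["local_database_path"], result["migrated_files"])
        await factory.close_all()

    asyncio.run(main())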
mcp_code_indexer/database/connection_health.py
CHANGED
@@ -336,13 +336,8 @@ class ConnectionHealthMonitor:
         if retry_executor:
             health_status["retry_statistics"] = retry_executor.get_retry_stats()
 
-        #
-
-        try:
-            db_stats = self.database_manager.get_database_stats()
-            health_status["database_statistics"] = db_stats
-        except Exception as e:
-            logger.warning(f"Failed to get database statistics: {e}")
+        # Avoid circular dependency - don't include database stats here
+        # (they are included separately in the comprehensive diagnostics)
 
         return health_status
 
mcp_code_indexer/database/database_factory.py
ADDED
@@ -0,0 +1,134 @@
+"""
+Database factory for managing multiple database instances.
+
+This module provides a factory that creates and manages DatabaseManager instances
+for both global and local databases, handling initialization and connection pooling.
+"""
+
+import logging
+from pathlib import Path
+from typing import Dict, Optional
+
+from .database import DatabaseManager
+from .path_resolver import DatabasePathResolver
+
+logger = logging.getLogger(__name__)
+
+
+class DatabaseFactory:
+    """
+    Factory for creating and managing DatabaseManager instances.
+
+    Maintains a cache of database managers for different database paths
+    to avoid creating multiple instances for the same database.
+    """
+
+    def __init__(
+        self,
+        global_db_path: Path,
+        pool_size: int = 3,
+        retry_count: int = 5,
+        timeout: float = 10.0,
+        enable_wal_mode: bool = True,
+        health_check_interval: float = 30.0,
+        retry_min_wait: float = 0.1,
+        retry_max_wait: float = 2.0,
+        retry_jitter: float = 0.2,
+    ):
+        """
+        Initialize the database factory.
+
+        Args:
+            global_db_path: Path to the global database
+            pool_size: Database connection pool size
+            retry_count: Maximum retry attempts for database operations
+            timeout: Database operation timeout in seconds
+            enable_wal_mode: Whether to enable WAL mode for SQLite
+            health_check_interval: Health check interval in seconds
+            retry_min_wait: Minimum wait time between retries
+            retry_max_wait: Maximum wait time between retries
+            retry_jitter: Maximum jitter for retry delays
+        """
+        self.global_db_path = global_db_path
+        self.db_config = {
+            "pool_size": pool_size,
+            "retry_count": retry_count,
+            "timeout": timeout,
+            "enable_wal_mode": enable_wal_mode,
+            "health_check_interval": health_check_interval,
+            "retry_min_wait": retry_min_wait,
+            "retry_max_wait": retry_max_wait,
+            "retry_jitter": retry_jitter,
+        }
+
+        self.path_resolver = DatabasePathResolver(global_db_path)
+        self._database_managers: Dict[str, DatabaseManager] = {}
+        self._initialized_dbs: set = set()
+
+    async def get_database_manager(self, folder_path: Optional[str] = None) -> DatabaseManager:
+        """
+        Get a database manager for the appropriate database (local or global).
+
+        Args:
+            folder_path: Project folder path to check for local database
+
+        Returns:
+            DatabaseManager instance for the appropriate database
+        """
+        db_path = self.path_resolver.resolve_database_path(folder_path)
+        db_key = str(db_path)
+
+        # Return existing manager if available
+        if db_key in self._database_managers:
+            return self._database_managers[db_key]
+
+        # Check if we need to initialize an empty local database
+        if (folder_path and
+                self.path_resolver.is_local_database(folder_path) and
+                self.path_resolver.should_initialize_local_database(folder_path)):
+            logger.info(f"Initializing empty local database: {db_path}")
+            # Remove the empty file so DatabaseManager can create it properly
+            if db_path.exists():
+                db_path.unlink()
+
+        # Create new database manager
+        db_manager = DatabaseManager(
+            db_path=db_path,
+            **self.db_config
+        )
+
+        # Initialize if not already done
+        if db_key not in self._initialized_dbs:
+            await db_manager.initialize()
+            self._initialized_dbs.add(db_key)
+            logger.info(f"Initialized database: {db_path}")
+
+        self._database_managers[db_key] = db_manager
+        return db_manager
+
+    async def close_all(self) -> None:
+        """Close all database managers and their connection pools."""
+        for db_manager in self._database_managers.values():
+            await db_manager.close_pool()
+        self._database_managers.clear()
+        self._initialized_dbs.clear()
+        logger.info("Closed all database connections")
+
+    def get_path_resolver(self) -> DatabasePathResolver:
+        """Get the database path resolver."""
+        return self.path_resolver
+
+    def list_active_databases(self) -> Dict[str, str]:
+        """
+        List all active database connections.
+
+        Returns:
+            Dictionary mapping database paths to their types (global/local)
+        """
+        result = {}
+        for db_path in self._database_managers.keys():
+            if db_path == str(self.global_db_path):
+                result[db_path] = "global"
+            else:
+                result[db_path] = "local"
+        return result
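A small sketch of how the factory routes callers to a local or global database and caches the managers it creates; the paths below are placeholders, not defaults from this diff:

    import asyncio
    from pathlib import Path

    from mcp_code_indexer.database.database_factory import DatabaseFactory

    async def demo() -> None:
        factory = DatabaseFactory(global_db_path=Path("/tmp/global-tracker.db"))

        # No folder path -> the global database manager (created once, then cached).
        global_db = await factory.get_database_manager()

        # Folder containing a .code-index.db file -> a manager bound to that local database.
        local_db = await factory.get_database_manager("/path/to/project")

        print(factory.list_active_databases())  # maps each db path to "global" or "local"
        await factory.close_all()

    asyncio.run(demo())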
mcp_code_indexer/database/path_resolver.py
ADDED
@@ -0,0 +1,122 @@
+"""
+Database path resolution for local vs global databases.
+
+This module provides logic to determine whether to use a local project database
+or the global database based on the presence of .code-index.db files.
+"""
+
+import logging
+import os
+from pathlib import Path
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
+
+class DatabasePathResolver:
+    """
+    Resolves database paths, determining whether to use local or global databases.
+
+    Local databases are stored as .code-index.db in project folders.
+    If a local database file exists, it takes precedence over the global database.
+    """
+
+    def __init__(self, global_db_path: Path):
+        """
+        Initialize the path resolver with the global database path.
+
+        Args:
+            global_db_path: Path to the global database file
+        """
+        self.global_db_path = global_db_path
+
+    def resolve_database_path(self, folder_path: Optional[str] = None) -> Path:
+        """
+        Resolve which database to use based on folder path.
+
+        Args:
+            folder_path: Project folder path to check for local database
+
+        Returns:
+            Path to the database file to use
+        """
+        if not folder_path:
+            logger.debug("No folder path provided, using global database")
+            return self.global_db_path
+
+        try:
+            folder_path_obj = Path(folder_path).resolve()
+            local_db_path = folder_path_obj / ".code-index.db"
+
+            if local_db_path.exists():
+                logger.debug(f"Found local database: {local_db_path}")
+                return local_db_path
+            else:
+                logger.debug(f"No local database found at {local_db_path}, using global database")
+                return self.global_db_path
+
+        except (OSError, ValueError) as e:
+            logger.warning(f"Error resolving folder path '{folder_path}': {e}")
+            return self.global_db_path
+
+    def is_local_database(self, folder_path: Optional[str] = None) -> bool:
+        """
+        Check if a local database exists for the given folder path.
+
+        Args:
+            folder_path: Project folder path to check
+
+        Returns:
+            True if a local database exists, False otherwise
+        """
+        if not folder_path:
+            return False
+
+        try:
+            folder_path_obj = Path(folder_path).resolve()
+            local_db_path = folder_path_obj / ".code-index.db"
+            return local_db_path.exists()
+        except (OSError, ValueError):
+            return False
+
+    def get_local_database_path(self, folder_path: str) -> Path:
+        """
+        Get the local database path for a folder (whether it exists or not).
+
+        Args:
+            folder_path: Project folder path
+
+        Returns:
+            Path where the local database would be located
+        """
+        return Path(folder_path).resolve() / ".code-index.db"
+
+    def is_empty_database_file(self, db_path: Path) -> bool:
+        """
+        Check if a database file is empty (0 bytes).
+
+        Args:
+            db_path: Path to the database file
+
+        Returns:
+            True if the file exists and is empty, False otherwise
+        """
+        try:
+            return db_path.exists() and db_path.stat().st_size == 0
+        except (OSError, ValueError):
+            return False
+
+    def should_initialize_local_database(self, folder_path: str) -> bool:
+        """
+        Check if a local database should be initialized.
+
+        This returns True if .code-index.db exists and is empty.
+
+        Args:
+            folder_path: Project folder path
+
+        Returns:
+            True if local database should be initialized, False otherwise
+        """
+        local_db_path = self.get_local_database_path(folder_path)
+        return self.is_empty_database_file(local_db_path)
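The resolver's precedence rule (a local .code-index.db wins when present, otherwise fall back to the global path) can be exercised directly; the paths below are illustrative only:

    from pathlib import Path

    from mcp_code_indexer.database.path_resolver import DatabasePathResolver

    resolver = DatabasePathResolver(global_db_path=Path("/tmp/global-tracker.db"))

    # Returns /path/to/project/.code-index.db if that file exists, else the global path.
    print(resolver.resolve_database_path("/path/to/project"))

    # A zero-byte .code-index.db is treated as a request to initialize a local database.
    print(resolver.should_initialize_local_database("/path/to/project"))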
mcp_code_indexer/main.py
CHANGED
@@ -321,6 +321,7 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
         "update_codebase_overview": server._handle_update_codebase_overview,
         "get_word_frequency": server._handle_get_word_frequency,
         "search_codebase_overview": server._handle_search_codebase_overview,
+        "check_database_health": server._handle_check_database_health,
     }
 
     if tool_name not in tool_handlers:
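For context, the runcommand path looks tools up by name in tool_handlers and awaits the matching coroutine; the new entry makes check_database_health reachable the same way. A self-contained schematic of that dispatch (the fake handler below is a stand-in, not the real implementation):

    import asyncio

    async def _fake_check_database_health(arguments: dict) -> dict:
        # Stand-in for server._handle_check_database_health; the real handler returns
        # the condensed health payload shown further down in this diff.
        return {"is_healthy": True}

    async def dispatch(tool_name: str, arguments: dict) -> dict:
        tool_handlers = {"check_database_health": _fake_check_database_health}
        if tool_name not in tool_handlers:
            raise ValueError(f"Unknown tool: {tool_name}")
        return await tool_handlers[tool_name](arguments)

    print(asyncio.run(dispatch("check_database_health", {})))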
@@ -333,6 +334,7 @@ async def handle_runcommand(args: argparse.Namespace) -> None:
                 }
             },
         )
+
         error_result = {
             "error": {"code": -32601, "message": f"Unknown tool: {tool_name}"}
         }
mcp_code_indexer/server/mcp_server.py
CHANGED
@@ -23,6 +23,7 @@ from mcp.server.stdio import stdio_server
 from pydantic import ValidationError
 
 from mcp_code_indexer.database.database import DatabaseManager
+from mcp_code_indexer.database.database_factory import DatabaseFactory
 from mcp_code_indexer.file_scanner import FileScanner
 from mcp_code_indexer.token_counter import TokenCounter
 from mcp_code_indexer.database.models import (
@@ -97,8 +98,8 @@ class MCPCodeIndexServer:
         }
 
         # Initialize components
-        self.
-
+        self.db_factory = DatabaseFactory(
+            global_db_path=self.db_path,
             pool_size=db_pool_size,
             retry_count=db_retry_count,
             timeout=db_timeout,
@@ -108,8 +109,10 @@ class MCPCodeIndexServer:
             retry_max_wait=retry_max_wait,
             retry_jitter=retry_jitter,
         )
+        # Keep reference to global db_manager for backwards compatibility
+        self.db_manager = None  # Will be set during run()
         self.token_counter = TokenCounter(token_limit)
-        self.cleanup_manager =
+        self.cleanup_manager = None  # Will be set during initialize()
 
         # Setup error handling
         self.logger = get_logger(__name__)
@@ -241,7 +244,10 @@ class MCPCodeIndexServer:
 
     async def initialize(self) -> None:
        """Initialize database and other resources."""
-
+        # Initialize global database manager for backwards compatibility
+        self.db_manager = await self.db_factory.get_database_manager()
+        # Update cleanup manager with initialized db_manager
+        self.cleanup_manager = CleanupManager(self.db_manager, retention_months=6)
         self._start_background_cleanup()
         logger.info("Server initialized successfully")
 
@@ -745,14 +751,17 @@ class MCPCodeIndexServer:
         project_name = arguments["projectName"]
         folder_path = arguments["folderPath"]
 
+        # Get the appropriate database manager for this folder
+        db_manager = await self.db_factory.get_database_manager(folder_path)
+
         # Normalize project name for case-insensitive matching
         normalized_name = project_name.lower()
 
         # Find potential project matches
-        project = await self._find_matching_project(normalized_name, folder_path)
+        project = await self._find_matching_project(normalized_name, folder_path, db_manager)
         if project:
             # Update project metadata and aliases
-            await self._update_existing_project(project, normalized_name, folder_path)
+            await self._update_existing_project(project, normalized_name, folder_path, db_manager)
         else:
             # Create new project with UUID
             project_id = str(uuid.uuid4())
@@ -763,20 +772,20 @@ class MCPCodeIndexServer:
                 created=datetime.utcnow(),
                 last_accessed=datetime.utcnow(),
             )
-            await
+            await db_manager.create_project(project)
             logger.info(f"Created new project: {normalized_name} ({project_id})")
 
         return project.id
 
     async def _find_matching_project(
-        self, normalized_name: str, folder_path: str
+        self, normalized_name: str, folder_path: str, db_manager: DatabaseManager
     ) -> Optional[Project]:
         """
        Find a matching project using name and folder path matching.
 
        Returns the best matching project or None if no sufficient match is found.
        """
-        all_projects = await
+        all_projects = await db_manager.get_all_projects()
 
         best_match = None
         best_score = 0
@@ -863,11 +872,11 @@ class MCPCodeIndexServer:
         return False
 
     async def _update_existing_project(
-        self, project: Project, normalized_name: str, folder_path: str
+        self, project: Project, normalized_name: str, folder_path: str, db_manager: DatabaseManager
     ) -> None:
         """Update an existing project with new metadata and folder alias."""
         # Update last accessed time
-        await
+        await db_manager.update_project_access_time(project.id)
 
         should_update = False
 
@@ -891,16 +900,18 @@ class MCPCodeIndexServer:
             )
 
         if should_update:
-            await
+            await db_manager.update_project(project)
             logger.debug(f"Updated project metadata for {project.name}")
 
     async def _handle_get_file_description(
         self, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """Handle get_file_description tool calls."""
+        folder_path = arguments["folderPath"]
+        db_manager = await self.db_factory.get_database_manager(folder_path)
         project_id = await self._get_or_create_project_id(arguments)
 
-        file_desc = await
+        file_desc = await db_manager.get_file_description(
             project_id=project_id, file_path=arguments["filePath"]
         )
 
@@ -928,6 +939,8 @@ class MCPCodeIndexServer:
         description_length = len(arguments.get("description", ""))
         logger.info(f"Description length: {description_length} characters")
 
+        folder_path = arguments["folderPath"]
+        db_manager = await self.db_factory.get_database_manager(folder_path)
         project_id = await self._get_or_create_project_id(arguments)
 
         logger.info(f"Resolved project_id: {project_id}")
@@ -941,7 +954,7 @@ class MCPCodeIndexServer:
             version=1,
         )
 
-        await
+        await db_manager.create_file_description(file_desc)
 
         logger.info(f"Successfully updated description for: {arguments['filePath']}")
 
@@ -1337,16 +1350,15 @@ class MCPCodeIndexServer:
         database_stats = self.db_manager.get_database_stats()
 
         return {
-            "
-            "
-            "
-
-            "
-
-
-
-
-            ),
+            "is_healthy": comprehensive_diagnostics.get("current_status", {}).get("is_healthy", True),
+            "status": comprehensive_diagnostics.get("current_status", {}),
+            "performance": {
+                "avg_response_time_ms": comprehensive_diagnostics.get("metrics", {}).get("avg_response_time_ms", 0),
+                "success_rate": comprehensive_diagnostics.get("current_status", {}).get("recent_success_rate_percent", 100)
+            },
+            "database": {
+                "total_operations": database_stats.get("retry_executor", {}).get("total_operations", 0),
+                "pool_size": database_stats.get("connection_pool", {}).get("current_size", 0)
             },
             "server_info": {
                 "token_limit": self.token_limit,
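The last hunk above condenses the health-check response built from comprehensive_diagnostics and database_stats. A sketch of the resulting payload shape as a caller might consume it; the values are illustrative placeholders, not outputs from the real server:

    # Illustrative shape of the condensed health payload after this change.
    example_health = {
        "is_healthy": True,
        "status": {},
        "performance": {"avg_response_time_ms": 0, "success_rate": 100},
        "database": {"total_operations": 0, "pool_size": 0},
        "server_info": {"token_limit": 32000},  # placeholder token limit
    }
    print(example_health["performance"]["success_rate"])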
{mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: mcp-code-indexer
-Version: 3.
+Version: 3.5.0
 Summary: MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews.
 License: MIT
 Keywords: mcp,model-context-protocol,code-indexer,ai-tools,codebase-navigation,file-descriptions,llm-tools
@@ -40,8 +40,8 @@ Description-Content-Type: text/markdown
 
 # MCP Code Indexer 🚀
 
-[](https://badge.fury.io/py/mcp-code-indexer)
+[](https://pypi.org/project/mcp-code-indexer/)
 [](https://opensource.org/licenses/MIT)
 
 A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
{mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/RECORD
CHANGED
@@ -1,21 +1,25 @@
-mcp_code_indexer/__init__.py,sha256=
+mcp_code_indexer/__init__.py,sha256=L_IpDV7vnk_plYvcVoBOmysRfW7P9BvGNrr8dIRIiQ0,1809
 mcp_code_indexer/__main__.py,sha256=4Edinoe0ug43hobuLYcjTmGp2YJnlFYN4_8iKvUBJ0Q,213
 mcp_code_indexer/ask_handler.py,sha256=cy7gVFyXF0c10GZ3Aquktvgw1A8e4_NtBsbjlE1Bc84,9106
 mcp_code_indexer/claude_api_handler.py,sha256=uZF6P64Cac9AHfO2Q3Whe4exhZyZmqZ1grWT1nHw-Wc,13616
 mcp_code_indexer/cleanup_manager.py,sha256=qjIAMiJ-F1pfgCwVbNaNE0dfs8Wh9aaWh51DBMCWFuI,9491
+mcp_code_indexer/commands/__init__.py,sha256=141U722dS_NnFTZyxTPipzhXKdU21kCv-mcrN4djyHo,45
+mcp_code_indexer/commands/makelocal.py,sha256=2rbHG5G4VgDECZ74YFxcLtKYtuDYUMIbhv_LtW3ZVJQ,7550
 mcp_code_indexer/data/stop_words_english.txt,sha256=7Zdd9ameVgA6tN_zuXROvHXD4hkWeELVywPhb7FJEkw,6343
 mcp_code_indexer/database/__init__.py,sha256=aPq_aaRp0aSwOBIq9GkuMNjmLxA411zg2vhdrAuHm-w,38
-mcp_code_indexer/database/connection_health.py,sha256=
+mcp_code_indexer/database/connection_health.py,sha256=vu8c8lDLEwva7ldTdxvlACTKT59kO-KjZ9I3tMbwr3s,25240
 mcp_code_indexer/database/database.py,sha256=BpsWoy5qXqLQpEZ42dt5efOGSrLhokQyvAc9ZK0afc4,46895
+mcp_code_indexer/database/database_factory.py,sha256=M7ojiDzs-acJG3QE4khTo2pYyQM9gUBcMAm0duboiLU,4875
 mcp_code_indexer/database/exceptions.py,sha256=Cs9_qc-6724DPJc25fPMvNfO3JQCHrOQ80y8Q55w_3Y,10389
 mcp_code_indexer/database/models.py,sha256=ITF5dMSBCuaunQ3YeaVQOZ5Kb8y59I5Fg0EU7O9Ez3A,7017
+mcp_code_indexer/database/path_resolver.py,sha256=UFLyo_Y5pyHmU1a27XB6S-XHq6l5IUlhfGe5lgFCWv4,4041
 mcp_code_indexer/database/retry_executor.py,sha256=wBIIbkU1bwQMrjM9AmDWNEQ-cw8IPNobfdeUOLhQVjQ,13528
 mcp_code_indexer/deepask_handler.py,sha256=wpKMYnlsOGiaKLvuXIb62jeEb4xnYOmIcvvXjvbgdnc,18475
 mcp_code_indexer/error_handler.py,sha256=XBjjEriq1diPTGKpHcaBh9fj88_qhuNMwPeLiTWxrds,11431
 mcp_code_indexer/file_scanner.py,sha256=smY1Yfxfyqb_J5RQz5ETaSgE2_syC2SUUwzJxby3Bg8,11432
 mcp_code_indexer/git_hook_handler.py,sha256=mP8uvtQFo4C6dPNSBlG4NUXwVTXofDzDpjZVSk43yzw,45542
 mcp_code_indexer/logging_config.py,sha256=hexJWw7-6QQkH_2BwtKGO1CDOtQnP8F3Yss_yHKnzE4,9816
-mcp_code_indexer/main.py,sha256=
+mcp_code_indexer/main.py,sha256=2T2yXG_-MIDmj1xdW85FolaOtv4vshlWjljhRSIh5IA,30800
 mcp_code_indexer/middleware/__init__.py,sha256=p-mP0pMsfiU2yajCPvokCUxUEkh_lu4XJP1LyyMW2ug,220
 mcp_code_indexer/middleware/error_middleware.py,sha256=YHd7sm4PdNPIMKD8Nub_N7WaOH2JtiqkHBbTOGyxTno,11685
 mcp_code_indexer/migrations/001_initial.sql,sha256=hIXkCP4LA_4A9HJ1CHU0a1DD-a6EN6u-uJPMqW0c2Yo,4120
@@ -25,12 +29,12 @@ mcp_code_indexer/migrations/004_remove_branch_dependency.sql,sha256=whZvj2qfba1-
 mcp_code_indexer/migrations/005_remove_git_remotes.sql,sha256=vT84AaV1hyN4zq5W67hR14TgAwhW7_RNtBHrCoksxA4,1299
 mcp_code_indexer/query_preprocessor.py,sha256=PLFR1T9mSn2Mkxw6-GB4GkxyfzjJ2ia3dgLPcziHfVA,5483
 mcp_code_indexer/server/__init__.py,sha256=16xMcuriUOBlawRqWNBk6niwrvtv_JD5xvI36X1Vsmk,41
-mcp_code_indexer/server/mcp_server.py,sha256=
+mcp_code_indexer/server/mcp_server.py,sha256=o6G5vNxtyWwL_x4UXTB9XfG6acoba5P9hTJ70qoZ_l4,72660
 mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
 mcp_code_indexer/token_counter.py,sha256=e6WsyCEWMMSkMwLbcVtr5e8vEqh-kFqNmiJErCNdqHE,8220
 mcp_code_indexer/tools/__init__.py,sha256=m01mxML2UdD7y5rih_XNhNSCMzQTz7WQ_T1TeOcYlnE,49
-mcp_code_indexer-3.
-mcp_code_indexer-3.
-mcp_code_indexer-3.
-mcp_code_indexer-3.
-mcp_code_indexer-3.
+mcp_code_indexer-3.5.0.dist-info/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
+mcp_code_indexer-3.5.0.dist-info/METADATA,sha256=2vo4ryVLHtcq_qNukXkMJCZoBT1qC_cmK5JHAd_NlB4,19359
+mcp_code_indexer-3.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+mcp_code_indexer-3.5.0.dist-info/entry_points.txt,sha256=UABj7HZ0mC6rvF22gxaz2LLNLGQShTrFmp5u00iUtvo,67
+mcp_code_indexer-3.5.0.dist-info/RECORD,,
{mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/LICENSE: File without changes
{mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/WHEEL: File without changes
{mcp_code_indexer-3.4.2.dist-info → mcp_code_indexer-3.5.0.dist-info}/entry_points.txt: File without changes