mcp-code-indexer 4.2.18__tar.gz → 4.2.20__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/PKG-INFO +3 -3
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/README.md +2 -2
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/pyproject.toml +1 -1
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/cleanup_manager.py +12 -12
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/database.py +72 -48
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/exceptions.py +2 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/retry_executor.py +1 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/file_scanner.py +10 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/logging_config.py +8 -3
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/server/mcp_server.py +65 -49
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/LICENSE +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/__main__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/ask_handler.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/claude_api_handler.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/commands/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/commands/makelocal.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/data/stop_words_english.txt +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/connection_health.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/database_factory.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/models.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/path_resolver.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/deepask_handler.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/error_handler.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/git_hook_handler.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/main.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/middleware/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/middleware/auth.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/middleware/error_middleware.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/middleware/logging.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/middleware/security.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/001_initial.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/002_performance_indexes.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/003_project_overviews.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/004_remove_branch_dependency.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/005_remove_git_remotes.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/migrations/006_vector_mode.sql +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/query_preprocessor.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/server/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4 +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/token_counter.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/tools/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/transport/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/transport/base.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/transport/http_transport.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/transport/stdio_transport.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/chunking/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/chunking/ast_chunker.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/chunking/chunk_optimizer.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/chunking/language_handlers.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/config.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/const.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/daemon.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/monitoring/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/monitoring/change_detector.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/monitoring/file_watcher.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/monitoring/merkle_tree.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/providers/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/providers/turbopuffer_client.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/providers/voyage_client.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/security/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/security/patterns.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/security/redactor.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/services/__init__.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/services/embedding_service.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/services/vector_mode_tools_service.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/services/vector_storage_service.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/types.py +0 -0
- {mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/vector_mode/utils.py +0 -0
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/PKG-INFO
RENAMED

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-code-indexer
-Version: 4.2.18
+Version: 4.2.20
 Summary: MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews.
 License: MIT
 License-File: LICENSE
@@ -49,8 +49,8 @@ Description-Content-Type: text/markdown
 
 # MCP Code Indexer 🚀
 
-[](https://badge.fury.io/py/mcp-code-indexer)
+[](https://pypi.org/project/mcp-code-indexer/)
 [](https://opensource.org/licenses/MIT)
 
 A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
```
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/README.md
RENAMED

```diff
@@ -1,7 +1,7 @@
 # MCP Code Indexer 🚀
 
-[](https://badge.fury.io/py/mcp-code-indexer)
+[](https://pypi.org/project/mcp-code-indexer/)
 [](https://opensource.org/licenses/MIT)
 
 A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
```
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/pyproject.toml
RENAMED

```diff
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "mcp-code-indexer"
-version = "4.2.18"
+version = "4.2.20"
 description = "MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews."
 authors = ["MCP Code Indexer Contributors"]
 maintainers = ["MCP Code Indexer Contributors"]
```
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/cleanup_manager.py
RENAMED

```diff
@@ -51,10 +51,8 @@ class CleanupManager:
         """
         cleanup_timestamp = int(time.time())
 
-        async
-
-        ) as db:
-            cursor = await db.execute(
+        async def operation(conn: Any) -> bool:
+            cursor = await conn.execute(
                 """
                 UPDATE file_descriptions
                 SET to_be_cleaned = ?
@@ -62,11 +60,13 @@ class CleanupManager:
                 """,
                 (cleanup_timestamp, project_id, file_path),
             )
-            await db.commit()
-
             # Check if any rows were affected
             return cursor.rowcount > 0
 
+        return await self.db_manager.execute_transaction_with_retry(
+            operation, "mark_file_for_cleanup"
+        )
+
     async def mark_files_for_cleanup(
         self, project_id: str, file_paths: List[str]
     ) -> int:
@@ -117,10 +117,8 @@ class CleanupManager:
         Returns:
             True if file was restored, False if file not found
         """
-        async
-
-        ) as db:
-            cursor = await db.execute(
+        async def operation(conn: Any) -> bool:
+            cursor = await conn.execute(
                 """
                 UPDATE file_descriptions
                 SET to_be_cleaned = NULL
@@ -128,10 +126,12 @@ class CleanupManager:
                 """,
                 (project_id, file_path),
             )
-            await db.commit()
-
             return cursor.rowcount > 0
 
+        return await self.db_manager.execute_transaction_with_retry(
+            operation, "restore_file_from_cleanup"
+        )
+
     async def get_files_to_be_cleaned(self, project_id: str) -> List[dict]:
         """
         Get list of files marked for cleanup in a project.
```
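The cleanup_manager.py hunks replace hand-rolled `async with ... as db` blocks and explicit `db.commit()` calls with an inner `operation` callback handed to `DatabaseManager.execute_transaction_with_retry`, which owns commit, rollback, and retry. A minimal sketch of that callback shape, assuming a simplified standalone executor rather than the real `DatabaseManager`:

```python
import asyncio
from typing import Any, Awaitable, Callable

import aiosqlite


async def execute_transaction_with_retry(
    db_path: str,
    operation: Callable[[aiosqlite.Connection], Awaitable[Any]],
    operation_name: str,
    max_retries: int = 3,
) -> Any:
    """Run `operation` inside one transaction; retry when SQLite reports a lock."""
    for attempt in range(1, max_retries + 1):
        # isolation_level=None puts sqlite3 in autocommit so BEGIN IMMEDIATE is explicit.
        async with aiosqlite.connect(db_path, isolation_level=None) as conn:
            try:
                await conn.execute("BEGIN IMMEDIATE")
                result = await operation(conn)  # the callback never commits itself
                await conn.execute("COMMIT")
                return result
            except aiosqlite.OperationalError as e:
                if conn.in_transaction:
                    await conn.execute("ROLLBACK")
                if "locked" in str(e).lower() and attempt < max_retries:
                    await asyncio.sleep(0.05 * attempt)  # crude backoff between attempts
                    continue
                raise


async def demo() -> bool:
    # Mirrors mark_file_for_cleanup: the callback only issues statements and
    # reports rowcount; transaction boundaries live entirely in the executor.
    async def operation(conn: aiosqlite.Connection) -> bool:
        await conn.execute(
            "CREATE TABLE IF NOT EXISTS file_descriptions "
            "(project_id TEXT, file_path TEXT, to_be_cleaned INTEGER)"
        )
        await conn.execute(
            "INSERT INTO file_descriptions VALUES (?, ?, ?)",
            ("proj", "src/app.py", None),
        )
        cursor = await conn.execute(
            "UPDATE file_descriptions SET to_be_cleaned = ? "
            "WHERE project_id = ? AND file_path = ?",
            (1234, "proj", "src/app.py"),
        )
        return cursor.rowcount > 0

    return await execute_transaction_with_retry(":memory:", operation, "mark_file_for_cleanup")


print(asyncio.run(demo()))  # True: one row marked for cleanup
```

The payoff is visible in the hunks above: the callbacks lose their `commit()` calls entirely, so a failed operation can no longer half-commit.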
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/database.py
RENAMED
```diff
@@ -396,7 +396,7 @@ class DatabaseManager:
     async def get_immediate_transaction(
         self,
         operation_name: str = "immediate_transaction",
-        timeout_seconds: float =
+        timeout_seconds: Optional[float] = None,
     ) -> AsyncIterator[aiosqlite.Connection]:
         """
         Get a database connection with BEGIN IMMEDIATE transaction and
@@ -407,8 +407,10 @@ class DatabaseManager:
 
         Args:
             operation_name: Name of the operation for monitoring
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
         """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
         import time
         acquire_start = time.monotonic()
         async with self.get_write_connection_with_retry(operation_name) as conn:
@@ -420,7 +422,7 @@ class DatabaseManager:
             # Start immediate transaction with timeout
             begin_start = time.monotonic()
             await asyncio.wait_for(
-                conn.execute("BEGIN IMMEDIATE"), timeout=
+                conn.execute("BEGIN IMMEDIATE"), timeout=actual_timeout
             )
             begin_time = time.monotonic() - begin_start
             logger.debug(
@@ -436,23 +438,35 @@ class DatabaseManager:
             except asyncio.TimeoutError:
                 logger.warning(
                     (
-                        f"Transaction timeout after {
+                        f"Transaction timeout after {actual_timeout}s for "
                         f"{operation_name}"
                     ),
                     extra={
                         "structured_data": {
                             "transaction_timeout": {
                                 "operation": operation_name,
-                                "timeout_seconds":
+                                "timeout_seconds": actual_timeout,
                             }
                         }
                     },
                 )
-
+                # Shield rollback from cancellation to prevent leaked transactions
+                await asyncio.shield(conn.rollback())
                 raise
-            except
-
-
+            except BaseException as e:
+                # Catch BaseException to handle asyncio.CancelledError and ensure
+                # proper rollback on task cancellation. Shield the rollback to
+                # prevent cancellation from interrupting cleanup.
+                if isinstance(e, asyncio.CancelledError):
+                    logger.warning(f"Transaction cancelled for {operation_name}")
+                else:
+                    logger.error(f"Transaction failed for {operation_name}: {e}")
+                try:
+                    await asyncio.shield(conn.rollback())
+                except Exception as rollback_error:
+                    logger.error(
+                        f"Rollback failed for {operation_name}: {rollback_error}"
+                    )
                 raise
 
     async def execute_transaction_with_retry(
@@ -460,7 +474,7 @@ class DatabaseManager:
         operation_func: Callable[[aiosqlite.Connection], Any],
         operation_name: str = "transaction_operation",
         max_retries: int = 3,
-        timeout_seconds: float =
+        timeout_seconds: Optional[float] = None,
     ) -> Any:
         """
         Execute a database operation within a transaction with automatic
@@ -475,7 +489,8 @@ class DatabaseManager:
             operation_name: Name of the operation for logging
             max_retries: Maximum retry attempts (overrides default retry
                 executor config)
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
 
         Returns:
             Result from operation_func
@@ -489,6 +504,7 @@ class DatabaseManager:
                 my_operation, "insert_data"
             )
         """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
 
         async def execute_transaction() -> Any:
             """Inner function to execute transaction - retried by executor."""
@@ -496,11 +512,11 @@ class DatabaseManager:
             start_time = time.monotonic()
             logger.debug(
                 f"[{operation_name}] Starting transaction "
-                f"(timeout={
+                f"(timeout={actual_timeout}s, pool_size={len(self._connection_pool)})"
             )
             try:
                 async with self.get_immediate_transaction(
-                    operation_name,
+                    operation_name, actual_timeout
                 ) as conn:
                     lock_acquired_time = time.monotonic()
                     logger.debug(
@@ -523,7 +539,7 @@ class DatabaseManager:
                     if self._metrics_collector:
                         self._metrics_collector.record_operation(
                             operation_name,
-
+                            actual_timeout * 1000,  # Convert to ms
                             True,
                             len(self._connection_pool),
                         )
@@ -555,7 +571,7 @@ class DatabaseManager:
                     if self._metrics_collector:
                         self._metrics_collector.record_operation(
                             operation_name,
-
+                            actual_timeout * 1000,
                             False,
                             len(self._connection_pool),
                         )
@@ -565,7 +581,7 @@ class DatabaseManager:
                 elapsed = time.monotonic() - start_time
                 logger.warning(
                     f"[{operation_name}] Timeout after {elapsed*1000:.1f}ms "
-                    f"waiting for database lock (timeout={
+                    f"waiting for database lock (timeout={actual_timeout}s)"
                 )
                 if self._metrics_collector:
                     self._metrics_collector.record_locking_event(
@@ -605,7 +621,7 @@ class DatabaseManager:
             if self._metrics_collector:
                 self._metrics_collector.record_operation(
                     operation_name,
-
+                    actual_timeout * 1000,
                     False,
                     len(self._connection_pool),
                 )
@@ -624,7 +640,7 @@ class DatabaseManager:
             if self._metrics_collector:
                 self._metrics_collector.record_operation(
                     operation_name,
-
+                    actual_timeout * 1000,
                     False,
                     len(self._connection_pool),
                 )
@@ -634,8 +650,8 @@ class DatabaseManager:
 
     async def create_project(self, project: Project) -> None:
         """Create a new project record."""
-        async
-        await
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 """
                 INSERT INTO projects (id, name, aliases, created, last_accessed)
                 VALUES (?, ?, ?, ?, ?)
@@ -648,8 +664,9 @@ class DatabaseManager:
                     project.last_accessed,
                 ),
             )
-
-
+
+        await self.execute_transaction_with_retry(operation, "create_project")
+        logger.debug(f"Created project: {project.id}")
 
     async def get_project(self, project_id: str) -> Optional[Project]:
         """Get project by ID."""
@@ -764,19 +781,20 @@ class DatabaseManager:
 
     async def update_project_access_time(self, project_id: str) -> None:
         """Update the last accessed time for a project."""
-        async
-
-        ) as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 "UPDATE projects SET last_accessed = ? WHERE id = ?",
                 (datetime.utcnow(), project_id),
             )
-
+
+        await self.execute_transaction_with_retry(
+            operation, "update_project_access_time"
+        )
 
     async def update_project(self, project: Project) -> None:
         """Update an existing project record."""
-        async
-        await
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 """
                 UPDATE projects
                 SET name = ?, aliases = ?, last_accessed = ?
@@ -789,27 +807,28 @@ class DatabaseManager:
                     project.id,
                 ),
             )
-
-
+
+        await self.execute_transaction_with_retry(operation, "update_project")
+        logger.debug(f"Updated project: {project.id}")
 
     async def set_project_vector_mode(self, project_id: str, enabled: bool) -> None:
         """Set the vector_mode for a specific project."""
-        async
-
-        ) as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 "UPDATE projects SET vector_mode = ? WHERE id = ?",
                 (int(enabled), project_id),
            )
 
             # Check if the project was actually updated
-            cursor = await
+            cursor = await conn.execute("SELECT changes()")
             changes = await cursor.fetchone()
             if changes[0] == 0:
                 raise DatabaseError(f"Project not found: {project_id}")
 
-
-
+        await self.execute_transaction_with_retry(
+            operation, "set_project_vector_mode"
+        )
+        logger.debug(f"Set vector_mode={enabled} for project: {project_id}")
 
     async def get_all_projects(self) -> List[Project]:
         """Get all projects in the database."""
@@ -1080,23 +1099,25 @@ class DatabaseManager:
         """Cache token count with TTL."""
         expires = datetime.utcnow() + timedelta(hours=ttl_hours)
 
-        async
-        await
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 """
                 INSERT OR REPLACE INTO token_cache (cache_key, token_count, expires)
                 VALUES (?, ?, ?)
                 """,
                 (cache_key, token_count, expires),
             )
-
+
+        await self.execute_transaction_with_retry(operation, "cache_token_count")
 
     async def cleanup_expired_cache(self) -> None:
         """Remove expired cache entries."""
-        async
-        await
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                 "DELETE FROM token_cache WHERE expires < ?", (datetime.utcnow(),)
             )
-
+
+        await self.execute_transaction_with_retry(operation, "cleanup_expired_cache")
 
     # Utility operations
 
@@ -1316,9 +1337,9 @@ class DatabaseManager:
         Returns:
             Number of projects removed
         """
-        async
+        async def operation(conn: aiosqlite.Connection) -> int:
             # Find projects with no descriptions and no overview
-            cursor = await
+            cursor = await conn.execute(
                 """
                 SELECT p.id, p.name
                 FROM projects p
@@ -1339,14 +1360,17 @@ class DatabaseManager:
                 project_name = project["name"]
 
                 # Remove from projects table (cascading will handle related data)
-                await
+                await conn.execute("DELETE FROM projects WHERE id = ?", (project_id,))
                 removed_count += 1
 
                 logger.info(f"Removed empty project: {project_name} (ID: {project_id})")
 
-            await db.commit()
             return removed_count
 
+        return await self.execute_transaction_with_retry(
+            operation, "cleanup_empty_projects"
+        )
+
     async def get_project_map_data(
         self, project_identifier: str
     ) -> Optional[Dict[str, Any]]:
```
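Two patterns recur through these database.py hunks: a per-call `timeout_seconds` that now defaults to the manager-wide `self.timeout` when left as `None`, and a rollback path that catches `BaseException` and wraps the rollback in `asyncio.shield` so task cancellation cannot leave a `BEGIN IMMEDIATE` transaction open. A condensed sketch of both, assuming a toy manager rather than the real `DatabaseManager`:

```python
import asyncio
import logging
from contextlib import asynccontextmanager
from typing import AsyncIterator, Optional

import aiosqlite

logger = logging.getLogger(__name__)


class TxManager:
    """Toy stand-in for DatabaseManager; shows only the timeout and rollback logic."""

    def __init__(self, db_path: str, timeout: float = 10.0) -> None:
        self.db_path = db_path
        self.timeout = timeout  # manager-wide default

    @asynccontextmanager
    async def get_immediate_transaction(
        self,
        operation_name: str = "immediate_transaction",
        timeout_seconds: Optional[float] = None,
    ) -> AsyncIterator[aiosqlite.Connection]:
        # None means "use the manager default" instead of a hardcoded constant.
        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
        async with aiosqlite.connect(self.db_path, isolation_level=None) as conn:
            await asyncio.wait_for(conn.execute("BEGIN IMMEDIATE"), timeout=actual_timeout)
            try:
                yield conn
                await conn.execute("COMMIT")
            except BaseException as e:
                # BaseException also covers asyncio.CancelledError, so a cancelled
                # task still rolls back; shield() keeps the rollback itself from
                # being interrupted by that same cancellation.
                if isinstance(e, asyncio.CancelledError):
                    logger.warning("Transaction cancelled for %s", operation_name)
                else:
                    logger.error("Transaction failed for %s: %s", operation_name, e)
                try:
                    await asyncio.shield(conn.rollback())
                except Exception as rollback_error:
                    logger.error("Rollback failed for %s: %s", operation_name, rollback_error)
                raise
```

With this shape, call sites like `execute_transaction_with_retry` can simply pass `timeout_seconds` through, and a single knob (`self.timeout`) governs every transaction deadline unless a caller overrides it.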
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/database/exceptions.py
RENAMED
```diff
@@ -191,6 +191,7 @@ def classify_sqlite_error(error: Exception, operation_name: str = "") -> Databas
             "database is locked",
             "sqlite_locked",
             "attempt to write a readonly database",
+            "timeout waiting for database lock",
         ]
     ):
         lock_type = (
@@ -297,6 +298,7 @@ def is_retryable_error(error: Exception) -> bool:
         "sqlite_busy",
         "sqlite_locked",
         "cannot start a transaction within a transaction",
+        "timeout waiting for database lock",
     ]
 
     return any(pattern in error_message for pattern in retryable_patterns)
```
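These two added strings matter because the transaction path in database.py now surfaces lock contention as a "timeout waiting for database lock" message; without these entries it would be classified as non-retryable and bubble out instead of being retried. The matching itself is plain substring search over the lowercased error text, roughly as follows (pattern list abridged to what the hunks show):

```python
def is_retryable_error(error: Exception) -> bool:
    # Substring matching over the lowercased message; list abridged to the
    # patterns visible in the diff above.
    error_message = str(error).lower()
    retryable_patterns = [
        "database is locked",
        "sqlite_busy",
        "sqlite_locked",
        "cannot start a transaction within a transaction",
        "timeout waiting for database lock",  # added in 4.2.20
    ]
    return any(pattern in error_message for pattern in retryable_patterns)


# An illustrative error carrying the new message now qualifies for retry:
print(is_retryable_error(RuntimeError("Timeout waiting for database lock")))  # True
```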
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/file_scanner.py
RENAMED

```diff
@@ -499,3 +499,13 @@ class FileScanner:
         """
         loop = asyncio.get_running_loop()
         return await loop.run_in_executor(None, self.find_missing_files, existing_paths)
+
+    async def is_valid_project_directory_async(self) -> bool:
+        """
+        Async version of is_valid_project_directory running in a thread.
+
+        Returns:
+            True if the directory exists and is accessible
+        """
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, self.is_valid_project_directory)
```
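The new method follows the same recipe as `find_missing_files_async` directly above it: keep the blocking filesystem code untouched and hop it onto the default thread pool with `run_in_executor`. A self-contained sketch of the pairing (the `FileScanner` body here is a stub, not the real class):

```python
import asyncio
from pathlib import Path


class FileScanner:
    """Stub scanner showing the sync/async method pairing used above."""

    def __init__(self, root: Path) -> None:
        self.root = root

    def is_valid_project_directory(self) -> bool:
        # Blocking stat() calls belong in a worker thread, not on the event loop.
        return self.root.exists() and self.root.is_dir()

    async def is_valid_project_directory_async(self) -> bool:
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self.is_valid_project_directory)


async def main() -> None:
    print(await FileScanner(Path(".")).is_valid_project_directory_async())


asyncio.run(main())
```

mcp_server.py switches its call sites to these `_async` variants (see the hunks below), so directory validation and scanning no longer stall concurrent tool calls.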
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/logging_config.py
RENAMED

```diff
@@ -34,16 +34,21 @@ def setup_logging(
     Returns:
         Configured root logger
     """
-    # Get root logger
+    # Get root logger - set to DEBUG so all logs reach handlers.
+    # Each handler filters to its own level.
     root_logger = logging.getLogger()
-    root_logger.setLevel(
+    root_logger.setLevel(logging.DEBUG)
 
     # Clear existing handlers
     root_logger.handlers.clear()
 
     # Console handler (stderr to avoid interfering with MCP stdout)
     console_handler = logging.StreamHandler(sys.stderr)
-
+    # Force console logging to at least WARNING to prevent stderr buffer blocking
+    # when MCP clients don't consume stderr fast enough. File logging captures
+    # everything (DEBUG+) for detailed diagnostics.
+    requested_level = getattr(logging, log_level.upper())
+    console_handler.setLevel(max(requested_level, logging.WARNING))
 
     # Use structured formatter for all handlers
     structured_formatter = StructuredFormatter()
```
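The intent of this hunk: the root logger stops filtering (DEBUG lets every record through) and each handler applies its own threshold, with the console clamped to WARNING or stricter so a slow stderr consumer cannot back-pressure the server. A sketch of the resulting split, assuming a plain DEBUG-level file handler like the real module presumably configures (the file path and the `StructuredFormatter` are omitted as illustrative details):

```python
import logging
import sys


def setup_logging_sketch(log_level: str = "INFO") -> logging.Logger:
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)  # root passes everything; handlers filter
    root_logger.handlers.clear()

    # Console on stderr: never chattier than WARNING, even if DEBUG was requested.
    console_handler = logging.StreamHandler(sys.stderr)
    requested_level = getattr(logging, log_level.upper())
    console_handler.setLevel(max(requested_level, logging.WARNING))
    root_logger.addHandler(console_handler)

    # File handler keeps full DEBUG detail for diagnostics (path is illustrative).
    file_handler = logging.FileHandler("mcp-code-indexer.log")
    file_handler.setLevel(logging.DEBUG)
    root_logger.addHandler(file_handler)

    return root_logger


logger = setup_logging_sketch("DEBUG")
logger.debug("reaches the log file only")   # filtered from the console handler
logger.warning("reaches file and console")
```

Level comparison via `max()` works because Python logging levels are plain integers (DEBUG=10 ... WARNING=30).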
{mcp_code_indexer-4.2.18 → mcp_code_indexer-4.2.20}/src/mcp_code_indexer/server/mcp_server.py
RENAMED
```diff
@@ -240,10 +240,10 @@ class MCPCodeIndexServer:
         try:
             result = json.loads(repaired)
             if isinstance(result, dict):
-                logger.
+                logger.debug(
                     f"Successfully repaired JSON. Original: {json_str[:100]}..."
                 )
-                logger.
+                logger.debug(f"Repaired: {repaired[:100]}...")
                 return result
             else:
                 raise ValueError(
@@ -793,8 +793,8 @@ class MCPCodeIndexServer:
 
         start_time = time.time()
 
-        logger.
-        logger.
+        logger.debug(f"=== MCP Tool Call: {name} ===")
+        logger.debug(f"Arguments: {', '.join(arguments.keys())}")
 
         # Map tool names to handler methods
         tool_handlers = {
@@ -828,7 +828,7 @@ class MCPCodeIndexServer:
             result = await wrapped_handler(arguments)
 
             elapsed_time = time.time() - start_time
-            logger.
+            logger.debug(
                 f"MCP Tool '{name}' completed successfully in {elapsed_time:.2f}s"
             )
 
@@ -887,7 +887,7 @@ class MCPCodeIndexServer:
             if datetime.utcnow() - project.last_accessed > timedelta(minutes=5):
                 await db_manager.update_project_access_time(project.id)
 
-            logger.
+            logger.debug(
                 f"Using existing local project: {project.name} (ID: {project.id})"
             )
             return project.id
@@ -904,7 +904,7 @@ class MCPCodeIndexServer:
                 last_accessed=datetime.utcnow(),
             )
             await db_manager.create_project(project)
-            logger.
+            logger.debug(
                 f"Created new local project: {project_name} (ID: {project_id})"
             )
             return project_id
@@ -932,7 +932,7 @@ class MCPCodeIndexServer:
                 last_accessed=datetime.utcnow(),
             )
             await db_manager.create_project(project)
-            logger.
+            logger.debug(
                 f"Created new global project: {normalized_name} (ID: {project_id})"
             )
 
@@ -975,7 +975,7 @@ class MCPCodeIndexServer:
                     if score > best_score:
                         best_score = score
                         best_match = project
-                        logger.
+                        logger.debug(
                             f"Match for project {project.name} "
                             f"(score: {score}, factors: {match_factors})"
                         )
@@ -983,7 +983,7 @@ class MCPCodeIndexServer:
                 # If only name matches, check file similarity for potential matches
                 elif score == 1 and "name" in match_factors:
                     if await self._check_file_similarity(project, folder_path):
-                        logger.
+                        logger.debug(
                             f"File similarity match for project {project.name} "
                             f"(factor: {match_factors[0]})"
                         )
@@ -1002,10 +1002,10 @@ class MCPCodeIndexServer:
         try:
             # Get files currently in the folder
             scanner = FileScanner(Path(folder_path))
-            if not scanner.
+            if not await scanner.is_valid_project_directory_async():
                 return False
 
-            current_files = scanner.
+            current_files = await scanner.scan_directory_async()
             current_basenames = {f.name for f in current_files}
 
             if not current_basenames:
@@ -1060,7 +1060,7 @@ class MCPCodeIndexServer:
                     project_aliases.append(folder_path)
                     project.aliases = project_aliases
                     should_update = True
-                    logger.
+                    logger.debug(
                         f"Added new folder alias to project {project.name}: {folder_path}"
                     )
 
@@ -1098,17 +1098,17 @@ class MCPCodeIndexServer:
         self, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """Handle update_file_description tool calls."""
-        logger.
-        logger.
+        logger.debug(f"Updating file description for: {arguments['filePath']}")
+        logger.debug(f"Project: {arguments.get('projectName', 'Unknown')}")
 
         description_length = len(arguments.get("description", ""))
-        logger.
+        logger.debug(f"Description length: {description_length} characters")
 
         folder_path = arguments["folderPath"]
         db_manager = await self.db_factory.get_database_manager(folder_path)
         project_id = await self._get_or_create_project_id(arguments)
 
-        logger.
+        logger.debug(f"Resolved project_id: {project_id}")
 
         file_desc = FileDescription(
             id=None,  # Will be set by database
@@ -1124,7 +1124,7 @@ class MCPCodeIndexServer:
 
         await db_manager.create_file_description(file_desc)
 
-        logger.
+        logger.debug(f"Successfully updated description for: {arguments['filePath']}")
 
         return {
             "success": True,
@@ -1137,17 +1137,17 @@ class MCPCodeIndexServer:
         self, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """Handle check_codebase_size tool calls."""
-        logger.
+        logger.debug(
             f"Checking codebase size for: {arguments.get('projectName', 'Unknown')}"
         )
-        logger.
+        logger.debug(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
 
         folder_path = arguments["folderPath"]
         db_manager = await self.db_factory.get_database_manager(folder_path)
         project_id = await self._get_or_create_project_id(arguments)
         folder_path_obj = Path(folder_path)
 
-        logger.
+        logger.debug(f"Resolved project_id: {project_id}")
 
         # Run cleanup if needed (respects 30-minute cooldown)
         cleaned_up_count = await self._run_cleanup_if_needed(
@@ -1155,26 +1155,31 @@ class MCPCodeIndexServer:
         )
 
         # Get file descriptions for this project (after cleanup)
-        logger.
+        logger.debug("Retrieving file descriptions...")
         file_descriptions = await db_manager.get_all_file_descriptions(
             project_id=project_id
         )
-        logger.
+        logger.debug(f"Found {len(file_descriptions)} file descriptions")
 
         # Use provided token limit or fall back to server default
         token_limit = arguments.get("tokenLimit", self.token_limit)
 
-        # Calculate total tokens for descriptions
-        logger.
-
+        # Calculate total tokens for descriptions (offload to executor to avoid blocking)
+        logger.debug("Calculating total token count...")
+        loop = asyncio.get_running_loop()
+        descriptions_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
             file_descriptions
         )
 
-        # Get overview tokens if available
+        # Get overview tokens if available (offload to executor to avoid blocking)
         overview = await db_manager.get_project_overview(project_id)
         overview_tokens = 0
         if overview and overview.overview:
-            overview_tokens =
+            overview_tokens = await loop.run_in_executor(
+                None, self.token_counter.count_tokens, overview.overview
+            )
 
         total_tokens = descriptions_tokens + overview_tokens
         is_large = total_tokens > token_limit
@@ -1192,16 +1197,16 @@ class MCPCodeIndexServer:
         else:
             recommendation = "use_search"
 
-        logger.
+        logger.debug(
             f"Codebase analysis complete: {total_tokens} tokens total "
             f"({descriptions_tokens} descriptions + {overview_tokens} overview), "
             f"{len(file_descriptions)} files"
         )
-        logger.
+        logger.debug(
             f"Size assessment: {'LARGE' if is_large else 'SMALL'} "
             f"(limit: {token_limit})"
         )
-        logger.
+        logger.debug(f"Recommendation: {recommendation}")
 
         return {
             "fileDescriptionTokens": descriptions_tokens,
@@ -1218,31 +1223,31 @@ class MCPCodeIndexServer:
         self, arguments: Dict[str, Any]
     ) -> Dict[str, Any]:
         """Handle find_missing_descriptions tool calls."""
-        logger.
+        logger.debug(
             f"Finding missing descriptions for: "
             f"{arguments.get('projectName', 'Unknown')}"
         )
-        logger.
+        logger.debug(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
 
         folder_path = arguments["folderPath"]
         db_manager = await self.db_factory.get_database_manager(folder_path)
         project_id = await self._get_or_create_project_id(arguments)
         folder_path_obj = Path(folder_path)
 
-        logger.
+        logger.debug(f"Resolved project_id: {project_id}")
 
         # Get existing file descriptions
-        logger.
+        logger.debug("Retrieving existing file descriptions...")
         existing_descriptions = await db_manager.get_all_file_descriptions(
             project_id=project_id
         )
         existing_paths = {desc.file_path for desc in existing_descriptions}
-        logger.
+        logger.debug(f"Found {len(existing_paths)} existing descriptions")
 
         # Scan directory for files
-        logger.
+        logger.debug(f"Scanning project directory: {folder_path_obj}")
         scanner = FileScanner(folder_path_obj)
-        if not scanner.
+        if not await scanner.is_valid_project_directory_async():
             logger.error(
                 f"Invalid or inaccessible project directory: {folder_path_obj}"
             )
@@ -1250,27 +1255,28 @@ class MCPCodeIndexServer:
                 "error": f"Invalid or inaccessible project directory: {folder_path_obj}"
             }
 
-        missing_files = scanner.
+        missing_files = await scanner.find_missing_files_async(existing_paths)
         missing_paths = [scanner.get_relative_path(f) for f in missing_files]
 
-        logger.
+        logger.debug(f"Found {len(missing_paths)} files without descriptions")
 
         # Apply randomization if specified
         randomize = arguments.get("randomize", False)
         if randomize:
             random.shuffle(missing_paths)
-            logger.
+            logger.debug("Randomized file order for parallel processing")
 
         # Apply limit if specified
         limit = arguments.get("limit")
         total_missing = len(missing_paths)
         if limit is not None and isinstance(limit, int) and limit > 0:
             missing_paths = missing_paths[:limit]
-            logger.
+            logger.debug(f"Applied limit {limit}, returning {len(missing_paths)} files")
 
-        # Get project stats
-
-
+        # Get project stats (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        stats = await loop.run_in_executor(None, scanner.get_project_stats)
+        logger.debug(f"Project stats: {stats.get('total_files', 0)} total files")
 
         return {
             "missingFiles": missing_paths,
@@ -1325,8 +1331,13 @@ class MCPCodeIndexServer:
             project_id=project_id
         )
 
-        # Calculate total tokens
-
+        # Calculate total tokens (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
         is_large = self.token_counter.is_large_codebase(total_tokens)
 
         # Always build and return the folder structure - if the AI called this
@@ -1422,7 +1433,12 @@ class MCPCodeIndexServer:
         )
 
         total_files = len(file_descriptions)
-
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
 
         # Create overview record
         overview = ProjectOverview(
@@ -1630,7 +1646,7 @@ class MCPCodeIndexServer:
         project_name = arguments["projectName"]
         folder_path = arguments["folderPath"]
 
-        logger.
+        logger.debug(
             "Processing find_similar_code request",
             extra={
                 "structured_data": {
```