mcp-code-indexer 4.2.18__py3-none-any.whl → 4.2.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -51,10 +51,8 @@ class CleanupManager:
         """
         cleanup_timestamp = int(time.time())
 
-        async with self.db_manager.get_write_connection_with_retry(
-            "mark_file_for_cleanup"
-        ) as db:
-            cursor = await db.execute(
+        async def operation(conn: Any) -> bool:
+            cursor = await conn.execute(
                 """
                 UPDATE file_descriptions
                 SET to_be_cleaned = ?
@@ -62,11 +60,13 @@ class CleanupManager:
                 """,
                 (cleanup_timestamp, project_id, file_path),
             )
-            await db.commit()
-
             # Check if any rows were affected
             return cursor.rowcount > 0
 
+        return await self.db_manager.execute_transaction_with_retry(
+            operation, "mark_file_for_cleanup"
+        )
+
     async def mark_files_for_cleanup(
         self, project_id: str, file_paths: List[str]
     ) -> int:
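This hunk and several below share one refactor: each `async with ... as db` block that committed manually becomes an inner `operation` callback handed to `execute_transaction_with_retry`, which owns BEGIN IMMEDIATE, commit, rollback, and retry. A minimal sketch of that contract, assuming aiosqlite and hypothetical names rather than the package's exact internals:

    import asyncio
    import sqlite3
    from typing import Any, Awaitable, Callable

    import aiosqlite

    async def execute_transaction_with_retry(
        db_path: str,
        operation: Callable[[aiosqlite.Connection], Awaitable[Any]],
        max_retries: int = 3,
    ) -> Any:
        # Run `operation` inside BEGIN IMMEDIATE; commit on success,
        # roll back and retry on transient lock errors.
        for attempt in range(max_retries):
            try:
                # isolation_level=None: manual transaction control.
                async with aiosqlite.connect(db_path, isolation_level=None) as conn:
                    await conn.execute("BEGIN IMMEDIATE")
                    try:
                        result = await operation(conn)
                        await conn.commit()
                        return result
                    except BaseException:
                        await conn.rollback()
                        raise
            except sqlite3.OperationalError as exc:
                if "locked" not in str(exc).lower() or attempt == max_retries - 1:
                    raise
                await asyncio.sleep(0.05 * (2 ** attempt))  # simple backoff

The call sites no longer commit: the callback only issues statements and returns a result, so commit/rollback/retry policy lives in one place.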
@@ -117,10 +117,8 @@ class CleanupManager:
         Returns:
             True if file was restored, False if file not found
         """
-        async with self.db_manager.get_write_connection_with_retry(
-            "restore_file_from_cleanup"
-        ) as db:
-            cursor = await db.execute(
+        async def operation(conn: Any) -> bool:
+            cursor = await conn.execute(
                 """
                 UPDATE file_descriptions
                 SET to_be_cleaned = NULL
@@ -128,10 +126,12 @@ class CleanupManager:
                 """,
                 (project_id, file_path),
             )
-            await db.commit()
-
             return cursor.rowcount > 0
 
+        return await self.db_manager.execute_transaction_with_retry(
+            operation, "restore_file_from_cleanup"
+        )
+
     async def get_files_to_be_cleaned(self, project_id: str) -> List[dict]:
         """
         Get list of files marked for cleanup in a project.
@@ -396,7 +396,7 @@ class DatabaseManager:
     async def get_immediate_transaction(
         self,
         operation_name: str = "immediate_transaction",
-        timeout_seconds: float = 10.0,
+        timeout_seconds: Optional[float] = None,
     ) -> AsyncIterator[aiosqlite.Connection]:
         """
         Get a database connection with BEGIN IMMEDIATE transaction and
@@ -407,8 +407,10 @@ class DatabaseManager:
 
         Args:
             operation_name: Name of the operation for monitoring
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
         """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
         import time
         acquire_start = time.monotonic()
        async with self.get_write_connection_with_retry(operation_name) as conn:
@@ -420,7 +422,7 @@ class DatabaseManager:
                # Start immediate transaction with timeout
                begin_start = time.monotonic()
                await asyncio.wait_for(
-                    conn.execute("BEGIN IMMEDIATE"), timeout=timeout_seconds
+                    conn.execute("BEGIN IMMEDIATE"), timeout=actual_timeout
                )
                begin_time = time.monotonic() - begin_start
                logger.debug(
@@ -436,23 +438,35 @@ class DatabaseManager:
            except asyncio.TimeoutError:
                logger.warning(
                    (
-                        f"Transaction timeout after {timeout_seconds}s for "
+                        f"Transaction timeout after {actual_timeout}s for "
                        f"{operation_name}"
                    ),
                    extra={
                        "structured_data": {
                            "transaction_timeout": {
                                "operation": operation_name,
-                                "timeout_seconds": timeout_seconds,
+                                "timeout_seconds": actual_timeout,
                            }
                        }
                    },
                )
-                await conn.rollback()
+                # Shield rollback from cancellation to prevent leaked transactions
+                await asyncio.shield(conn.rollback())
                raise
-            except Exception as e:
-                logger.error(f"Transaction failed for {operation_name}: {e}")
-                await conn.rollback()
+            except BaseException as e:
+                # Catch BaseException to handle asyncio.CancelledError and ensure
+                # proper rollback on task cancellation. Shield the rollback to
+                # prevent cancellation from interrupting cleanup.
+                if isinstance(e, asyncio.CancelledError):
+                    logger.warning(f"Transaction cancelled for {operation_name}")
+                else:
+                    logger.error(f"Transaction failed for {operation_name}: {e}")
+                try:
+                    await asyncio.shield(conn.rollback())
+                except Exception as rollback_error:
+                    logger.error(
+                        f"Rollback failed for {operation_name}: {rollback_error}"
+                    )
                raise
 
    async def execute_transaction_with_retry(
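The switch to `except BaseException` plus `asyncio.shield` addresses a real cancellation hazard: `asyncio.CancelledError` derives from `BaseException` on Python 3.8+, so a bare `except Exception` skips the rollback when the task is cancelled, and an unshielded `await conn.rollback()` in the handler can itself be interrupted by a second cancellation, leaking an open transaction. A self-contained sketch of the idiom (hypothetical names, any async connection exposing execute/commit/rollback):

    import asyncio

    async def run_in_transaction(conn, work):
        await conn.execute("BEGIN IMMEDIATE")
        try:
            result = await work(conn)
            await conn.commit()
            return result
        except BaseException:
            # shield() lets the rollback coroutine run to completion even if
            # this task is cancelled again while awaiting it; the outer await
            # still re-raises CancelledError afterwards.
            await asyncio.shield(conn.rollback())
            raise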
@@ -460,7 +474,7 @@ class DatabaseManager:
        operation_func: Callable[[aiosqlite.Connection], Any],
        operation_name: str = "transaction_operation",
        max_retries: int = 3,
-        timeout_seconds: float = 10.0,
+        timeout_seconds: Optional[float] = None,
    ) -> Any:
        """
        Execute a database operation within a transaction with automatic
@@ -475,7 +489,8 @@ class DatabaseManager:
            operation_name: Name of the operation for logging
            max_retries: Maximum retry attempts (overrides default retry
                executor config)
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
 
        Returns:
            Result from operation_func
@@ -489,6 +504,7 @@ class DatabaseManager:
                my_operation, "insert_data"
            )
        """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
 
        async def execute_transaction() -> Any:
            """Inner function to execute transaction - retried by executor."""
@@ -496,11 +512,11 @@ class DatabaseManager:
            start_time = time.monotonic()
            logger.debug(
                f"[{operation_name}] Starting transaction "
-                f"(timeout={timeout_seconds}s, pool_size={len(self._connection_pool)})"
+                f"(timeout={actual_timeout}s, pool_size={len(self._connection_pool)})"
            )
            try:
                async with self.get_immediate_transaction(
-                    operation_name, timeout_seconds
+                    operation_name, actual_timeout
                ) as conn:
                    lock_acquired_time = time.monotonic()
                    logger.debug(
@@ -523,7 +539,7 @@ class DatabaseManager:
                    if self._metrics_collector:
                        self._metrics_collector.record_operation(
                            operation_name,
-                            timeout_seconds * 1000,  # Convert to ms
+                            actual_timeout * 1000,  # Convert to ms
                            True,
                            len(self._connection_pool),
                        )
@@ -555,7 +571,7 @@ class DatabaseManager:
                if self._metrics_collector:
                    self._metrics_collector.record_operation(
                        operation_name,
-                        timeout_seconds * 1000,
+                        actual_timeout * 1000,
                        False,
                        len(self._connection_pool),
                    )
@@ -565,7 +581,7 @@ class DatabaseManager:
                elapsed = time.monotonic() - start_time
                logger.warning(
                    f"[{operation_name}] Timeout after {elapsed*1000:.1f}ms "
-                    f"waiting for database lock (timeout={timeout_seconds}s)"
+                    f"waiting for database lock (timeout={actual_timeout}s)"
                )
                if self._metrics_collector:
                    self._metrics_collector.record_locking_event(
@@ -605,7 +621,7 @@ class DatabaseManager:
                if self._metrics_collector:
                    self._metrics_collector.record_operation(
                        operation_name,
-                        timeout_seconds * 1000,
+                        actual_timeout * 1000,
                        False,
                        len(self._connection_pool),
                    )
@@ -624,7 +640,7 @@ class DatabaseManager:
                if self._metrics_collector:
                    self._metrics_collector.record_operation(
                        operation_name,
-                        timeout_seconds * 1000,
+                        actual_timeout * 1000,
                        False,
                        len(self._connection_pool),
                    )
@@ -634,8 +650,8 @@ class DatabaseManager:
 
    async def create_project(self, project: Project) -> None:
        """Create a new project record."""
-        async with self.get_write_connection_with_retry("create_project") as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                """
                INSERT INTO projects (id, name, aliases, created, last_accessed)
                VALUES (?, ?, ?, ?, ?)
@@ -648,8 +664,9 @@ class DatabaseManager:
                    project.last_accessed,
                ),
            )
-            await db.commit()
-            logger.debug(f"Created project: {project.id}")
+
+        await self.execute_transaction_with_retry(operation, "create_project")
+        logger.debug(f"Created project: {project.id}")
 
    async def get_project(self, project_id: str) -> Optional[Project]:
        """Get project by ID."""
@@ -764,19 +781,20 @@ class DatabaseManager:
 
    async def update_project_access_time(self, project_id: str) -> None:
        """Update the last accessed time for a project."""
-        async with self.get_write_connection_with_retry(
-            "update_project_access_time"
-        ) as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                "UPDATE projects SET last_accessed = ? WHERE id = ?",
                (datetime.utcnow(), project_id),
            )
-            await db.commit()
+
+        await self.execute_transaction_with_retry(
+            operation, "update_project_access_time"
+        )
 
    async def update_project(self, project: Project) -> None:
        """Update an existing project record."""
-        async with self.get_write_connection_with_retry("update_project") as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                """
                UPDATE projects
                SET name = ?, aliases = ?, last_accessed = ?
@@ -789,27 +807,28 @@ class DatabaseManager:
                    project.id,
                ),
            )
-            await db.commit()
-            logger.debug(f"Updated project: {project.id}")
+
+        await self.execute_transaction_with_retry(operation, "update_project")
+        logger.debug(f"Updated project: {project.id}")
 
    async def set_project_vector_mode(self, project_id: str, enabled: bool) -> None:
        """Set the vector_mode for a specific project."""
-        async with self.get_write_connection_with_retry(
-            "set_project_vector_mode"
-        ) as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                "UPDATE projects SET vector_mode = ? WHERE id = ?",
                (int(enabled), project_id),
            )
 
            # Check if the project was actually updated
-            cursor = await db.execute("SELECT changes()")
+            cursor = await conn.execute("SELECT changes()")
            changes = await cursor.fetchone()
            if changes[0] == 0:
                raise DatabaseError(f"Project not found: {project_id}")
 
-            await db.commit()
-            logger.debug(f"Set vector_mode={enabled} for project: {project_id}")
+        await self.execute_transaction_with_retry(
+            operation, "set_project_vector_mode"
+        )
+        logger.debug(f"Set vector_mode={enabled} for project: {project_id}")
 
    async def get_all_projects(self) -> List[Project]:
        """Get all projects in the database."""
@@ -1080,23 +1099,25 @@ class DatabaseManager:
        """Cache token count with TTL."""
        expires = datetime.utcnow() + timedelta(hours=ttl_hours)
 
-        async with self.get_write_connection() as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                """
                INSERT OR REPLACE INTO token_cache (cache_key, token_count, expires)
                VALUES (?, ?, ?)
                """,
                (cache_key, token_count, expires),
            )
-            await db.commit()
+
+        await self.execute_transaction_with_retry(operation, "cache_token_count")
 
    async def cleanup_expired_cache(self) -> None:
        """Remove expired cache entries."""
-        async with self.get_write_connection() as db:
-            await db.execute(
+        async def operation(conn: aiosqlite.Connection) -> None:
+            await conn.execute(
                "DELETE FROM token_cache WHERE expires < ?", (datetime.utcnow(),)
            )
-            await db.commit()
+
+        await self.execute_transaction_with_retry(operation, "cleanup_expired_cache")
 
    # Utility operations
 
@@ -1316,9 +1337,9 @@ class DatabaseManager:
        Returns:
            Number of projects removed
        """
-        async with self.get_write_connection() as db:
+        async def operation(conn: aiosqlite.Connection) -> int:
            # Find projects with no descriptions and no overview
-            cursor = await db.execute(
+            cursor = await conn.execute(
                """
                SELECT p.id, p.name
                FROM projects p
@@ -1339,14 +1360,17 @@ class DatabaseManager:
 
                project_name = project["name"]
                # Remove from projects table (cascading will handle related data)
-                await db.execute("DELETE FROM projects WHERE id = ?", (project_id,))
+                await conn.execute("DELETE FROM projects WHERE id = ?", (project_id,))
                removed_count += 1
 
                logger.info(f"Removed empty project: {project_name} (ID: {project_id})")
 
-            await db.commit()
            return removed_count
 
+        return await self.execute_transaction_with_retry(
+            operation, "cleanup_empty_projects"
+        )
+
    async def get_project_map_data(
        self, project_identifier: str
    ) -> Optional[Dict[str, Any]]:
@@ -191,6 +191,7 @@ def classify_sqlite_error(error: Exception, operation_name: str = "") -> Databas
            "database is locked",
            "sqlite_locked",
            "attempt to write a readonly database",
+            "timeout waiting for database lock",
        ]
    ):
        lock_type = (
@@ -297,6 +298,7 @@ def is_retryable_error(error: Exception) -> bool:
        "sqlite_busy",
        "sqlite_locked",
        "cannot start a transaction within a transaction",
+        "timeout waiting for database lock",
    ]
 
    return any(pattern in error_message for pattern in retryable_patterns)
@@ -354,6 +354,7 @@ class RetryExecutor:
            "cannot start a transaction within a transaction",
            "sqlite_busy",
            "sqlite_locked",
+            "timeout waiting for database lock",
        ]
 
        return any(msg in error_message for msg in retryable_messages)
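All three lists gain the same entry because retry decisions are made by substring match against the lowercased error message; without it, the lock-wait timeout failures surfaced by the transaction changes above would be classified as fatal instead of transient. The shared shape, as a rough sketch:

    RETRYABLE_PATTERNS = [
        "sqlite_busy",
        "sqlite_locked",
        "cannot start a transaction within a transaction",
        "timeout waiting for database lock",
    ]

    def is_retryable(error: Exception) -> bool:
        # Case-insensitive substring match against known transient failures.
        message = str(error).lower()
        return any(pattern in message for pattern in RETRYABLE_PATTERNS)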
@@ -499,3 +499,13 @@ class FileScanner:
        """
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self.find_missing_files, existing_paths)
+
+    async def is_valid_project_directory_async(self) -> bool:
+        """
+        Async version of is_valid_project_directory running in a thread.
+
+        Returns:
+            True if the directory exists and is accessible
+        """
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, self.is_valid_project_directory)
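The new method mirrors `find_missing_files_async` directly above it: wrap a blocking filesystem call in `loop.run_in_executor` so the event loop stays free. The recipe in isolation (a hedged sketch with a hypothetical class, not the package's FileScanner):

    import asyncio
    from pathlib import Path

    class Scanner:
        def __init__(self, root: Path) -> None:
            self.root = root

        def is_valid_project_directory(self) -> bool:
            # Blocking stat() calls; safe inside a worker thread.
            return self.root.exists() and self.root.is_dir()

        async def is_valid_project_directory_async(self) -> bool:
            loop = asyncio.get_running_loop()
            # None selects the loop's default ThreadPoolExecutor.
            return await loop.run_in_executor(None, self.is_valid_project_directory)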
@@ -34,16 +34,21 @@ def setup_logging(
    Returns:
        Configured root logger
    """
-    # Get root logger
+    # Get root logger - set to DEBUG so all logs reach handlers.
+    # Each handler filters to its own level.
    root_logger = logging.getLogger()
-    root_logger.setLevel(getattr(logging, log_level.upper()))
+    root_logger.setLevel(logging.DEBUG)
 
    # Clear existing handlers
    root_logger.handlers.clear()
 
    # Console handler (stderr to avoid interfering with MCP stdout)
    console_handler = logging.StreamHandler(sys.stderr)
-    console_handler.setLevel(getattr(logging, log_level.upper()))
+    # Force console logging to at least WARNING to prevent stderr buffer blocking
+    # when MCP clients don't consume stderr fast enough. File logging captures
+    # everything (DEBUG+) for detailed diagnostics.
+    requested_level = getattr(logging, log_level.upper())
+    console_handler.setLevel(max(requested_level, logging.WARNING))
 
    # Use structured formatter for all handlers
    structured_formatter = StructuredFormatter()
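The root logger now passes everything through and each handler enforces its own floor: stderr never drops below WARNING (a stderr pipe backed up by a slow MCP client could otherwise block the process), while a file handler can keep full DEBUG detail. Condensed, with a hypothetical log-file path:

    import logging
    import sys

    def setup_logging(log_level: str = "INFO") -> logging.Logger:
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)  # let every record reach the handlers
        root.handlers.clear()

        requested = getattr(logging, log_level.upper())

        console = logging.StreamHandler(sys.stderr)
        # Clamp stderr to WARNING, or stricter if the user asked for less.
        console.setLevel(max(requested, logging.WARNING))
        root.addHandler(console)

        file_handler = logging.FileHandler("mcp-debug.log")  # hypothetical path
        file_handler.setLevel(logging.DEBUG)  # full detail goes to disk
        root.addHandler(file_handler)
        return root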
@@ -240,10 +240,10 @@ class MCPCodeIndexServer:
        try:
            result = json.loads(repaired)
            if isinstance(result, dict):
-                logger.info(
+                logger.debug(
                    f"Successfully repaired JSON. Original: {json_str[:100]}..."
                )
-                logger.info(f"Repaired: {repaired[:100]}...")
+                logger.debug(f"Repaired: {repaired[:100]}...")
                return result
            else:
                raise ValueError(
@@ -793,8 +793,8 @@ class MCPCodeIndexServer:
 
        start_time = time.time()
 
-        logger.info(f"=== MCP Tool Call: {name} ===")
-        logger.info(f"Arguments: {', '.join(arguments.keys())}")
+        logger.debug(f"=== MCP Tool Call: {name} ===")
+        logger.debug(f"Arguments: {', '.join(arguments.keys())}")
 
        # Map tool names to handler methods
        tool_handlers = {
@@ -828,7 +828,7 @@ class MCPCodeIndexServer:
            result = await wrapped_handler(arguments)
 
            elapsed_time = time.time() - start_time
-            logger.info(
+            logger.debug(
                f"MCP Tool '{name}' completed successfully in {elapsed_time:.2f}s"
            )
 
@@ -887,7 +887,7 @@ class MCPCodeIndexServer:
            if datetime.utcnow() - project.last_accessed > timedelta(minutes=5):
                await db_manager.update_project_access_time(project.id)
 
-            logger.info(
+            logger.debug(
                f"Using existing local project: {project.name} (ID: {project.id})"
            )
            return project.id
@@ -904,7 +904,7 @@ class MCPCodeIndexServer:
            last_accessed=datetime.utcnow(),
        )
        await db_manager.create_project(project)
-        logger.info(
+        logger.debug(
            f"Created new local project: {project_name} (ID: {project_id})"
        )
        return project_id
@@ -932,7 +932,7 @@ class MCPCodeIndexServer:
            last_accessed=datetime.utcnow(),
        )
        await db_manager.create_project(project)
-        logger.info(
+        logger.debug(
            f"Created new global project: {normalized_name} (ID: {project_id})"
        )
 
@@ -975,7 +975,7 @@ class MCPCodeIndexServer:
            if score > best_score:
                best_score = score
                best_match = project
-                logger.info(
+                logger.debug(
                    f"Match for project {project.name} "
                    f"(score: {score}, factors: {match_factors})"
                )
@@ -983,7 +983,7 @@ class MCPCodeIndexServer:
            # If only name matches, check file similarity for potential matches
            elif score == 1 and "name" in match_factors:
                if await self._check_file_similarity(project, folder_path):
-                    logger.info(
+                    logger.debug(
                        f"File similarity match for project {project.name} "
                        f"(factor: {match_factors[0]})"
                    )
@@ -1002,10 +1002,10 @@ class MCPCodeIndexServer:
        try:
            # Get files currently in the folder
            scanner = FileScanner(Path(folder_path))
-            if not scanner.is_valid_project_directory():
+            if not await scanner.is_valid_project_directory_async():
                return False
 
-            current_files = scanner.scan_directory()
+            current_files = await scanner.scan_directory_async()
            current_basenames = {f.name for f in current_files}
 
            if not current_basenames:
@@ -1060,7 +1060,7 @@ class MCPCodeIndexServer:
                project_aliases.append(folder_path)
                project.aliases = project_aliases
                should_update = True
-                logger.info(
+                logger.debug(
                    f"Added new folder alias to project {project.name}: {folder_path}"
                )
 
@@ -1098,17 +1098,17 @@ class MCPCodeIndexServer:
        self, arguments: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Handle update_file_description tool calls."""
-        logger.info(f"Updating file description for: {arguments['filePath']}")
-        logger.info(f"Project: {arguments.get('projectName', 'Unknown')}")
+        logger.debug(f"Updating file description for: {arguments['filePath']}")
+        logger.debug(f"Project: {arguments.get('projectName', 'Unknown')}")
 
        description_length = len(arguments.get("description", ""))
-        logger.info(f"Description length: {description_length} characters")
+        logger.debug(f"Description length: {description_length} characters")
 
        folder_path = arguments["folderPath"]
        db_manager = await self.db_factory.get_database_manager(folder_path)
        project_id = await self._get_or_create_project_id(arguments)
 
-        logger.info(f"Resolved project_id: {project_id}")
+        logger.debug(f"Resolved project_id: {project_id}")
 
        file_desc = FileDescription(
            id=None,  # Will be set by database
@@ -1137,17 +1137,17 @@ class MCPCodeIndexServer:
        self, arguments: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Handle check_codebase_size tool calls."""
-        logger.info(
+        logger.debug(
            f"Checking codebase size for: {arguments.get('projectName', 'Unknown')}"
        )
-        logger.info(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
+        logger.debug(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
 
        folder_path = arguments["folderPath"]
        db_manager = await self.db_factory.get_database_manager(folder_path)
        project_id = await self._get_or_create_project_id(arguments)
        folder_path_obj = Path(folder_path)
 
-        logger.info(f"Resolved project_id: {project_id}")
+        logger.debug(f"Resolved project_id: {project_id}")
 
        # Run cleanup if needed (respects 30-minute cooldown)
        cleaned_up_count = await self._run_cleanup_if_needed(
@@ -1155,26 +1155,31 @@ class MCPCodeIndexServer:
        )
 
        # Get file descriptions for this project (after cleanup)
-        logger.info("Retrieving file descriptions...")
+        logger.debug("Retrieving file descriptions...")
        file_descriptions = await db_manager.get_all_file_descriptions(
            project_id=project_id
        )
-        logger.info(f"Found {len(file_descriptions)} file descriptions")
+        logger.debug(f"Found {len(file_descriptions)} file descriptions")
 
        # Use provided token limit or fall back to server default
        token_limit = arguments.get("tokenLimit", self.token_limit)
 
-        # Calculate total tokens for descriptions
-        logger.info("Calculating total token count...")
-        descriptions_tokens = self.token_counter.calculate_codebase_tokens(
+        # Calculate total tokens for descriptions (offload to executor to avoid blocking)
+        logger.debug("Calculating total token count...")
+        loop = asyncio.get_running_loop()
+        descriptions_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
            file_descriptions
        )
 
-        # Get overview tokens if available
+        # Get overview tokens if available (offload to executor to avoid blocking)
        overview = await db_manager.get_project_overview(project_id)
        overview_tokens = 0
        if overview and overview.overview:
-            overview_tokens = self.token_counter.count_tokens(overview.overview)
+            overview_tokens = await loop.run_in_executor(
+                None, self.token_counter.count_tokens, overview.overview
+            )
 
        total_tokens = descriptions_tokens + overview_tokens
        is_large = total_tokens > token_limit
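Token counting walks every description through the tokenizer, which is pure CPU work; awaiting it via `run_in_executor` keeps the event loop free to serve other MCP requests meanwhile. Note that `run_in_executor` forwards extra positional arguments to the callable, so no lambda or `functools.partial` is needed. A minimal sketch of the call shape:

    import asyncio

    async def count_tokens_async(token_counter, file_descriptions):
        loop = asyncio.get_running_loop()
        # run_in_executor(executor, func, *args): None picks the loop's
        # default thread pool, and file_descriptions is passed straight
        # through to the blocking callable.
        return await loop.run_in_executor(
            None,
            token_counter.calculate_codebase_tokens,
            file_descriptions,
        )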
@@ -1192,16 +1197,16 @@ class MCPCodeIndexServer:
        else:
            recommendation = "use_search"
 
-        logger.info(
+        logger.debug(
            f"Codebase analysis complete: {total_tokens} tokens total "
            f"({descriptions_tokens} descriptions + {overview_tokens} overview), "
            f"{len(file_descriptions)} files"
        )
-        logger.info(
+        logger.debug(
            f"Size assessment: {'LARGE' if is_large else 'SMALL'} "
            f"(limit: {token_limit})"
        )
-        logger.info(f"Recommendation: {recommendation}")
+        logger.debug(f"Recommendation: {recommendation}")
 
        return {
            "fileDescriptionTokens": descriptions_tokens,
@@ -1218,31 +1223,31 @@ class MCPCodeIndexServer:
        self, arguments: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Handle find_missing_descriptions tool calls."""
-        logger.info(
+        logger.debug(
            f"Finding missing descriptions for: "
            f"{arguments.get('projectName', 'Unknown')}"
        )
-        logger.info(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
+        logger.debug(f"Folder path: {arguments.get('folderPath', 'Unknown')}")
 
        folder_path = arguments["folderPath"]
        db_manager = await self.db_factory.get_database_manager(folder_path)
        project_id = await self._get_or_create_project_id(arguments)
        folder_path_obj = Path(folder_path)
 
-        logger.info(f"Resolved project_id: {project_id}")
+        logger.debug(f"Resolved project_id: {project_id}")
 
        # Get existing file descriptions
-        logger.info("Retrieving existing file descriptions...")
+        logger.debug("Retrieving existing file descriptions...")
        existing_descriptions = await db_manager.get_all_file_descriptions(
            project_id=project_id
        )
        existing_paths = {desc.file_path for desc in existing_descriptions}
-        logger.info(f"Found {len(existing_paths)} existing descriptions")
+        logger.debug(f"Found {len(existing_paths)} existing descriptions")
 
        # Scan directory for files
-        logger.info(f"Scanning project directory: {folder_path_obj}")
+        logger.debug(f"Scanning project directory: {folder_path_obj}")
        scanner = FileScanner(folder_path_obj)
-        if not scanner.is_valid_project_directory():
+        if not await scanner.is_valid_project_directory_async():
            logger.error(
                f"Invalid or inaccessible project directory: {folder_path_obj}"
            )
@@ -1250,27 +1255,28 @@ class MCPCodeIndexServer:
                "error": f"Invalid or inaccessible project directory: {folder_path_obj}"
            }
 
-        missing_files = scanner.find_missing_files(existing_paths)
+        missing_files = await scanner.find_missing_files_async(existing_paths)
        missing_paths = [scanner.get_relative_path(f) for f in missing_files]
 
-        logger.info(f"Found {len(missing_paths)} files without descriptions")
+        logger.debug(f"Found {len(missing_paths)} files without descriptions")
 
        # Apply randomization if specified
        randomize = arguments.get("randomize", False)
        if randomize:
            random.shuffle(missing_paths)
-            logger.info("Randomized file order for parallel processing")
+            logger.debug("Randomized file order for parallel processing")
 
        # Apply limit if specified
        limit = arguments.get("limit")
        total_missing = len(missing_paths)
        if limit is not None and isinstance(limit, int) and limit > 0:
            missing_paths = missing_paths[:limit]
-            logger.info(f"Applied limit {limit}, returning {len(missing_paths)} files")
+            logger.debug(f"Applied limit {limit}, returning {len(missing_paths)} files")
 
-        # Get project stats
-        stats = scanner.get_project_stats()
-        logger.info(f"Project stats: {stats.get('total_files', 0)} total files")
+        # Get project stats (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        stats = await loop.run_in_executor(None, scanner.get_project_stats)
+        logger.debug(f"Project stats: {stats.get('total_files', 0)} total files")
 
        return {
            "missingFiles": missing_paths,
@@ -1325,8 +1331,13 @@ class MCPCodeIndexServer:
            project_id=project_id
        )
 
-        # Calculate total tokens
-        total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+        # Calculate total tokens (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
        is_large = self.token_counter.is_large_codebase(total_tokens)
 
        # Always build and return the folder structure - if the AI called this
@@ -1422,7 +1433,12 @@ class MCPCodeIndexServer:
        )
 
        total_files = len(file_descriptions)
-        total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
 
        # Create overview record
        overview = ProjectOverview(
@@ -1630,7 +1646,7 @@ class MCPCodeIndexServer:
        project_name = arguments["projectName"]
        folder_path = arguments["folderPath"]
 
-        logger.info(
+        logger.debug(
            "Processing find_similar_code request",
            extra={
                "structured_data": {
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-code-indexer
-Version: 4.2.18
+Version: 4.2.20
 Summary: MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews.
 License: MIT
 License-File: LICENSE
@@ -49,8 +49,8 @@ Description-Content-Type: text/markdown
 
 # MCP Code Indexer 🚀
 
-[![PyPI version](https://badge.fury.io/py/mcp-code-indexer.svg?65)](https://badge.fury.io/py/mcp-code-indexer)
-[![Python](https://img.shields.io/pypi/pyversions/mcp-code-indexer.svg?65)](https://pypi.org/project/mcp-code-indexer/)
+[![PyPI version](https://badge.fury.io/py/mcp-code-indexer.svg?67)](https://badge.fury.io/py/mcp-code-indexer)
+[![Python](https://img.shields.io/pypi/pyversions/mcp-code-indexer.svg?67)](https://pypi.org/project/mcp-code-indexer/)
 [![License](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
 
 A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
@@ -2,23 +2,23 @@ mcp_code_indexer/__init__.py,sha256=IG3xW6SGlqnOCnGOCio_05IxTXWRWqaJF4y25ChbYMA,
 mcp_code_indexer/__main__.py,sha256=4Edinoe0ug43hobuLYcjTmGp2YJnlFYN4_8iKvUBJ0Q,213
 mcp_code_indexer/ask_handler.py,sha256=EiobL_Daii7wwcHvwDDtxLvqjZvhCJWvz_PIiRm14V4,9106
 mcp_code_indexer/claude_api_handler.py,sha256=_PVhHxwVyY3ojNp-tIGp73nWD8MYMmIlCMIwKNMJXi8,13845
-mcp_code_indexer/cleanup_manager.py,sha256=-Nqzy49fbjl7q_nIpVs__HV1IAI8q3AY8IH_fIIg5gs,9785
+mcp_code_indexer/cleanup_manager.py,sha256=40YjboL0AzA7K79WPd83oNQFxVc5g4Fg8Oe7myP4cBo,9835
 mcp_code_indexer/commands/__init__.py,sha256=141U722dS_NnFTZyxTPipzhXKdU21kCv-mcrN4djyHo,45
 mcp_code_indexer/commands/makelocal.py,sha256=T_44so96jcs1FNlft9E3nAq0LlOzQLhjLd8P31Myfr4,9140
 mcp_code_indexer/data/stop_words_english.txt,sha256=feRGP8WG5hQPo-wZN5ralJiSv1CGw4h3010NBJnJ0Z8,6344
 mcp_code_indexer/database/__init__.py,sha256=aPq_aaRp0aSwOBIq9GkuMNjmLxA411zg2vhdrAuHm-w,38
 mcp_code_indexer/database/connection_health.py,sha256=jZr3tCbfjUJujdXe_uxtm1N4c31dMV4euiSY4ulamOE,25497
-mcp_code_indexer/database/database.py,sha256=uDtghexmwJjqq_H8H4MG8VZyJpGIySqkdrgGWQ7FVzA,60327
+mcp_code_indexer/database/database.py,sha256=sxl9OMmPo7bQVDrJOGpPLqshOWF-5xeEr2A-ZxdhG0s,61716
 mcp_code_indexer/database/database_factory.py,sha256=VMw0tlutGgZoTI7Q_PCuFy5zAimq2xuMtDFAlF_FKtc,4316
-mcp_code_indexer/database/exceptions.py,sha256=zciu7fDwF8y0Z4tkzTBFPPvXCvdnEJiA-Wd-cBLZBWw,10473
+mcp_code_indexer/database/exceptions.py,sha256=DieJQ2WVH7_CLmFhwDFdRH1XMED0WpZwLRWS1kovnLg,10567
 mcp_code_indexer/database/models.py,sha256=w1U9zMGNt0LQeCiifYeXKW_Cia9BKV5uPChbOve-FZY,13467
 mcp_code_indexer/database/path_resolver.py,sha256=1Ubx6Ly5F2dnvhbdN3tqyowBHslABXpoA6wgL4BQYGo,3461
-mcp_code_indexer/database/retry_executor.py,sha256=r7eKn_xDc6hKz9qs9z9Dg8gyq4uZgnyrgFmQFTyDhdo,14409
+mcp_code_indexer/database/retry_executor.py,sha256=cQ8o6sh6ks7AOVo0CKJ7GQr7-GbOOHDq_g784m2Tz78,14458
 mcp_code_indexer/deepask_handler.py,sha256=qI9h_Me5WQAbt3hzzDG8XDBMZlnvx-I9R7OsmO_o8aA,18497
 mcp_code_indexer/error_handler.py,sha256=ylciEM-cR7E8Gmd8cfh5olcllJm0FnaYBGH86yayFic,12530
-mcp_code_indexer/file_scanner.py,sha256=OCLnzPTneIiMtGcV3NB5qvnZrO3zxCqPEZXtCk75dfA,15178
+mcp_code_indexer/file_scanner.py,sha256=r8WR1Or1pbB63PmUIno5XYwBrmYg9xr37Ef6zmtt4yQ,15538
 mcp_code_indexer/git_hook_handler.py,sha256=sTtZV3-Yy1Evt06R5NZclELeepM4Ia9OQoR2O6BK3Hk,45517
-mcp_code_indexer/logging_config.py,sha256=NNB5iKCc-Hdognf-UdjuEKB9H_e4UIp3QTVnf69Rc2k,10504
+mcp_code_indexer/logging_config.py,sha256=yTnmUXpBe7KujMFzWopiYBep29rpj3p9VvpqrNEgkZs,10843
 mcp_code_indexer/main.py,sha256=byM0Y9EwDa0616dEkx2p_1rUdJmDNeKAG41o5_AJago,41084
 mcp_code_indexer/middleware/__init__.py,sha256=UCEPzOlZldlqFzYEfrXw1HvCDvY1jpLvyaDGUzVr2aw,368
 mcp_code_indexer/middleware/auth.py,sha256=4HkHMDZBNsyPA1VE8qF7pRNKbqG4xIDZjllENbgynxI,7258
@@ -33,7 +33,7 @@ mcp_code_indexer/migrations/005_remove_git_remotes.sql,sha256=vT84AaV1hyN4zq5W67
 mcp_code_indexer/migrations/006_vector_mode.sql,sha256=kN-UBPGoagqtpxpGEjdz-V3hevPAXxAdNmxF4iIPsY8,7448
 mcp_code_indexer/query_preprocessor.py,sha256=vi23sK2ffs4T5PGY7lHrbCBDL421AlPz2dldqX_3JKA,5491
 mcp_code_indexer/server/__init__.py,sha256=16xMcuriUOBlawRqWNBk6niwrvtv_JD5xvI36X1Vsmk,41
-mcp_code_indexer/server/mcp_server.py,sha256=nkm7R_lOnD_XDxk1mEiZgrT6D-G-QIt0g34VHNb1p5g,84424
+mcp_code_indexer/server/mcp_server.py,sha256=I_oHoUv00ZBrikjBxHwoOvuHIeCGMUcKxXOyT5TAge0,85150
 mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
 mcp_code_indexer/token_counter.py,sha256=e6WsyCEWMMSkMwLbcVtr5e8vEqh-kFqNmiJErCNdqHE,8220
 mcp_code_indexer/tools/__init__.py,sha256=m01mxML2UdD7y5rih_XNhNSCMzQTz7WQ_T1TeOcYlnE,49
@@ -65,8 +65,8 @@ mcp_code_indexer/vector_mode/services/vector_mode_tools_service.py,sha256=K1_STy
 mcp_code_indexer/vector_mode/services/vector_storage_service.py,sha256=JI3VUc2mG8xZ_YqOvfKJivuMi4imeBLr2UFVrWgDWhk,21193
 mcp_code_indexer/vector_mode/types.py,sha256=M4lUF43FzjiVUezoRqozx_u0g1-xrX9qcRcRn-u65yw,1222
 mcp_code_indexer/vector_mode/utils.py,sha256=XtHrpOw0QJ0EjdzJ85jrbkmHy8Slkq_t7hz-q4RP-10,1283
-mcp_code_indexer-4.2.18.dist-info/METADATA,sha256=KSfwzQpXfijIn6qqogm57T7r5mINsTeVhr0wpD7YL_E,27689
-mcp_code_indexer-4.2.18.dist-info/WHEEL,sha256=kJCRJT_g0adfAJzTx2GUMmS80rTJIVHRCfG0DQgLq3o,88
-mcp_code_indexer-4.2.18.dist-info/entry_points.txt,sha256=UABj7HZ0mC6rvF22gxaz2LLNLGQShTrFmp5u00iUtvo,67
-mcp_code_indexer-4.2.18.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
-mcp_code_indexer-4.2.18.dist-info/RECORD,,
+mcp_code_indexer-4.2.20.dist-info/METADATA,sha256=K9Ku7UI9xcu1HU9WEqlLCacCWmfRONU1YgdZV5kxpzo,27689
+mcp_code_indexer-4.2.20.dist-info/WHEEL,sha256=kJCRJT_g0adfAJzTx2GUMmS80rTJIVHRCfG0DQgLq3o,88
+mcp_code_indexer-4.2.20.dist-info/entry_points.txt,sha256=UABj7HZ0mC6rvF22gxaz2LLNLGQShTrFmp5u00iUtvo,67
+mcp_code_indexer-4.2.20.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
+mcp_code_indexer-4.2.20.dist-info/RECORD,,