mcp-code-indexer 4.2.17__py3-none-any.whl → 4.2.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -396,7 +396,7 @@ class DatabaseManager:
     async def get_immediate_transaction(
         self,
         operation_name: str = "immediate_transaction",
-        timeout_seconds: float = 10.0,
+        timeout_seconds: Optional[float] = None,
     ) -> AsyncIterator[aiosqlite.Connection]:
         """
         Get a database connection with BEGIN IMMEDIATE transaction and
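
Note: the signature change above swaps the hard-coded 10-second default for None, which the method body resolves to the manager-wide self.timeout. A minimal sketch of that resolution pattern, with a hypothetical resolve_timeout helper standing in for the inline expression the package actually uses:

from typing import Optional

class DatabaseManager:
    def __init__(self, timeout: float = 10.0) -> None:
        self.timeout = timeout  # manager-wide default, overridable per call

    def resolve_timeout(self, timeout_seconds: Optional[float] = None) -> float:
        # None means "use the configured default"; an explicit value wins
        return timeout_seconds if timeout_seconds is not None else self.timeout

db = DatabaseManager(timeout=30.0)
assert db.resolve_timeout() == 30.0    # falls back to self.timeout
assert db.resolve_timeout(5.0) == 5.0  # explicit override still honored
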
@@ -407,27 +407,45 @@ class DatabaseManager:
 
         Args:
             operation_name: Name of the operation for monitoring
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
         """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
+        import time
+        acquire_start = time.monotonic()
         async with self.get_write_connection_with_retry(operation_name) as conn:
+            write_lock_time = time.monotonic() - acquire_start
+            logger.debug(
+                f"[{operation_name}] Write lock acquired in {write_lock_time*1000:.1f}ms"
+            )
             try:
                 # Start immediate transaction with timeout
+                begin_start = time.monotonic()
                 await asyncio.wait_for(
-                    conn.execute("BEGIN IMMEDIATE"), timeout=timeout_seconds
+                    conn.execute("BEGIN IMMEDIATE"), timeout=actual_timeout
+                )
+                begin_time = time.monotonic() - begin_start
+                logger.debug(
+                    f"[{operation_name}] BEGIN IMMEDIATE completed in {begin_time*1000:.1f}ms"
                 )
                 yield conn
+                commit_start = time.monotonic()
                 await conn.commit()
+                commit_time = time.monotonic() - commit_start
+                logger.debug(
+                    f"[{operation_name}] COMMIT completed in {commit_time*1000:.1f}ms"
+                )
             except asyncio.TimeoutError:
                 logger.warning(
                     (
-                        f"Transaction timeout after {timeout_seconds}s for "
+                        f"Transaction timeout after {actual_timeout}s for "
                         f"{operation_name}"
                     ),
                     extra={
                         "structured_data": {
                             "transaction_timeout": {
                                 "operation": operation_name,
-                                "timeout_seconds": timeout_seconds,
+                                "timeout_seconds": actual_timeout,
                             }
                         }
                     },
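
Note: the added instrumentation brackets each transaction phase (write-lock acquisition, BEGIN IMMEDIATE, COMMIT) with time.monotonic() timestamps and debug logs. The package does this inline; the sketch below factors the same measure-and-log pattern into a reusable helper (timed_phase is illustrative, not part of the package):

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed_phase(operation_name: str, phase: str):
    # time.monotonic() never goes backwards, so deltas survive clock changes
    start = time.monotonic()
    try:
        yield
    finally:
        elapsed_ms = (time.monotonic() - start) * 1000
        logger.debug(f"[{operation_name}] {phase} completed in {elapsed_ms:.1f}ms")

logging.basicConfig(level=logging.DEBUG)
with timed_phase("insert_data", "COMMIT"):
    time.sleep(0.01)  # stand-in for the awaited database call
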
@@ -444,7 +462,7 @@ class DatabaseManager:
         operation_func: Callable[[aiosqlite.Connection], Any],
         operation_name: str = "transaction_operation",
         max_retries: int = 3,
-        timeout_seconds: float = 10.0,
+        timeout_seconds: Optional[float] = None,
     ) -> Any:
         """
         Execute a database operation within a transaction with automatic
@@ -459,7 +477,8 @@ class DatabaseManager:
             operation_name: Name of the operation for logging
             max_retries: Maximum retry attempts (overrides default retry
                 executor config)
-            timeout_seconds: Transaction timeout in seconds
+            timeout_seconds: Transaction timeout in seconds (defaults to
+                self.timeout if None)
 
         Returns:
             Result from operation_func
@@ -473,20 +492,42 @@ class DatabaseManager:
                 my_operation, "insert_data"
             )
         """
+        actual_timeout = timeout_seconds if timeout_seconds is not None else self.timeout
 
         async def execute_transaction() -> Any:
             """Inner function to execute transaction - retried by executor."""
+            import time
+            start_time = time.monotonic()
+            logger.debug(
+                f"[{operation_name}] Starting transaction "
+                f"(timeout={actual_timeout}s, pool_size={len(self._connection_pool)})"
+            )
             try:
                 async with self.get_immediate_transaction(
-                    operation_name, timeout_seconds
+                    operation_name, actual_timeout
                 ) as conn:
+                    lock_acquired_time = time.monotonic()
+                    logger.debug(
+                        f"[{operation_name}] Lock acquired in "
+                        f"{(lock_acquired_time - start_time)*1000:.1f}ms"
+                    )
                     result = await operation_func(conn)
+                    exec_time = time.monotonic()
+                    logger.debug(
+                        f"[{operation_name}] Operation executed in "
+                        f"{(exec_time - lock_acquired_time)*1000:.1f}ms"
+                    )
 
                     # Record successful operation metrics
+                    total_time = time.monotonic() - start_time
+                    logger.debug(
+                        f"[{operation_name}] Transaction completed successfully "
+                        f"in {total_time*1000:.1f}ms"
+                    )
                     if self._metrics_collector:
                         self._metrics_collector.record_operation(
                             operation_name,
-                            timeout_seconds * 1000,  # Convert to ms
+                            actual_timeout * 1000,  # Convert to ms
                             True,
                             len(self._connection_pool),
                         )
@@ -494,29 +535,42 @@ class DatabaseManager:
                     return result
 
             except aiosqlite.OperationalError as e:
-                # Record locking event for metrics
+                elapsed = time.monotonic() - start_time
                 error_msg = str(e).lower()
+                logger.debug(
+                    f"[{operation_name}] OperationalError after {elapsed*1000:.1f}ms: {e}"
+                )
                 if self._metrics_collector and "locked" in error_msg:
                     self._metrics_collector.record_locking_event(operation_name, str(e))
 
                 # For retryable errors (locked/busy), re-raise the ORIGINAL error
                 # so tenacity can retry. Only classify non-retryable errors.
                 if "locked" in error_msg or "busy" in error_msg:
+                    logger.debug(
+                        f"[{operation_name}] Retryable error, will retry: {e}"
+                    )
                     raise  # Let tenacity retry this
 
                 # Non-retryable OperationalError - classify and raise
+                logger.warning(
+                    f"[{operation_name}] Non-retryable OperationalError: {e}"
+                )
                 classified_error = classify_sqlite_error(e, operation_name)
                 if self._metrics_collector:
                     self._metrics_collector.record_operation(
                         operation_name,
-                        timeout_seconds * 1000,
+                        actual_timeout * 1000,
                         False,
                         len(self._connection_pool),
                     )
                 raise classified_error
 
             except asyncio.TimeoutError as e:
-                # Timeout on BEGIN IMMEDIATE - this is retryable
+                elapsed = time.monotonic() - start_time
+                logger.warning(
+                    f"[{operation_name}] Timeout after {elapsed*1000:.1f}ms "
+                    f"waiting for database lock (timeout={actual_timeout}s)"
+                )
                 if self._metrics_collector:
                     self._metrics_collector.record_locking_event(
                         operation_name, "timeout waiting for lock"
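
Note: the error handling above splits SQLite OperationalError into two classes. Contention errors whose message contains "locked" or "busy" are re-raised unchanged so the tenacity-based retry executor sees the original exception; everything else is classified once via classify_sqlite_error and raised without retry. A sketch of just that predicate, assuming (as the code above does) that contention is detected by substring:

import sqlite3

RETRYABLE_MARKERS = ("locked", "busy")

def is_retryable(e: sqlite3.OperationalError) -> bool:
    # SQLite surfaces lock contention as OperationalError whose message
    # contains "locked" or "busy" - the same substring test the diff uses
    msg = str(e).lower()
    return any(marker in msg for marker in RETRYABLE_MARKERS)

assert is_retryable(sqlite3.OperationalError("database is locked"))
assert not is_retryable(sqlite3.OperationalError("no such table: files"))
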
@@ -555,7 +609,7 @@ class DatabaseManager:
             if self._metrics_collector:
                 self._metrics_collector.record_operation(
                     operation_name,
-                    timeout_seconds * 1000,
+                    actual_timeout * 1000,
                     False,
                     len(self._connection_pool),
                 )
@@ -574,7 +628,7 @@ class DatabaseManager:
             if self._metrics_collector:
                 self._metrics_collector.record_operation(
                     operation_name,
-                    timeout_seconds * 1000,
+                    actual_timeout * 1000,
                     False,
                     len(self._connection_pool),
                 )
@@ -499,3 +499,13 @@ class FileScanner:
         """
         loop = asyncio.get_running_loop()
         return await loop.run_in_executor(None, self.find_missing_files, existing_paths)
+
+    async def is_valid_project_directory_async(self) -> bool:
+        """
+        Async version of is_valid_project_directory running in a thread.
+
+        Returns:
+            True if the directory exists and is accessible
+        """
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, self.is_valid_project_directory)
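
Note: the new is_valid_project_directory_async follows the same wrapper shape as the existing find_missing_files_async: run the blocking synchronous method in the event loop's default thread pool. A self-contained sketch of that shape; the synchronous body here is a stand-in, not the package's actual check:

import asyncio
import os

class FileScanner:
    def __init__(self, root: str) -> None:
        self.root = root

    def is_valid_project_directory(self) -> bool:
        # blocking filesystem calls belong in a worker thread, not the event loop
        return os.path.isdir(self.root) and os.access(self.root, os.R_OK)

    async def is_valid_project_directory_async(self) -> bool:
        # None selects the loop's default ThreadPoolExecutor
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self.is_valid_project_directory)

async def main() -> None:
    print(await FileScanner(".").is_valid_project_directory_async())

asyncio.run(main())
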
@@ -81,22 +81,29 @@ def setup_logging(
             root_logger.warning(f"Failed to set up file logging: {e}")
 
     # Configure specific loggers
+    effective_level = getattr(logging, log_level.upper())
 
-    # Quiet down noisy libraries
+    # Quiet down noisy libraries (always WARNING+)
     logging.getLogger("aiosqlite").setLevel(logging.WARNING)
     logging.getLogger("tiktoken").setLevel(logging.WARNING)
 
-    # MCP specific loggers
+    # MCP specific loggers - respect the configured log level
     mcp_logger = logging.getLogger("mcp")
-    mcp_logger.setLevel(logging.INFO)
+    mcp_logger.setLevel(effective_level)
 
-    # Database logger
-    db_logger = logging.getLogger("src.database")
-    db_logger.setLevel(logging.INFO)
+    # Database logger - respect the configured log level
+    db_logger = logging.getLogger("mcp_code_indexer.database")
+    db_logger.setLevel(effective_level)
 
-    # Server logger
-    server_logger = logging.getLogger("src.server")
-    server_logger.setLevel(logging.INFO)
+    # Also set the old logger names for backwards compatibility
+    logging.getLogger("src.database").setLevel(effective_level)
+
+    # Server logger - respect the configured log level
+    server_logger = logging.getLogger("mcp_code_indexer.server")
+    server_logger.setLevel(effective_level)
+
+    # Also set the old logger names for backwards compatibility
+    logging.getLogger("src.server").setLevel(effective_level)
 
     return root_logger
 
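
Note: setup_logging previously pinned the mcp, src.database, and src.server loggers to INFO regardless of the requested level. The new code derives effective_level once from the log_level argument and applies it to the new mcp_code_indexer.* logger names as well as the legacy src.* names. A minimal sketch of the level lookup, behaving like the getattr call above (an unknown name raises AttributeError):

import logging

def effective_level_for(log_level: str) -> int:
    # "debug" -> logging.DEBUG (10), "INFO" -> logging.INFO (20), etc.
    # Unknown names raise AttributeError, same as the getattr call above.
    return getattr(logging, log_level.upper())

assert effective_level_for("debug") == logging.DEBUG
assert effective_level_for("INFO") == logging.INFO
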
@@ -1002,10 +1002,10 @@ class MCPCodeIndexServer:
         try:
             # Get files currently in the folder
             scanner = FileScanner(Path(folder_path))
-            if not scanner.is_valid_project_directory():
+            if not await scanner.is_valid_project_directory_async():
                 return False
 
-            current_files = scanner.scan_directory()
+            current_files = await scanner.scan_directory_async()
             current_basenames = {f.name for f in current_files}
 
             if not current_basenames:
@@ -1164,17 +1164,22 @@ class MCPCodeIndexServer:
         # Use provided token limit or fall back to server default
         token_limit = arguments.get("tokenLimit", self.token_limit)
 
-        # Calculate total tokens for descriptions
+        # Calculate total tokens for descriptions (offload to executor to avoid blocking)
         logger.info("Calculating total token count...")
-        descriptions_tokens = self.token_counter.calculate_codebase_tokens(
+        loop = asyncio.get_running_loop()
+        descriptions_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
             file_descriptions
         )
 
-        # Get overview tokens if available
+        # Get overview tokens if available (offload to executor to avoid blocking)
         overview = await db_manager.get_project_overview(project_id)
         overview_tokens = 0
         if overview and overview.overview:
-            overview_tokens = self.token_counter.count_tokens(overview.overview)
+            overview_tokens = await loop.run_in_executor(
+                None, self.token_counter.count_tokens, overview.overview
+            )
 
         total_tokens = descriptions_tokens + overview_tokens
         is_large = total_tokens > token_limit
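
Note: token counting over all file descriptions is CPU-bound tiktoken work, so calling it directly in the handler would stall the event loop; the change above hands it to the default executor instead. A runnable sketch of the call shape, with a stand-in counter in place of the package's token_counter (keyword arguments would need functools.partial, since run_in_executor only forwards positionals):

import asyncio

def calculate_codebase_tokens(file_descriptions: list[str]) -> int:
    # stand-in for the package's tiktoken-based counter (CPU-bound)
    return sum(len(d.split()) for d in file_descriptions)

async def main() -> None:
    loop = asyncio.get_running_loop()
    # positional args go after the callable; no keywords allowed here
    tokens = await loop.run_in_executor(
        None, calculate_codebase_tokens, ["reads config", "writes index"]
    )
    print(tokens)

asyncio.run(main())
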
@@ -1242,7 +1247,7 @@ class MCPCodeIndexServer:
         # Scan directory for files
         logger.info(f"Scanning project directory: {folder_path_obj}")
         scanner = FileScanner(folder_path_obj)
-        if not scanner.is_valid_project_directory():
+        if not await scanner.is_valid_project_directory_async():
             logger.error(
                 f"Invalid or inaccessible project directory: {folder_path_obj}"
             )
@@ -1250,7 +1255,7 @@ class MCPCodeIndexServer:
                 "error": f"Invalid or inaccessible project directory: {folder_path_obj}"
             }
 
-        missing_files = scanner.find_missing_files(existing_paths)
+        missing_files = await scanner.find_missing_files_async(existing_paths)
         missing_paths = [scanner.get_relative_path(f) for f in missing_files]
 
         logger.info(f"Found {len(missing_paths)} files without descriptions")
@@ -1268,8 +1273,9 @@ class MCPCodeIndexServer:
             missing_paths = missing_paths[:limit]
             logger.info(f"Applied limit {limit}, returning {len(missing_paths)} files")
 
-        # Get project stats
-        stats = scanner.get_project_stats()
+        # Get project stats (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        stats = await loop.run_in_executor(None, scanner.get_project_stats)
         logger.info(f"Project stats: {stats.get('total_files', 0)} total files")
 
         return {
@@ -1325,8 +1331,13 @@ class MCPCodeIndexServer:
             project_id=project_id
         )
 
-        # Calculate total tokens
-        total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+        # Calculate total tokens (offload to executor to avoid blocking)
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
         is_large = self.token_counter.is_large_codebase(total_tokens)
 
         # Always build and return the folder structure - if the AI called this
@@ -1422,7 +1433,12 @@ class MCPCodeIndexServer:
         )
 
         total_files = len(file_descriptions)
-        total_tokens = self.token_counter.calculate_codebase_tokens(file_descriptions)
+        loop = asyncio.get_running_loop()
+        total_tokens = await loop.run_in_executor(
+            None,
+            self.token_counter.calculate_codebase_tokens,
+            file_descriptions
+        )
 
         # Create overview record
         overview = ProjectOverview(
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mcp-code-indexer
-Version: 4.2.17
+Version: 4.2.19
 Summary: MCP server that tracks file descriptions across codebases, enabling AI agents to efficiently navigate and understand code through searchable summaries and token-aware overviews.
 License: MIT
 License-File: LICENSE
@@ -49,8 +49,8 @@ Description-Content-Type: text/markdown
 
 # MCP Code Indexer 🚀
 
-[![PyPI version](https://badge.fury.io/py/mcp-code-indexer.svg?64)](https://badge.fury.io/py/mcp-code-indexer)
-[![Python](https://img.shields.io/pypi/pyversions/mcp-code-indexer.svg?64)](https://pypi.org/project/mcp-code-indexer/)
+[![PyPI version](https://badge.fury.io/py/mcp-code-indexer.svg?66)](https://badge.fury.io/py/mcp-code-indexer)
+[![Python](https://img.shields.io/pypi/pyversions/mcp-code-indexer.svg?66)](https://pypi.org/project/mcp-code-indexer/)
 [![License](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
 
 A production-ready **Model Context Protocol (MCP) server** that revolutionizes how AI agents navigate and understand codebases. Built for high-concurrency environments with advanced database resilience, the server provides instant access to intelligent descriptions, semantic search, and context-aware recommendations while maintaining 800+ writes/sec throughput.
@@ -8,7 +8,7 @@ mcp_code_indexer/commands/makelocal.py,sha256=T_44so96jcs1FNlft9E3nAq0LlOzQLhjLd
 mcp_code_indexer/data/stop_words_english.txt,sha256=feRGP8WG5hQPo-wZN5ralJiSv1CGw4h3010NBJnJ0Z8,6344
 mcp_code_indexer/database/__init__.py,sha256=aPq_aaRp0aSwOBIq9GkuMNjmLxA411zg2vhdrAuHm-w,38
 mcp_code_indexer/database/connection_health.py,sha256=jZr3tCbfjUJujdXe_uxtm1N4c31dMV4euiSY4ulamOE,25497
-mcp_code_indexer/database/database.py,sha256=GcXI6p99E4_fo9oMbbZXHIMpYm4Z7y2TdQJgSnZNvMI,57986
+mcp_code_indexer/database/database.py,sha256=Dp2gArUXAY8q2agzj4QQlF-ZtdT0TRbHpGspZDI1jHg,60619
 mcp_code_indexer/database/database_factory.py,sha256=VMw0tlutGgZoTI7Q_PCuFy5zAimq2xuMtDFAlF_FKtc,4316
 mcp_code_indexer/database/exceptions.py,sha256=zciu7fDwF8y0Z4tkzTBFPPvXCvdnEJiA-Wd-cBLZBWw,10473
 mcp_code_indexer/database/models.py,sha256=w1U9zMGNt0LQeCiifYeXKW_Cia9BKV5uPChbOve-FZY,13467
@@ -16,9 +16,9 @@ mcp_code_indexer/database/path_resolver.py,sha256=1Ubx6Ly5F2dnvhbdN3tqyowBHslABX
 mcp_code_indexer/database/retry_executor.py,sha256=r7eKn_xDc6hKz9qs9z9Dg8gyq4uZgnyrgFmQFTyDhdo,14409
 mcp_code_indexer/deepask_handler.py,sha256=qI9h_Me5WQAbt3hzzDG8XDBMZlnvx-I9R7OsmO_o8aA,18497
 mcp_code_indexer/error_handler.py,sha256=ylciEM-cR7E8Gmd8cfh5olcllJm0FnaYBGH86yayFic,12530
-mcp_code_indexer/file_scanner.py,sha256=OCLnzPTneIiMtGcV3NB5qvnZrO3zxCqPEZXtCk75dfA,15178
+mcp_code_indexer/file_scanner.py,sha256=r8WR1Or1pbB63PmUIno5XYwBrmYg9xr37Ef6zmtt4yQ,15538
 mcp_code_indexer/git_hook_handler.py,sha256=sTtZV3-Yy1Evt06R5NZclELeepM4Ia9OQoR2O6BK3Hk,45517
-mcp_code_indexer/logging_config.py,sha256=M5eVZ5PwfTROib7ISTQ522n2hUSc4hJ_wUgsrJKsTTg,10030
+mcp_code_indexer/logging_config.py,sha256=NNB5iKCc-Hdognf-UdjuEKB9H_e4UIp3QTVnf69Rc2k,10504
 mcp_code_indexer/main.py,sha256=byM0Y9EwDa0616dEkx2p_1rUdJmDNeKAG41o5_AJago,41084
 mcp_code_indexer/middleware/__init__.py,sha256=UCEPzOlZldlqFzYEfrXw1HvCDvY1jpLvyaDGUzVr2aw,368
 mcp_code_indexer/middleware/auth.py,sha256=4HkHMDZBNsyPA1VE8qF7pRNKbqG4xIDZjllENbgynxI,7258
@@ -33,7 +33,7 @@ mcp_code_indexer/migrations/005_remove_git_remotes.sql,sha256=vT84AaV1hyN4zq5W67
 mcp_code_indexer/migrations/006_vector_mode.sql,sha256=kN-UBPGoagqtpxpGEjdz-V3hevPAXxAdNmxF4iIPsY8,7448
 mcp_code_indexer/query_preprocessor.py,sha256=vi23sK2ffs4T5PGY7lHrbCBDL421AlPz2dldqX_3JKA,5491
 mcp_code_indexer/server/__init__.py,sha256=16xMcuriUOBlawRqWNBk6niwrvtv_JD5xvI36X1Vsmk,41
-mcp_code_indexer/server/mcp_server.py,sha256=nkm7R_lOnD_XDxk1mEiZgrT6D-G-QIt0g34VHNb1p5g,84424
+mcp_code_indexer/server/mcp_server.py,sha256=hALcOMxPBD0BD3Zpyfb8ifFXS2y2eoGWJ10foDde8R8,85114
 mcp_code_indexer/tiktoken_cache/9b5ad71b2ce5302211f9c61530b329a4922fc6a4,sha256=Ijkht27pm96ZW3_3OFE-7xAPtR0YyTWXoRO8_-hlsqc,1681126
 mcp_code_indexer/token_counter.py,sha256=e6WsyCEWMMSkMwLbcVtr5e8vEqh-kFqNmiJErCNdqHE,8220
 mcp_code_indexer/tools/__init__.py,sha256=m01mxML2UdD7y5rih_XNhNSCMzQTz7WQ_T1TeOcYlnE,49
@@ -65,8 +65,8 @@ mcp_code_indexer/vector_mode/services/vector_mode_tools_service.py,sha256=K1_STy
 mcp_code_indexer/vector_mode/services/vector_storage_service.py,sha256=JI3VUc2mG8xZ_YqOvfKJivuMi4imeBLr2UFVrWgDWhk,21193
 mcp_code_indexer/vector_mode/types.py,sha256=M4lUF43FzjiVUezoRqozx_u0g1-xrX9qcRcRn-u65yw,1222
 mcp_code_indexer/vector_mode/utils.py,sha256=XtHrpOw0QJ0EjdzJ85jrbkmHy8Slkq_t7hz-q4RP-10,1283
-mcp_code_indexer-4.2.17.dist-info/METADATA,sha256=hlxYys7f-3mK2L00ZbP7UHyVxBa5AQO1xDw8t5HZ3jo,27689
-mcp_code_indexer-4.2.17.dist-info/WHEEL,sha256=kJCRJT_g0adfAJzTx2GUMmS80rTJIVHRCfG0DQgLq3o,88
-mcp_code_indexer-4.2.17.dist-info/entry_points.txt,sha256=UABj7HZ0mC6rvF22gxaz2LLNLGQShTrFmp5u00iUtvo,67
-mcp_code_indexer-4.2.17.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
-mcp_code_indexer-4.2.17.dist-info/RECORD,,
+mcp_code_indexer-4.2.19.dist-info/METADATA,sha256=9_qVYC29iCTLIuAV_uLVB4vbq6HEU_ZCo3vdaKZyQ78,27689
+mcp_code_indexer-4.2.19.dist-info/WHEEL,sha256=kJCRJT_g0adfAJzTx2GUMmS80rTJIVHRCfG0DQgLq3o,88
+mcp_code_indexer-4.2.19.dist-info/entry_points.txt,sha256=UABj7HZ0mC6rvF22gxaz2LLNLGQShTrFmp5u00iUtvo,67
+mcp_code_indexer-4.2.19.dist-info/licenses/LICENSE,sha256=JN9dyPPgYwH9C-UjYM7FLNZjQ6BF7kAzpF3_4PwY4rY,1086
+mcp_code_indexer-4.2.19.dist-info/RECORD,,