mcp-code-indexer 4.0.1__py3-none-any.whl → 4.1.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (57)
  1. mcp_code_indexer/__init__.py +7 -5
  2. mcp_code_indexer/ask_handler.py +2 -2
  3. mcp_code_indexer/claude_api_handler.py +10 -5
  4. mcp_code_indexer/cleanup_manager.py +20 -12
  5. mcp_code_indexer/commands/makelocal.py +85 -63
  6. mcp_code_indexer/data/stop_words_english.txt +1 -1
  7. mcp_code_indexer/database/connection_health.py +29 -20
  8. mcp_code_indexer/database/database.py +44 -31
  9. mcp_code_indexer/database/database_factory.py +19 -20
  10. mcp_code_indexer/database/exceptions.py +10 -10
  11. mcp_code_indexer/database/models.py +126 -1
  12. mcp_code_indexer/database/path_resolver.py +22 -21
  13. mcp_code_indexer/database/retry_executor.py +37 -19
  14. mcp_code_indexer/deepask_handler.py +3 -3
  15. mcp_code_indexer/error_handler.py +46 -20
  16. mcp_code_indexer/file_scanner.py +15 -12
  17. mcp_code_indexer/git_hook_handler.py +71 -76
  18. mcp_code_indexer/logging_config.py +13 -5
  19. mcp_code_indexer/main.py +85 -22
  20. mcp_code_indexer/middleware/__init__.py +1 -1
  21. mcp_code_indexer/middleware/auth.py +47 -43
  22. mcp_code_indexer/middleware/error_middleware.py +15 -15
  23. mcp_code_indexer/middleware/logging.py +44 -42
  24. mcp_code_indexer/middleware/security.py +84 -76
  25. mcp_code_indexer/migrations/002_performance_indexes.sql +1 -1
  26. mcp_code_indexer/migrations/004_remove_branch_dependency.sql +14 -14
  27. mcp_code_indexer/migrations/006_vector_mode.sql +189 -0
  28. mcp_code_indexer/query_preprocessor.py +2 -2
  29. mcp_code_indexer/server/mcp_server.py +158 -94
  30. mcp_code_indexer/transport/__init__.py +1 -1
  31. mcp_code_indexer/transport/base.py +19 -17
  32. mcp_code_indexer/transport/http_transport.py +89 -76
  33. mcp_code_indexer/transport/stdio_transport.py +12 -8
  34. mcp_code_indexer/vector_mode/__init__.py +36 -0
  35. mcp_code_indexer/vector_mode/chunking/__init__.py +19 -0
  36. mcp_code_indexer/vector_mode/chunking/ast_chunker.py +403 -0
  37. mcp_code_indexer/vector_mode/chunking/chunk_optimizer.py +500 -0
  38. mcp_code_indexer/vector_mode/chunking/language_handlers.py +478 -0
  39. mcp_code_indexer/vector_mode/config.py +155 -0
  40. mcp_code_indexer/vector_mode/daemon.py +335 -0
  41. mcp_code_indexer/vector_mode/monitoring/__init__.py +19 -0
  42. mcp_code_indexer/vector_mode/monitoring/change_detector.py +312 -0
  43. mcp_code_indexer/vector_mode/monitoring/file_watcher.py +445 -0
  44. mcp_code_indexer/vector_mode/monitoring/merkle_tree.py +418 -0
  45. mcp_code_indexer/vector_mode/providers/__init__.py +72 -0
  46. mcp_code_indexer/vector_mode/providers/base_provider.py +230 -0
  47. mcp_code_indexer/vector_mode/providers/turbopuffer_client.py +338 -0
  48. mcp_code_indexer/vector_mode/providers/voyage_client.py +212 -0
  49. mcp_code_indexer/vector_mode/security/__init__.py +11 -0
  50. mcp_code_indexer/vector_mode/security/patterns.py +297 -0
  51. mcp_code_indexer/vector_mode/security/redactor.py +368 -0
  52. {mcp_code_indexer-4.0.1.dist-info → mcp_code_indexer-4.1.0.dist-info}/METADATA +82 -24
  53. mcp_code_indexer-4.1.0.dist-info/RECORD +66 -0
  54. mcp_code_indexer-4.0.1.dist-info/RECORD +0 -47
  55. {mcp_code_indexer-4.0.1.dist-info → mcp_code_indexer-4.1.0.dist-info}/LICENSE +0 -0
  56. {mcp_code_indexer-4.0.1.dist-info → mcp_code_indexer-4.1.0.dist-info}/WHEEL +0 -0
  57. {mcp_code_indexer-4.0.1.dist-info → mcp_code_indexer-4.1.0.dist-info}/entry_points.txt +0 -0
--- a/mcp_code_indexer/__init__.py
+++ b/mcp_code_indexer/__init__.py
@@ -6,10 +6,12 @@ intelligent codebase navigation through searchable file descriptions,
 token-aware overviews, and advanced merge capabilities.
 """
 
+
 # Delay import to avoid dependency issues during testing
-def get_server():
+def get_server() -> type:
     """Get MCPCodeIndexServer (lazy import)."""
     from .server.mcp_server import MCPCodeIndexServer
+
     return MCPCodeIndexServer
 
 
@@ -27,15 +29,15 @@ def _get_version() -> str:
        for pkg_name in ["mcp-code-indexer", "mcp_code_indexer"]:
            try:
                return version(pkg_name)
-           except Exception:
+           except Exception:  # nosec B112
                continue
-   except Exception:
+   except Exception:  # nosec B110
        pass
 
    # Fallback to reading from pyproject.toml (for development)
    try:
-       from pathlib import Path
        import sys
+       from pathlib import Path
 
        if sys.version_info >= (3, 11):
            import tomllib
@@ -48,7 +50,7 @@ def _get_version() -> str:
        pyproject_path = Path(__file__).parent.parent.parent / "pyproject.toml"
        with open(pyproject_path, "rb") as f:
            data = tomllib.load(f)
-       return data["project"]["version"]
+       return str(data["project"]["version"])
    except Exception:
        return "dev"
 
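For orientation, the pattern these __init__.py hunks adjust is a two-stage version lookup. A minimal sketch of it, simplified from the diff above (the backport import and the shortened pyproject path are my assumptions, not the package's exact code):

import sys
from importlib.metadata import version
from pathlib import Path

def _get_version() -> str:
    # Stage 1: ask the installed distribution metadata, trying both name spellings
    for pkg_name in ["mcp-code-indexer", "mcp_code_indexer"]:
        try:
            return version(pkg_name)
        except Exception:  # nosec B112 (Bandit: try/except/continue)
            continue
    # Stage 2 (development checkout): read pyproject.toml directly
    try:
        if sys.version_info >= (3, 11):
            import tomllib  # stdlib from 3.11 on
        else:
            import tomli as tomllib  # assumed backport for older interpreters
        pyproject_path = Path(__file__).parent / "pyproject.toml"
        with open(pyproject_path, "rb") as f:
            # tomllib.load() returns untyped data, hence the str() for mypy
            return str(tomllib.load(f)["project"]["version"])
    except Exception:
        return "dev"

The # nosec B112 and # nosec B110 markers added in the hunks are Bandit suppressions for its try-except-continue and try-except-pass checks; the str(...) wrapper exists purely to satisfy mypy.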
--- a/mcp_code_indexer/ask_handler.py
+++ b/mcp_code_indexer/ask_handler.py
@@ -8,9 +8,9 @@ and sending them to Claude via OpenRouter API for direct responses.
 
 import logging
 from pathlib import Path
-from typing import Dict, Optional, Any
+from typing import Any, Dict, Optional
 
-from .claude_api_handler import ClaudeAPIHandler, ClaudeAPIError
+from .claude_api_handler import ClaudeAPIError, ClaudeAPIHandler
 from .database.database import DatabaseManager
 
 
--- a/mcp_code_indexer/claude_api_handler.py
+++ b/mcp_code_indexer/claude_api_handler.py
@@ -11,15 +11,15 @@ import json
 import logging
 import os
 from dataclasses import dataclass
-from typing import Dict, List, Optional, Any
 from pathlib import Path
+from typing import Any, Dict, List, Optional
 
 import aiohttp
 from tenacity import (
     retry,
-    wait_exponential,
-    stop_after_attempt,
     retry_if_exception_type,
+    stop_after_attempt,
+    wait_exponential,
 )
 
 from .database.database import DatabaseManager
@@ -194,7 +194,6 @@ class ClaudeAPIHandler:
            async with session.post(
                self.OPENROUTER_API_URL, headers=headers, json=payload
            ) as response:
-
                self.logger.info(f"Claude API response status: {response.status}")
 
                if response.status == 429:
@@ -240,7 +239,7 @@ class ClaudeAPIHandler:
            raise ClaudeAPIError("Claude API request timed out")
 
    def validate_json_response(
-        self, response_text: str, required_keys: List[str] = None
+        self, response_text: str, required_keys: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        """
        Validate and parse JSON response from Claude.
@@ -297,6 +296,12 @@ class ClaudeAPIHandler:
            self.logger.debug(f"Extracted JSON from response: {extracted_json}")
            data = json.loads(extracted_json)
 
+           # Ensure data is a dictionary
+           if not isinstance(data, dict):
+               raise ClaudeValidationError(
+                   f"Expected JSON object, got {type(data).__name__}"
+               )
+
            # Validate required keys if specified
            if required_keys:
                missing_keys = [key for key in required_keys if key not in data]
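The new isinstance check in validate_json_response closes a real gap: json.loads accepts any top-level JSON value (list, string, number), so downstream dict operations could otherwise fail with a confusing error. A self-contained sketch of the guard, using ValueError in place of the package's ClaudeValidationError:

import json
from typing import Any, Dict

def parse_json_object(response_text: str) -> Dict[str, Any]:
    data = json.loads(response_text)
    # json.loads can return list/str/int/float/bool/None; require an object here
    if not isinstance(data, dict):
        raise ValueError(f"Expected JSON object, got {type(data).__name__}")
    return data

print(parse_json_object('{"summary": "ok"}'))  # {'summary': 'ok'}
# parse_json_object('[1, 2, 3]') raises ValueError: Expected JSON object, got list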
--- a/mcp_code_indexer/cleanup_manager.py
+++ b/mcp_code_indexer/cleanup_manager.py
@@ -8,7 +8,10 @@ and manual cleanup methods.
 
 import logging
 import time
-from typing import List, Optional
+from typing import Any, List, Optional, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .database.database import DatabaseManager
 
 logger = logging.getLogger(__name__)
 
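This TYPE_CHECKING block is the standard way to annotate a parameter whose type lives in a module that would otherwise create an import cycle: the import runs only under static analysis, and the annotation is written as a string so nothing is evaluated at runtime. In sketch form:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen by mypy/pyright only; never executed, so no circular import at runtime
    from .database.database import DatabaseManager

class CleanupManager:
    def __init__(
        self, db_manager: "DatabaseManager", retention_months: int = 6
    ) -> None:
        # The quoted annotation is resolved lazily by the type checker
        self.db_manager = db_manager
        self.retention_months = retention_months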
@@ -21,7 +24,9 @@ class CleanupManager:
    periodic cleanup to permanently remove old records after the retention period.
    """
 
-    def __init__(self, db_manager, retention_months: int = 6):
+    def __init__(
+        self, db_manager: "DatabaseManager", retention_months: int = 6
+    ) -> None:
        """
        Initialize cleanup manager.
 
@@ -80,7 +85,7 @@ class CleanupManager:
 
        cleanup_timestamp = int(time.time())
 
-        async def batch_operation(conn):
+        async def batch_operation(conn: Any) -> int:
            data = [(cleanup_timestamp, project_id, path) for path in file_paths]
            cursor = await conn.executemany(
                """
@@ -90,7 +95,7 @@ class CleanupManager:
                """,
                data,
            )
-            return cursor.rowcount
+            return int(cursor.rowcount)
 
        marked_count = await self.db_manager.execute_transaction_with_retry(
            batch_operation,
@@ -99,7 +104,7 @@ class CleanupManager:
        )
 
        logger.info(f"Marked {marked_count} files for cleanup in project {project_id}")
-        return marked_count
+        return int(marked_count)
 
    async def restore_file_from_cleanup(self, project_id: str, file_path: str) -> bool:
        """
@@ -177,7 +182,7 @@ class CleanupManager:
        )  # Approximate months to seconds
        cutoff_timestamp = int(time.time()) - cutoff_seconds
 
-        async def cleanup_operation(conn):
+        async def cleanup_operation(conn: Any) -> int:
            if project_id:
                cursor = await conn.execute(
                    """
@@ -196,7 +201,7 @@ class CleanupManager:
                    (cutoff_timestamp,),
                )
 
-            return cursor.rowcount
+            return int(cursor.rowcount)
 
        deleted_count = await self.db_manager.execute_transaction_with_retry(
            cleanup_operation,
@@ -208,7 +213,7 @@ class CleanupManager:
        scope = f"project {project_id}" if project_id else "all projects"
        logger.info(f"Permanently deleted {deleted_count} old records from {scope}")
 
-        return deleted_count
+        return int(deleted_count)
 
    async def get_cleanup_stats(self, project_id: Optional[str] = None) -> dict:
        """
@@ -226,7 +231,7 @@ class CleanupManager:
        async with self.db_manager.get_connection() as db:
            if project_id:
                base_where = "WHERE project_id = ?"
-                params = (project_id,)
+                params: tuple[Any, ...] = (project_id,)
            else:
                base_where = ""
                params = ()
@@ -239,7 +244,8 @@ class CleanupManager:
                ),
                params,
            )
-            active_count = (await cursor.fetchone())[0]
+            row = await cursor.fetchone()
+            active_count = row[0] if row else 0
 
            # Files marked for cleanup
            cursor = await db.execute(
@@ -249,7 +255,8 @@ class CleanupManager:
                ),
                params,
            )
-            marked_count = (await cursor.fetchone())[0]
+            row = await cursor.fetchone()
+            marked_count = row[0] if row else 0
 
            # Files eligible for permanent deletion
            if project_id:
@@ -268,7 +275,8 @@ class CleanupManager:
                ),
                (cutoff_timestamp,),
            )
-            eligible_for_deletion = (await cursor.fetchone())[0]
+            row = await cursor.fetchone()
+            eligible_for_deletion = row[0] if row else 0
 
            return {
                "active_files": active_count,
--- a/mcp_code_indexer/commands/makelocal.py
+++ b/mcp_code_indexer/commands/makelocal.py
@@ -11,7 +11,7 @@ from typing import List, Optional
 
 from mcp_code_indexer.database.database import DatabaseManager
 from mcp_code_indexer.database.database_factory import DatabaseFactory
-from mcp_code_indexer.database.models import Project, FileDescription, ProjectOverview
+from mcp_code_indexer.database.models import FileDescription, Project, ProjectOverview
 
 logger = logging.getLogger(__name__)
 
@@ -19,97 +19,118 @@ logger = logging.getLogger(__name__)
 class MakeLocalCommand:
    """
    Command to migrate project data from global to local database.
-
+
    Extracts all project data, file descriptions, and project overviews
    from the global database and creates a local database in the specified folder.
    """
-
+
    def __init__(self, db_factory: DatabaseFactory):
        """
        Initialize the make local command.
-
+
        Args:
            db_factory: Database factory for creating database managers
        """
        self.db_factory = db_factory
-
-    async def execute(self, folder_path: str, project_name: Optional[str] = None) -> dict:
+
+    async def execute(
+        self, folder_path: str, project_name: Optional[str] = None
+    ) -> dict:
        """
        Execute the make local command.
-
+
        Args:
            folder_path: Path to the project folder where local DB will be created
            project_name: Optional project name to migrate (if None, tries to find by folder)
-
+
        Returns:
            Dictionary with operation results
        """
        folder_path_obj = Path(folder_path).resolve()
-
+
        if not folder_path_obj.exists():
            raise ValueError(f"Folder path does not exist: {folder_path}")
-
+
        if not folder_path_obj.is_dir():
            raise ValueError(f"Path is not a directory: {folder_path}")
-
+
        # Get local database folder and path
-        local_db_folder = self.db_factory.get_path_resolver().get_local_database_folder(folder_path)
-        local_db_path = self.db_factory.get_path_resolver().get_local_database_path(folder_path)
-
+        local_db_folder = self.db_factory.get_path_resolver().get_local_database_folder(
+            folder_path
+        )
+        local_db_path = self.db_factory.get_path_resolver().get_local_database_path(
+            folder_path
+        )
+
        # Check if local database already exists and has data
-        if local_db_folder.exists() and local_db_path.exists() and local_db_path.stat().st_size > 0:
+        if (
+            local_db_folder.exists()
+            and local_db_path.exists()
+            and local_db_path.stat().st_size > 0
+        ):
            # Check if it actually has project data (not just schema)
            from sqlite3 import connect
+
            try:
                with connect(local_db_path) as conn:
                    cursor = conn.execute("SELECT COUNT(*) FROM projects")
                    project_count = cursor.fetchone()[0]
                    if project_count > 0:
-                        raise ValueError(f"Local database already contains {project_count} project(s): {local_db_path}")
+                        raise ValueError(
+                            f"Local database already contains {project_count} project(s): {local_db_path}"
+                        )
            except Exception:
                # If we can't check, assume it has data to be safe
                raise ValueError(f"Local database already exists: {local_db_path}")
-
+
        # Get global database manager
        global_db_manager = await self.db_factory.get_database_manager()
-
+
        # Find the project to migrate
-        project = await self._find_project_to_migrate(global_db_manager, folder_path, project_name)
+        project = await self._find_project_to_migrate(
+            global_db_manager, folder_path, project_name
+        )
        if not project:
            if project_name:
-                raise ValueError(f"Project '{project_name}' not found in global database")
+                raise ValueError(
+                    f"Project '{project_name}' not found in global database"
+                )
            else:
                raise ValueError(f"No project found for folder path: {folder_path}")
-
+
        logger.info(f"Found project to migrate: {project.name} (ID: {project.id})")
-
+
        # Get all project data
-        file_descriptions = await global_db_manager.get_all_file_descriptions(project.id)
+        file_descriptions = await global_db_manager.get_all_file_descriptions(
+            project.id
+        )
        project_overview = await global_db_manager.get_project_overview(project.id)
-
+
        logger.info(f"Found {len(file_descriptions)} file descriptions to migrate")
        if project_overview:
            logger.info("Found project overview to migrate")
-
+
        # Create local database folder (this ensures it exists)
        local_db_folder.mkdir(parents=True, exist_ok=True)
        logger.info(f"Created local database folder: {local_db_folder}")
-
+
        # Create local database manager (this will initialize schema)
-        local_db_manager = await self.db_factory.get_database_manager(str(folder_path_obj))
-
+        local_db_manager = await self.db_factory.get_database_manager(
+            str(folder_path_obj)
+        )
+
        # For local databases, we'll create a project with a machine-independent approach
        # We'll store the current folder path in aliases for reference, but the project
        # will be found by being the single project in the local database
-
+
        # Migrate data
        await self._migrate_project_data(
            local_db_manager, project, file_descriptions, project_overview
        )
-
+
        # Remove data from global database
        await self._remove_from_global_database(global_db_manager, project.id)
-
+
        return {
            "success": True,
            "project_name": project.name,
@@ -119,26 +140,26 @@ class MakeLocalCommand:
            "migrated_files": len(file_descriptions),
            "migrated_overview": project_overview is not None,
        }
-
+
    async def _find_project_to_migrate(
-        self,
-        global_db_manager: DatabaseManager,
-        folder_path: str,
-        project_name: Optional[str]
+        self,
+        global_db_manager: DatabaseManager,
+        folder_path: str,
+        project_name: Optional[str],
    ) -> Optional[Project]:
        """
        Find the project to migrate from the global database.
-
+
        Args:
            global_db_manager: Global database manager
            folder_path: Project folder path
            project_name: Optional project name
-
+
        Returns:
            Project to migrate or None if not found
        """
        all_projects = await global_db_manager.get_all_projects()
-
+
        if project_name:
            # Search by name
            normalized_name = project_name.lower()
@@ -150,23 +171,23 @@ class MakeLocalCommand:
        for project in all_projects:
            if folder_path in project.aliases:
                return project
-
+
        return None
-
+
    async def _migrate_project_data(
        self,
        local_db_manager: DatabaseManager,
        project: Project,
        file_descriptions: List[FileDescription],
-        project_overview: Optional[ProjectOverview]
+        project_overview: Optional[ProjectOverview],
    ) -> None:
        """
        Migrate project data to the local database.
-
+
        For local databases, we update the project aliases to include the current
        folder path since local database projects are found by being the single
        project in the database rather than by path matching.
-
+
        Args:
            local_db_manager: Local database manager
            project: Project to migrate
@@ -175,53 +196,54 @@ class MakeLocalCommand:
        """
        # Update project aliases to include current folder path for reference
        # Note: This will be machine-specific but that's OK for local databases
-
+
        # Create project in local database
        await local_db_manager.create_project(project)
        logger.info(f"Created project in local database: {project.name}")
-
+
        # Migrate file descriptions
        if file_descriptions:
            await local_db_manager.batch_create_file_descriptions(file_descriptions)
            logger.info(f"Migrated {len(file_descriptions)} file descriptions")
-
+
        # Migrate project overview
        if project_overview:
            await local_db_manager.create_project_overview(project_overview)
            logger.info("Migrated project overview")
-
+
    async def _remove_from_global_database(
        self, global_db_manager: DatabaseManager, project_id: str
    ) -> None:
        """
        Remove project data from the global database.
-
+
        Args:
            global_db_manager: Global database manager
            project_id: Project ID to remove
        """
        # Remove file descriptions
-        async with global_db_manager.get_write_connection_with_retry("remove_project_files") as db:
+        async with global_db_manager.get_write_connection_with_retry(
+            "remove_project_files"
+        ) as db:
            await db.execute(
-                "DELETE FROM file_descriptions WHERE project_id = ?",
-                (project_id,)
+                "DELETE FROM file_descriptions WHERE project_id = ?", (project_id,)
            )
            await db.commit()
-
+
        # Remove project overview
-        async with global_db_manager.get_write_connection_with_retry("remove_project_overview") as db:
+        async with global_db_manager.get_write_connection_with_retry(
+            "remove_project_overview"
+        ) as db:
            await db.execute(
-                "DELETE FROM project_overviews WHERE project_id = ?",
-                (project_id,)
+                "DELETE FROM project_overviews WHERE project_id = ?", (project_id,)
            )
            await db.commit()
-
+
        # Remove project
-        async with global_db_manager.get_write_connection_with_retry("remove_project") as db:
-            await db.execute(
-                "DELETE FROM projects WHERE id = ?",
-                (project_id,)
-            )
+        async with global_db_manager.get_write_connection_with_retry(
+            "remove_project"
+        ) as db:
+            await db.execute("DELETE FROM projects WHERE id = ?", (project_id,))
            await db.commit()
-
+
        logger.info(f"Removed project data from global database: {project_id}")
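Almost everything in these makelocal.py hunks is formatter output (long calls wrapped, trailing whitespace stripped); the one piece of logic worth restating is the safety probe that refuses to overwrite an existing local database. A standalone sketch of that check (the function name is mine; the logic mirrors the diff):

import sqlite3
from pathlib import Path

def local_db_has_projects(db_path: Path) -> bool:
    """True if the SQLite file exists and already holds rows in `projects`."""
    if not db_path.exists() or db_path.stat().st_size == 0:
        return False
    try:
        with sqlite3.connect(db_path) as conn:
            count = conn.execute("SELECT COUNT(*) FROM projects").fetchone()[0]
            return count > 0
    except Exception:
        # Can't inspect it (missing table, corrupt file): assume it has data, to be safe
        return True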
--- a/mcp_code_indexer/data/stop_words_english.txt
+++ b/mcp_code_indexer/data/stop_words_english.txt
@@ -848,4 +848,4 @@ widely
 words
 world
 youd
-youre
+youre
--- a/mcp_code_indexer/database/connection_health.py
+++ b/mcp_code_indexer/database/connection_health.py
@@ -10,7 +10,10 @@ import logging
 import time
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
-from typing import Dict, Optional, List
+from typing import Any, Dict, List, Optional, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from .database import DatabaseManager
 
 logger = logging.getLogger(__name__)
 
@@ -45,11 +48,11 @@ class ConnectionHealthMonitor:
 
    def __init__(
        self,
-        database_manager,
+        database_manager: "DatabaseManager",
        check_interval: float = 30.0,
        failure_threshold: int = 3,
        timeout_seconds: float = 5.0,
-    ):
+    ) -> None:
        """
        Initialize connection health monitor.
 
@@ -147,24 +150,30 @@ class ConnectionHealthMonitor:
        start_time = time.time()
 
        try:
-            # Use a timeout for the health check
-            async with asyncio.timeout(self.timeout_seconds):
+            # Simple timeout wrapper
+            async def perform_check() -> Any:
                async with self.database_manager.get_connection() as conn:
                    # Simple query to test connectivity
                    cursor = await conn.execute("SELECT 1")
                    result = await cursor.fetchone()
+                    return result
+
+            # Use timeout for the health check
+            result = await asyncio.wait_for(
+                perform_check(), timeout=self.timeout_seconds
+            )
 
-                if result and result[0] == 1:
-                    response_time = (time.time() - start_time) * 1000
-                    return HealthCheckResult(
-                        is_healthy=True, response_time_ms=response_time
-                    )
-                else:
-                    return HealthCheckResult(
-                        is_healthy=False,
-                        response_time_ms=(time.time() - start_time) * 1000,
-                        error_message="Unexpected query result",
-                    )
+            if result and result[0] == 1:
+                response_time = (time.time() - start_time) * 1000
+                return HealthCheckResult(
+                    is_healthy=True, response_time_ms=response_time
+                )
+            else:
+                return HealthCheckResult(
+                    is_healthy=False,
+                    response_time_ms=(time.time() - start_time) * 1000,
+                    error_message="Unexpected query result",
+                )
 
        except asyncio.TimeoutError:
            return HealthCheckResult(
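The health-check rewrite replaces asyncio.timeout(), an async context manager that only exists on Python 3.11+, with asyncio.wait_for(), which is available on all supported versions, by factoring the probe into a small inner coroutine. The shape of the change, as a runnable sketch with the database call stubbed out:

import asyncio

async def perform_check() -> int:
    # Stand-in for: open a pooled connection and run "SELECT 1"
    await asyncio.sleep(0.01)
    return 1

async def is_healthy(timeout_seconds: float = 5.0) -> bool:
    try:
        # wait_for cancels perform_check() if the timeout elapses;
        # asyncio.timeout() would do the same, but requires Python 3.11+
        result = await asyncio.wait_for(perform_check(), timeout=timeout_seconds)
        return result == 1
    except asyncio.TimeoutError:
        return False

print(asyncio.run(is_healthy()))  # True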
@@ -561,10 +570,10 @@ class ConnectionHealthMonitor:
 class DatabaseMetricsCollector:
    """Collects and aggregates database performance metrics."""
 
-    def __init__(self):
+    def __init__(self) -> None:
        """Initialize metrics collector."""
-        self._operation_metrics = {}
-        self._locking_events = []
+        self._operation_metrics: Dict[str, Any] = {}
+        self._locking_events: List[Dict[str, Any]] = []
        self._max_events_history = 50
 
    def record_operation(
@@ -655,7 +664,7 @@ class DatabaseMetricsCollector:
        ]
 
        # Count by operation
-        operation_counts = {}
+        operation_counts: Dict[str, int] = {}
        for event in self._locking_events:
            op = event["operation_name"]
            operation_counts[op] = operation_counts.get(op, 0) + 1
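The DatabaseMetricsCollector hunks show one last recurring mypy idiom in this release: an empty {} or [] literal gives the checker nothing to infer an element type from, so strict mode demands an explicit annotation at the assignment. In miniature:

from typing import Any, Dict, List

# Without annotations, strict mypy reports "Need type annotation" on empty literals
operation_metrics: Dict[str, Any] = {}
locking_events: List[Dict[str, Any]] = []
operation_counts: Dict[str, int] = {}

operation_counts["read"] = operation_counts.get("read", 0) + 1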