iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0.dist-info/METADATA +222 -0
- iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0.dist-info/RECORD +14 -0
- iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0.dist-info/WHEEL +4 -0
- iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0.dist-info/entry_points.txt +2 -0
- iflow_mcp_kandrwmrtn_cplusplus_mcp-0.1.0.dist-info/licenses/LICENSE +21 -0
- mcp_server/__init__.py +1 -0
- mcp_server/cache_manager.py +212 -0
- mcp_server/call_graph.py +108 -0
- mcp_server/cpp_analyzer.py +1042 -0
- mcp_server/cpp_analyzer_config.py +112 -0
- mcp_server/cpp_mcp_server.py +1675 -0
- mcp_server/file_scanner.py +92 -0
- mcp_server/search_engine.py +131 -0
- mcp_server/symbol_info.py +42 -0
mcp_server/cpp_mcp_server.py

@@ -0,0 +1,1675 @@
#!/usr/bin/env python3
"""
C++ Code Analysis MCP Server

Provides tools for analyzing C++ codebases using libclang.
Focused on specific queries rather than bulk data dumps.
"""

import asyncio
import json
import sys
import os
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
import re
import time
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor, as_completed
import threading
import multiprocessing
import tempfile
import subprocess
import shutil

try:
    import clang.cindex
    from clang.cindex import Index, CursorKind, TypeKind, Config
except ImportError:
    print("Error: clang package not found. Install with: pip install libclang", file=sys.stderr)
    sys.exit(1)

from mcp.server import Server
from mcp.types import (
    Tool,
    TextContent,
)

def find_and_configure_libclang():
    """Find and configure libclang library"""
    import platform
    import glob

    system = platform.system()
    script_dir = os.path.dirname(os.path.abspath(__file__))
    # Go up one directory to find lib folder (since we're in mcp_server subfolder)
    parent_dir = os.path.dirname(script_dir)

    # First, try bundled libraries (self-contained)
    bundled_paths = []
    if system == "Windows":
        bundled_paths = [
            os.path.join(parent_dir, "lib", "windows", "libclang.dll"),
            os.path.join(parent_dir, "lib", "windows", "clang.dll"),
        ]
    elif system == "Darwin":  # macOS
        bundled_paths = [
            os.path.join(parent_dir, "lib", "macos", "libclang.dylib"),
        ]
    else:  # Linux
        bundled_paths = [
            os.path.join(parent_dir, "lib", "linux", "libclang.so.1"),
            os.path.join(parent_dir, "lib", "linux", "libclang.so"),
        ]

    def _preload_linux_dependencies(lib_dir: str) -> None:
        """Load additional shared objects required by bundled libclang."""
        if platform.system() != "Linux":
            return

        try:
            import ctypes
        except ImportError:
            return

        for name in ("libtinfo.so.5", "libtinfo.so.5.9"):
            candidate = os.path.join(lib_dir, name)
            if os.path.exists(candidate):
                try:
                    ctypes.CDLL(candidate)
                    print(f"Preloaded dependency {candidate}", file=sys.stderr)
                    break
                except OSError as exc:
                    print(f"Warning: failed to preload {candidate}: {exc}", file=sys.stderr)

    # Try bundled libraries first
    for path in bundled_paths:
        if os.path.exists(path):
            print(f"Using bundled libclang at: {path}", file=sys.stderr)
            lib_dir = os.path.dirname(path)
            _preload_linux_dependencies(lib_dir)
            Config.set_library_file(path)
            return True

    # Try to find libclang in clang package (pip installed)
    try:
        import clang.cindex
        clang_native_dir = os.path.dirname(clang.cindex.__file__)
        if system == "Linux":
            clang_libclang = os.path.join(clang_native_dir, "native", "libclang.so")
        elif system == "Darwin":
            clang_libclang = os.path.join(clang_native_dir, "native", "libclang.dylib")
        else:
            clang_libclang = os.path.join(clang_native_dir, "native", "libclang.dll")

        if os.path.exists(clang_libclang):
            print(f"Using libclang from clang package at: {clang_libclang}", file=sys.stderr)
            Config.set_library_file(clang_libclang)
            return True
    except Exception as e:
        print(f"Could not find libclang in clang package: {e}", file=sys.stderr)

    print("No bundled libclang found, searching system...", file=sys.stderr)

    # Fallback to system-installed libraries
    if system == "Windows":
        system_paths = [
            # LLVM official installer paths
            r"C:\Program Files\LLVM\bin\libclang.dll",
            r"C:\Program Files (x86)\LLVM\bin\libclang.dll",
            # vcpkg paths
            r"C:\vcpkg\installed\x64-windows\bin\clang.dll",
            r"C:\vcpkg\installed\x86-windows\bin\clang.dll",
            # Conda paths
            r"C:\ProgramData\Anaconda3\Library\bin\libclang.dll",
        ]

        # Try to find in system PATH
        import shutil
        llvm_config = shutil.which("llvm-config")
        if llvm_config:
            try:
                import subprocess
                result = subprocess.run([llvm_config, "--libdir"], capture_output=True, text=True)
                if result.returncode == 0:
                    lib_dir = result.stdout.strip()
                    system_paths.insert(0, os.path.join(lib_dir, "libclang.dll"))
            except Exception:
                pass

    elif system == "Darwin":  # macOS
        system_paths = [
            "/usr/local/lib/libclang.dylib",
            "/opt/homebrew/lib/libclang.dylib",
            "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/libclang.dylib",
        ]

    else:  # Linux
        system_paths = [
            "/usr/lib/llvm-*/lib/libclang.so.1",
            "/usr/lib/x86_64-linux-gnu/libclang-*.so.1",
            "/usr/lib/libclang.so.1",
            "/usr/lib/libclang.so",
        ]

    # Try each system path
    for path_pattern in system_paths:
        if "*" in path_pattern:
            # Handle glob patterns
            matches = glob.glob(path_pattern)
            if matches:
                path = matches[0]  # Use first match
            else:
                continue
        else:
            path = path_pattern

        if os.path.exists(path):
            print(f"Found system libclang at: {path}", file=sys.stderr)
            Config.set_library_file(path)
            return True

    return False

# Try to find and configure libclang
if not find_and_configure_libclang():
    print("Error: Could not find libclang library.", file=sys.stderr)
    print("Please install LLVM/Clang:", file=sys.stderr)
    print("  Windows: Download from https://releases.llvm.org/", file=sys.stderr)
    print("  macOS: brew install llvm", file=sys.stderr)
    print("  Linux: sudo apt install libclang-dev", file=sys.stderr)
    sys.exit(1)

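# The lookup above resolves libclang in this order: a library bundled under
# lib/<platform> next to this package, the binary shipped inside the pip-installed
# clang/libclang package, and finally common system install locations (LLVM
# installer on Windows, Homebrew/Xcode on macOS, or a distro libclang on Linux).
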
class CppAnalyzer:
    def __init__(self, project_root: str):
        self.project_root = Path(project_root)
        self.index = Index.create()
        self.translation_units = {}
        self.file_timestamps = {}  # Track file modification times
        self.last_refresh_check = 0.0  # Timestamp of last refresh check
        self.refresh_interval = 2.0  # Only check for changes every 2 seconds

        # Pre-built indexes for fast searching
        self.class_index = {}  # name -> list of class info
        self.function_index = {}  # name -> list of function info
        self.indexes_built = False

        # Lazy initialization to avoid tool timeouts
        self.initialization_started = False
        self.initialization_complete = False

        # Threading for parallel parsing
        self.parse_lock = threading.Lock()
        # Cap at 16 threads - libclang parsing is mostly I/O bound
        self.max_workers = min(16, (os.cpu_count() or 1) * 2)  # Cap at 16 threads

        self.vcpkg_root = self._find_vcpkg_root()
        self.vcpkg_triplet = self._detect_vcpkg_triplet()
        self.vcpkg_dependencies = self._read_vcpkg_dependencies()

        # Don't parse immediately - do it on first search to avoid timeout
        print("CppAnalyzer ready for lazy initialization", file=sys.stderr)

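    # Illustrative use of this class (the project path is a placeholder):
    #   analyzer = CppAnalyzer("/path/to/cpp/project")
    #   classes = analyzer.search_classes("Renderer")          # case-insensitive regex
    #   functions = analyzer.search_functions("^Init", project_only=True)
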
    def _find_vcpkg_root(self) -> Optional[Path]:
        """Find vcpkg installation directory by reading project configuration"""

        # Method 1: Check for vcpkg.json in project (vcpkg manifest mode)
        vcpkg_json = self.project_root / "vcpkg.json"
        if vcpkg_json.exists():
            print(f"Found vcpkg.json manifest at: {vcpkg_json}", file=sys.stderr)

            # In manifest mode, vcpkg installs to ./vcpkg_installed
            vcpkg_installed = self.project_root / "vcpkg_installed"
            if vcpkg_installed.exists():
                print(f"Using manifest mode vcpkg at: {vcpkg_installed}", file=sys.stderr)
                return vcpkg_installed

        # Method 2: Parse CMakeLists.txt for CMAKE_TOOLCHAIN_FILE
        cmake_file = self.project_root / "CMakeLists.txt"
        if cmake_file.exists():
            try:
                with open(cmake_file, 'r', encoding='utf-8') as f:
                    content = f.read()

                # Look for vcpkg toolchain file path
                import re
                toolchain_match = re.search(r'CMAKE_TOOLCHAIN_FILE["\s]*([^"\s)]+vcpkg\.cmake)', content)
                if toolchain_match:
                    toolchain_path = Path(toolchain_match.group(1).strip('"'))
                    # vcpkg.cmake is typically at /scripts/buildsystems/vcpkg.cmake
                    vcpkg_root = toolchain_path.parent.parent.parent
                    if (vcpkg_root / "installed").exists():
                        print(f"Found vcpkg via CMakeLists.txt at: {vcpkg_root}", file=sys.stderr)
                        return vcpkg_root
            except Exception as e:
                print(f"Could not parse CMakeLists.txt: {e}", file=sys.stderr)

        # Method 3: Check environment variables
        import os
        vcpkg_root_env = os.environ.get('VCPKG_ROOT')
        if vcpkg_root_env:
            vcpkg_path = Path(vcpkg_root_env)
            if vcpkg_path.exists() and (vcpkg_path / "installed").exists():
                print(f"Found vcpkg via VCPKG_ROOT: {vcpkg_path}", file=sys.stderr)
                return vcpkg_path

        # Method 4: Common installation paths (fallback)
        common_paths = [
            Path("C:/vcpkg"),
            Path("C:/dev/vcpkg"),
            Path("C:/tools/vcpkg"),
            self.project_root / "vcpkg",
            self.project_root / ".." / "vcpkg"
        ]

        for path in common_paths:
            if path.exists() and (path / "installed").exists():
                print(f"Found vcpkg at common path: {path}", file=sys.stderr)
                return path

        print("vcpkg not found - using basic include paths", file=sys.stderr)
        return None

    def _detect_vcpkg_triplet(self) -> str:
        """Detect the vcpkg triplet to use"""
        import platform

        # Try to read from CMakeLists.txt first
        cmake_file = self.project_root / "CMakeLists.txt"
        if cmake_file.exists():
            try:
                with open(cmake_file, 'r', encoding='utf-8') as f:
                    content = f.read()
                import re
                triplet_match = re.search(r'VCPKG_TARGET_TRIPLET["\s]*([^"\s)]+)', content)
                if triplet_match:
                    triplet = triplet_match.group(1).strip('"')
                    print(f"Found vcpkg triplet in CMakeLists.txt: {triplet}", file=sys.stderr)
                    return triplet
            except Exception:
                pass

        # Default based on platform
        system = platform.system()
        if system == "Windows":
            return "x64-windows"
        elif system == "Darwin":
            return "x64-osx"
        else:
            return "x64-linux"

    def _read_vcpkg_dependencies(self) -> List[str]:
        """Read vcpkg dependencies from vcpkg.json"""
        vcpkg_json = self.project_root / "vcpkg.json"
        if not vcpkg_json.exists():
            return []

        try:
            import json
            with open(vcpkg_json, 'r', encoding='utf-8') as f:
                data = json.load(f)
            deps = data.get('dependencies', [])

            # Handle both string deps and object deps (with features)
            dep_names = []
            for dep in deps:
                if isinstance(dep, str):
                    dep_names.append(dep)
                elif isinstance(dep, dict) and 'name' in dep:
                    dep_names.append(dep['name'])

            print(f"Found {len(dep_names)} vcpkg dependencies: {', '.join(dep_names[:5])}{'...' if len(dep_names) > 5 else ''}", file=sys.stderr)
            return dep_names
        except Exception as e:
            print(f"Could not read vcpkg.json: {e}", file=sys.stderr)
            return []

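    # Example of the vcpkg.json shape _read_vcpkg_dependencies() understands (illustrative):
    #   { "dependencies": ["sdl2", {"name": "imgui", "features": ["docking-experimental"]}] }
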
    def _scan_project(self):
        """Scan project for C++ files and create translation units (multithreaded)"""
        cpp_extensions = {'.cpp', '.cc', '.cxx', '.c++', '.h', '.hpp', '.hxx', '.h++'}

        # Collect all files to parse
        files_to_parse = []
        for ext in cpp_extensions:
            for file_path in self.project_root.rglob(f"*{ext}"):
                if self._should_include_file(file_path):
                    files_to_parse.append(file_path)

        if not files_to_parse:
            print("No C++ files found to parse", file=sys.stderr)
            return

        print(f"Found {len(files_to_parse)} C++ files, parsing with {self.max_workers} threads...", file=sys.stderr)
        start_time = time.time()

        # Parse files in parallel
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            # Submit all parsing tasks
            future_to_file = {
                executor.submit(self._parse_file_safe, file_path): file_path
                for file_path in files_to_parse
            }

            # Process completed tasks and show progress
            completed = 0
            for future in as_completed(future_to_file):
                completed += 1
                if completed % 20 == 0 or completed == len(files_to_parse):
                    elapsed = time.time() - start_time
                    rate = completed / elapsed if elapsed > 0 else 0
                    print(f"Parsed {completed}/{len(files_to_parse)} files ({rate:.1f} files/sec)", file=sys.stderr)

        elapsed = time.time() - start_time
        successful = len(self.translation_units)
        print(f"Parsing complete: {successful}/{len(files_to_parse)} files in {elapsed:.2f}s", file=sys.stderr)
        print(f"Search indexes built during parsing: {len(self.class_index)} class names, {len(self.function_index)} function names", file=sys.stderr)
        self.indexes_built = True

    def _should_include_file(self, file_path: Path) -> bool:
        """Filter out unwanted files"""
        exclude_dirs = {
            'build', 'cmake-build', '.git', 'third_party', 'external', 'deps', 'thirdparty',
            'mcp_env', 'venv', '.venv', 'env', '.env',  # Python virtual environments
            'vcpkg_installed', 'vcpkg', 'node_modules',  # Package managers
            'bin', 'obj', 'Debug', 'Release', 'x64', 'Win32'  # Build outputs
        }
        return not any(part in exclude_dirs for part in file_path.parts)

    def _is_project_file(self, file_path: str) -> bool:
        """Check if a file belongs to the project (vs external dependencies)"""
        file_path_obj = Path(file_path)

        # File is part of the project if it's under the project root
        try:
            file_path_obj.relative_to(self.project_root)
            return True
        except ValueError:
            # File is outside project root (e.g., vcpkg dependencies, system headers)
            return False

    def _get_file_timestamp(self, file_path: Path) -> float:
        """Get file modification timestamp"""
        try:
            return file_path.stat().st_mtime
        except OSError:
            return 0.0

    def _is_file_modified(self, file_path: Path) -> bool:
        """Check if file has been modified since last parse"""
        file_str = str(file_path)
        current_time = self._get_file_timestamp(file_path)
        last_time = self.file_timestamps.get(file_str, 0.0)
        return current_time > last_time

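    # Note: refresh_if_needed() below reads self.exclude_dirs and self.dependency_dirs,
    # neither of which is assigned in __init__ above; they have to be set on the
    # instance before the first refresh runs.
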
    def refresh_if_needed(self):
        """Check for file changes and re-parse if needed"""
        modified_files = []

        # Check all currently tracked files for modifications
        for file_path_str in list(self.translation_units.keys()):
            file_path = Path(file_path_str)
            if file_path.exists() and self._is_file_modified(file_path):
                modified_files.append(file_path)
            elif not file_path.exists():
                # File was deleted, remove from all indexes
                self._remove_file_from_indexes(file_path_str)
                del self.translation_units[file_path_str]
                del self.file_timestamps[file_path_str]

        # Use the file scanner to find all current C++ files
        from .file_scanner import FileScanner
        scanner = FileScanner(self.project_root, include_dependencies=True)
        scanner.EXCLUDE_DIRS = self.exclude_dirs
        scanner.DEPENDENCY_DIRS = self.dependency_dirs

        current_files = set(scanner.find_cpp_files())
        tracked_files = set(self.translation_units.keys())

        # Find new files
        new_files = current_files - tracked_files
        for file_path_str in new_files:
            file_path = Path(file_path_str)
            if self._should_include_file(file_path):
                modified_files.append(file_path)

        if modified_files:
            print(f"Detected {len(modified_files)} modified/new files, re-parsing...", file=sys.stderr)
            for file_path in modified_files:
                self._parse_file(file_path)  # Indexes updated during parsing

        return len(modified_files)

    def _remove_file_from_indexes(self, file_path: str):
        """Remove all symbols from a deleted file from search indexes"""
        with self.parse_lock:
            # Remove from class_index
            for class_name in list(self.class_index.keys()):
                self.class_index[class_name] = [
                    info for info in self.class_index[class_name]
                    if info.get('file') != file_path
                ]
                # Remove empty entries
                if not self.class_index[class_name]:
                    del self.class_index[class_name]

            # Remove from function_index
            for func_name in list(self.function_index.keys()):
                self.function_index[func_name] = [
                    info for info in self.function_index[func_name]
                    if info.get('file') != file_path
                ]
                # Remove empty entries
                if not self.function_index[func_name]:
                    del self.function_index[func_name]

    def _build_indexes(self):
        """Build search indexes for fast lookups (multithreaded)"""
        print("Building search indexes...", file=sys.stderr)
        start_time = time.time()

        self.class_index.clear()
        self.function_index.clear()

        # Use thread-safe collections for building indexes
        from collections import defaultdict
        temp_class_index = defaultdict(list)
        temp_function_index = defaultdict(list)

        # Build lists of files to process
        files_to_process = list(self.translation_units.items())

        # Process files in parallel
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            # Submit all indexing tasks
            future_to_file = {
                executor.submit(self._index_file_safe, file_path, tu): file_path
                for file_path, tu in files_to_process
            }

            # Process completed tasks and show progress
            completed = 0
            for future in as_completed(future_to_file):
                file_path = future_to_file[future]
                try:
                    class_entries, func_entries = future.result()

                    # Safely merge results
                    with self.parse_lock:
                        for name, entries in class_entries.items():
                            temp_class_index[name].extend(entries)
                        for name, entries in func_entries.items():
                            temp_function_index[name].extend(entries)

                    completed += 1
                    if completed % 20 == 0 or completed == len(files_to_process):
                        elapsed = time.time() - start_time
                        rate = completed / elapsed if elapsed > 0 else 0
                        print(f"Indexed {completed}/{len(files_to_process)} files ({rate:.1f} files/sec)", file=sys.stderr)

                except Exception as e:
                    print(f"Warning: Failed to index {file_path}: {e}", file=sys.stderr)

        # Convert to regular dicts
        self.class_index = dict(temp_class_index)
        self.function_index = dict(temp_function_index)

        elapsed = time.time() - start_time
        print(f"Search indexes built in {elapsed:.2f}s: {len(self.class_index)} class names, {len(self.function_index)} function names", file=sys.stderr)
        self.indexes_built = True

    def _index_file_safe(self, file_path: str, tu) -> Tuple[Dict[str, List], Dict[str, List]]:
        """Thread-safe file indexing for building search indexes"""
        from collections import defaultdict

        class_entries = defaultdict(list)
        func_entries = defaultdict(list)

        try:
            for cursor in tu.cursor.walk_preorder():
                if cursor.kind in [CursorKind.CLASS_DECL, CursorKind.STRUCT_DECL]:
                    if cursor.spelling:
                        class_info = {
                            'name': cursor.spelling,
                            'kind': cursor.kind.name,
                            'file': file_path,
                            'line': cursor.location.line,
                            'column': cursor.location.column,
                            'is_project': self._is_project_file(file_path)
                        }
                        class_entries[cursor.spelling].append(class_info)

                elif cursor.kind in [CursorKind.FUNCTION_DECL, CursorKind.CXX_METHOD]:
                    if cursor.spelling:
                        func_info = {
                            'name': cursor.spelling,
                            'kind': cursor.kind.name,
                            'file': file_path,
                            'line': cursor.location.line,
                            'column': cursor.location.column,
                            'signature': self._get_function_signature(cursor),
                            'is_project': self._is_project_file(file_path)
                        }
                        func_entries[cursor.spelling].append(func_info)

        except Exception as e:
            print(f"Warning: Failed to index {file_path}: {e}", file=sys.stderr)

        return dict(class_entries), dict(func_entries)

    def _parse_file_safe(self, file_path: Path):
        """Thread-safe wrapper for parsing a single file"""
        try:
            result = self._parse_file_internal(file_path)
            if result:
                file_str, tu, timestamp, class_entries, func_entries = result
                with self.parse_lock:
                    self.translation_units[file_str] = tu
                    self.file_timestamps[file_str] = timestamp

                    # Merge index entries during parsing
                    for name, entries in class_entries.items():
                        if name not in self.class_index:
                            self.class_index[name] = []
                        self.class_index[name].extend(entries)

                    for name, entries in func_entries.items():
                        if name not in self.function_index:
                            self.function_index[name] = []
                        self.function_index[name].extend(entries)
        except Exception as e:
            print(f"Warning: Failed to parse {file_path}: {e}", file=sys.stderr)

    def _parse_file_internal(self, file_path: Path) -> Optional[Tuple[str, Any, float, Dict[str, List], Dict[str, List]]]:
        """Internal file parsing logic (called from thread)"""
        try:
            # Build comprehensive compile args for vcpkg project
            args = [
                '-std=c++17',
                '-I.',
                f'-I{self.project_root}',
                f'-I{self.project_root}/src',
                # Preprocessor defines for common libraries
                '-DWIN32',
                '-D_WIN32',
                '-D_WINDOWS',
                '-DNOMINMAX',
                # Common warnings to suppress
                '-Wno-pragma-once-outside-header',
                '-Wno-unknown-pragmas',
                '-Wno-deprecated-declarations',
                # Parse as C++
                '-x', 'c++',
            ]

            # Add vcpkg includes if found
            if self.vcpkg_root and self.vcpkg_triplet:
                vcpkg_include = self.vcpkg_root / "installed" / self.vcpkg_triplet / "include"
                if vcpkg_include.exists():
                    args.append(f'-I{vcpkg_include}')

                    # Add include paths for specific dependencies found in vcpkg.json
                    common_subdir_mappings = {
                        'sdl2': 'SDL2',
                        'bgfx': 'bgfx',
                        'bx': 'bx',
                        'bimg': 'bimg',
                        'imgui': 'imgui',
                        'assimp': 'assimp',
                        'joltphysics': 'Jolt',
                        'openssl': 'openssl',
                        'protobuf': 'google/protobuf',
                        'nlohmann-json': 'nlohmann',
                        'sol2': 'sol'
                    }

                    for dep in self.vcpkg_dependencies:
                        # Check if this dependency has a known subdirectory
                        if dep in common_subdir_mappings:
                            subdir = common_subdir_mappings[dep]
                            lib_path = vcpkg_include / subdir
                            if lib_path.exists():
                                args.append(f'-I{lib_path}')

                        # Also check for exact directory match
                        dep_path = vcpkg_include / dep
                        if dep_path.exists() and dep_path.is_dir():
                            args.append(f'-I{dep_path}')

            # Add Windows SDK includes (try to find current version)
            import glob
            winsdk_patterns = [
                "C:/Program Files (x86)/Windows Kits/10/Include/*/ucrt",
                "C:/Program Files (x86)/Windows Kits/10/Include/*/um",
                "C:/Program Files (x86)/Windows Kits/10/Include/*/shared"
            ]
            for pattern in winsdk_patterns:
                matches = glob.glob(pattern)
                if matches:
                    args.append(f'-I{matches[-1]}')  # Use latest version

            tu = self.index.parse(str(file_path), args=args)
            if tu:
                timestamp = self._get_file_timestamp(file_path)
                file_str = str(file_path)

                # Build indexes during parsing (single AST traversal)
                from collections import defaultdict
                class_entries = defaultdict(list)
                func_entries = defaultdict(list)

                for cursor in tu.cursor.walk_preorder():
                    if cursor.kind in [CursorKind.CLASS_DECL, CursorKind.STRUCT_DECL]:
                        if cursor.spelling:
                            class_info = {
                                'name': cursor.spelling,
                                'kind': cursor.kind.name,
                                'file': file_str,
                                'line': cursor.location.line,
                                'column': cursor.location.column,
                                'is_project': self._is_project_file(file_str)
                            }
                            class_entries[cursor.spelling].append(class_info)

                    elif cursor.kind in [CursorKind.FUNCTION_DECL, CursorKind.CXX_METHOD]:
                        if cursor.spelling:
                            func_info = {
                                'name': cursor.spelling,
                                'kind': cursor.kind.name,
                                'file': file_str,
                                'line': cursor.location.line,
                                'column': cursor.location.column,
                                'signature': self._get_function_signature(cursor),
                                'is_project': self._is_project_file(file_str)
                            }
                            func_entries[cursor.spelling].append(func_info)

                return (file_str, tu, timestamp, dict(class_entries), dict(func_entries))
            elif tu and len(tu.diagnostics) > 0:
                # Only warn for serious errors, not dependency issues
                serious_errors = [d for d in tu.diagnostics if d.severity >= 3]  # Error or Fatal
                if serious_errors:
                    print(f"Warning: Parse errors in {file_path}: {len(serious_errors)} errors", file=sys.stderr)

            return None
        except Exception as e:
            print(f"Warning: Failed to parse {file_path}: {e}", file=sys.stderr)
            return None

    def _parse_file(self, file_path: Path):
        """Single-threaded file parsing (for refresh operations)"""
        result = self._parse_file_internal(file_path)
        if result:
            file_str, tu, timestamp, class_entries, func_entries = result
            self.translation_units[file_str] = tu
            self.file_timestamps[file_str] = timestamp

            # Update indexes for this file
            for name, entries in class_entries.items():
                if name not in self.class_index:
                    self.class_index[name] = []
                self.class_index[name].extend(entries)

            for name, entries in func_entries.items():
                if name not in self.function_index:
                    self.function_index[name] = []
                self.function_index[name].extend(entries)

    def _ensure_initialized(self):
        """Ensure the analyzer is initialized (lazy loading to avoid timeouts)"""
        if not self.initialization_complete:
            if not self.initialization_started:
                print("Starting project analysis (this may take a moment)...", file=sys.stderr)
                self.initialization_started = True
                self._scan_project()  # Indexes are built during parsing now
                self.initialization_complete = True
                print("Project analysis complete - searches will now be fast!", file=sys.stderr)

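    # Shape of the entries kept in class_index / function_index and returned by the
    # searches below (values here are illustrative):
    #   {'name': 'Renderer', 'kind': 'CLASS_DECL', 'file': '/path/to/src/Renderer.h',
    #    'line': 42, 'column': 7, 'is_project': True}
    # Function entries additionally carry a 'signature' string.
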
    def search_classes(self, pattern: str, project_only: bool = True) -> List[Dict[str, Any]]:
        """Search for classes matching pattern"""
        # Ensure initialized on first use
        self._ensure_initialized()

        # Check for file changes before searching (throttled)
        current_time = time.time()
        if current_time - self.last_refresh_check > self.refresh_interval:
            self.refresh_if_needed()
            self.last_refresh_check = current_time

        results = []
        regex = re.compile(pattern, re.IGNORECASE)

        # Search through the pre-built index (much faster)
        for class_name, class_infos in self.class_index.items():
            if regex.search(class_name):
                for class_info in class_infos:
                    # Filter by project_only flag
                    if project_only and not class_info['is_project']:
                        continue
                    results.append(class_info.copy())

        return results

    def search_functions(self, pattern: str, project_only: bool = True) -> List[Dict[str, Any]]:
        """Search for functions matching pattern"""
        # Ensure initialized on first use
        self._ensure_initialized()

        # Check for file changes before searching (throttled)
        current_time = time.time()
        if current_time - self.last_refresh_check > self.refresh_interval:
            self.refresh_if_needed()
            self.last_refresh_check = current_time

        results = []
        regex = re.compile(pattern, re.IGNORECASE)

        # Search through the pre-built index (much faster)
        for func_name, func_infos in self.function_index.items():
            if regex.search(func_name):
                for func_info in func_infos:
                    # Filter by project_only flag
                    if project_only and not func_info['is_project']:
                        continue
                    results.append(func_info.copy())

        return results

    def get_class_info(self, class_name: str) -> Optional[Dict[str, Any]]:
        """Get detailed information about a specific class"""
        for file_path, tu in self.translation_units.items():
            for cursor in tu.cursor.walk_preorder():
                if (cursor.kind in [CursorKind.CLASS_DECL, CursorKind.STRUCT_DECL]
                        and cursor.spelling == class_name):

                    return {
                        'name': cursor.spelling,
                        'kind': cursor.kind.name,
                        'file': file_path,
                        'line': cursor.location.line,
                        'methods': self._get_class_methods(cursor),
                        'members': self._get_class_members(cursor),
                        'base_classes': self._get_base_classes(cursor)
                    }
        return None

    def get_function_signature(self, function_name: str) -> List[Dict[str, Any]]:
        """Get signature details for functions with given name"""
        results = []

        for file_path, tu in self.translation_units.items():
            for cursor in tu.cursor.walk_preorder():
                if (cursor.kind in [CursorKind.FUNCTION_DECL, CursorKind.CXX_METHOD]
                        and cursor.spelling == function_name):

                    results.append({
                        'name': cursor.spelling,
                        'file': file_path,
                        'line': cursor.location.line,
                        'signature': self._get_function_signature(cursor),
                        'return_type': cursor.result_type.spelling,
                        'parameters': self._get_function_parameters(cursor)
                    })

        return results

    def find_in_file(self, file_path: str, pattern: str) -> List[Dict[str, Any]]:
        """Search for symbols within a specific file"""
        results = []
        abs_path = str(self.project_root / file_path)

        if abs_path in self.translation_units:
            tu = self.translation_units[abs_path]
            regex = re.compile(pattern, re.IGNORECASE)

            for cursor in tu.cursor.walk_preorder():
                if (cursor.location.file and
                        str(cursor.location.file) == abs_path and
                        cursor.spelling and
                        regex.search(cursor.spelling)):

                    results.append({
                        'name': cursor.spelling,
                        'kind': cursor.kind.name,
                        'line': cursor.location.line,
                        'column': cursor.location.column
                    })

        return results

    def _get_function_signature(self, cursor) -> str:
        """Extract function signature"""
        try:
            return cursor.type.spelling
        except Exception:
            return f"{cursor.spelling}(...)"

    def _get_function_parameters(self, cursor) -> List[Dict[str, str]]:
        """Get function parameters"""
        params = []
        for child in cursor.get_children():
            if child.kind == CursorKind.PARM_DECL:
                params.append({
                    'name': child.spelling,
                    'type': child.type.spelling
                })
        return params

    def _get_class_methods(self, cursor) -> List[Dict[str, Any]]:
        """Get class methods"""
        methods = []
        for child in cursor.get_children():
            if child.kind == CursorKind.CXX_METHOD:
                methods.append({
                    'name': child.spelling,
                    'signature': self._get_function_signature(child),
                    'line': child.location.line,
                    'access': self._get_access_specifier(child)
                })
        return methods

    def _get_class_members(self, cursor) -> List[Dict[str, Any]]:
        """Get class member variables"""
        members = []
        for child in cursor.get_children():
            if child.kind == CursorKind.FIELD_DECL:
                members.append({
                    'name': child.spelling,
                    'type': child.type.spelling,
                    'line': child.location.line,
                    'access': self._get_access_specifier(child)
                })
        return members

    def _get_base_classes(self, cursor) -> List[str]:
        """Get base classes"""
        bases = []
        for child in cursor.get_children():
            if child.kind == CursorKind.CXX_BASE_SPECIFIER:
                bases.append(child.type.spelling)
        return bases

    def _get_access_specifier(self, cursor) -> str:
        """Get access level (public/private/protected)"""
        access_map = {
            clang.cindex.AccessSpecifier.PUBLIC: "public",
            clang.cindex.AccessSpecifier.PROTECTED: "protected",
            clang.cindex.AccessSpecifier.PRIVATE: "private"
        }
        return access_map.get(cursor.access_specifier, "unknown")

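    # Illustrative call (paths and file contents are placeholders):
    #   analyzer.test_compile_files(
    #       {"path": "widget.h", "content": "#pragma once\nclass Widget {};"},
    #       {"path": "widget.cpp", "content": "Widget w;"},
    #   )
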
    def test_compile_files(self, header_info: Dict[str, str], source_info: Dict[str, str],
                           test_integration: bool = True) -> Dict[str, Any]:
        """
        Test if header/source file pair would compile with the project using libclang.

        Args:
            header_info: Dict with 'path' and 'content' keys
            source_info: Dict with 'path' and 'content' keys
            test_integration: Whether to test integration with existing project

        Returns:
            Dict with compilation results, errors, warnings, etc.
        """
        results = {
            "header_compiles": False,
            "source_compiles": False,
            "links_with_project": False,
            "errors": [],
            "warnings": [],
            "missing_dependencies": [],
            "clang_available": True
        }

        # Check if libclang is available (same as main analyzer)
        if not hasattr(self, 'index') or not self.index:
            results["clang_available"] = False
            results["errors"].append("libclang not available")
            return results

        try:
            with tempfile.TemporaryDirectory() as temp_dir:
                temp_path = Path(temp_dir)

                # Create header file
                header_filename = Path(header_info["path"]).name
                header_path = temp_path / header_filename

                with open(header_path, 'w', encoding='utf-8') as f:
                    f.write(header_info["content"])

                # Test header compilation using libclang
                header_result = self._test_compile_with_libclang(header_path, test_integration)
                results["header_compiles"] = header_result["success"]
                if not header_result["success"]:
                    results["errors"].extend(header_result["errors"])
                    results["warnings"].extend(header_result["warnings"])

                # Create source file
                source_filename = Path(source_info["path"]).name
                source_path = temp_path / source_filename

                # Include the header in the source file
                source_content = f'#include "{header_filename}"\n{source_info["content"]}'

                with open(source_path, 'w', encoding='utf-8') as f:
                    f.write(source_content)

                # Test source compilation using libclang
                source_result = self._test_compile_with_libclang(source_path, test_integration)
                results["source_compiles"] = source_result["success"]
                if not source_result["success"]:
                    results["errors"].extend(source_result["errors"])
                    results["warnings"].extend(source_result["warnings"])

                # Extract missing dependencies from errors
                results["missing_dependencies"] = self._extract_missing_dependencies(results["errors"])

                # Both files compiled successfully means they can link
                if results["header_compiles"] and results["source_compiles"]:
                    results["links_with_project"] = True

        except Exception as e:
            results["errors"].append(f"Test compilation failed: {str(e)}")

        return results

    def _test_compile_with_libclang(self, file_path: Path, test_integration: bool) -> Dict[str, Any]:
        """Test compilation using libclang (same as main analyzer)"""
        try:
            # Use the same compilation arguments as the main analyzer
            compile_args = []

            if test_integration:
                # Add project-specific include paths
                if self.project_root:
                    project_includes = [
                        self.project_root,
                        self.project_root / "include",
                        self.project_root / "src"
                    ]

                    for include_path in project_includes:
                        if include_path.exists():
                            compile_args.extend([f"-I{include_path}"])

                # Add vcpkg includes if available
                if hasattr(self, 'vcpkg_root') and self.vcpkg_root:
                    vcpkg_include = Path(self.vcpkg_root) / "installed" / "x64-windows" / "include"
                    if vcpkg_include.exists():
                        compile_args.append(f"-I{vcpkg_include}")

            # Add temp directory to include path for local headers
            temp_dir = file_path.parent
            compile_args.append(f"-I{temp_dir}")

            # Add standard C++ settings
            compile_args.extend(["-std=c++17", "-x", "c++"])

            # Try to parse the file with libclang
            tu = self.index.parse(str(file_path), args=compile_args)

            errors = []
            warnings = []

            # Check for diagnostics
            for diag in tu.diagnostics:
                message = f"{file_path.name}:{diag.location.line}:{diag.location.column}: {diag.spelling}"

                if diag.severity >= clang.cindex.Diagnostic.Error:
                    errors.append(message)
                elif diag.severity == clang.cindex.Diagnostic.Warning:
                    warnings.append(message)

            success = len(errors) == 0

            return {
                "success": success,
                "errors": errors,
                "warnings": warnings
            }

        except Exception as e:
            return {
                "success": False,
                "errors": [f"libclang compilation test failed: {str(e)}"],
                "warnings": []
            }

    def _check_clang_available(self) -> bool:
        """Check if clang++ is available"""
        try:
            # Try to find clang++ in PATH
            clang_path = shutil.which("clang++")
            if clang_path:
                return True

            # Try common Windows locations
            common_paths = [
                r"C:\Program Files\LLVM\bin\clang++.exe",
                r"C:\Program Files (x86)\LLVM\bin\clang++.exe",
                r"C:\msys64\ucrt64\bin\clang++.exe",
                r"C:\msys64\mingw64\bin\clang++.exe"
            ]

            for path in common_paths:
                if os.path.exists(path):
                    return True

            return False

        except Exception:
            return False

    def _get_clang_command(self) -> str:
        """Get the clang++ command to use"""
        # Try PATH first
        clang_path = shutil.which("clang++")
        if clang_path:
            return "clang++"

        # Try common Windows locations
        common_paths = [
            r"C:\Program Files\LLVM\bin\clang++.exe",
            r"C:\Program Files (x86)\LLVM\bin\clang++.exe",
            r"C:\msys64\ucrt64\bin\clang++.exe",
            r"C:\msys64\mingw64\bin\clang++.exe"
        ]

        for path in common_paths:
            if os.path.exists(path):
                return path

        return "clang++"  # Fallback

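    # Example of the flag list built below when project headers are included
    # (the vcpkg include path is omitted here because it depends on the local triplet):
    #   ['-std=c++17', '-fsyntax-only', '-Wall', '-Wextra',
    #    '-I<project_root>', '-I<project_root>/src',
    #    '-DWIN32', '-D_WIN32', '-D_WINDOWS', '-DNOMINMAX']
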
    def _build_compile_args_for_testing(self, include_project_headers: bool = True) -> List[str]:
        """Build compile arguments for testing"""
        args = [
            "-std=c++17",
            "-fsyntax-only",  # Only check syntax, don't generate output
            "-Wall",  # Enable common warnings
            "-Wextra",  # Enable extra warnings
        ]

        if include_project_headers:
            # Add project include paths
            args.extend([
                f"-I{self.project_root}",
                f"-I{self.project_root}/src",
            ])

            # Add vcpkg includes if available
            if self.vcpkg_root and self.vcpkg_triplet:
                vcpkg_include = self.vcpkg_root / "installed" / self.vcpkg_triplet / "include"
                if vcpkg_include.exists():
                    args.append(f"-I{vcpkg_include}")

        # Add preprocessor defines
        args.extend([
            "-DWIN32",
            "-D_WIN32",
            "-D_WINDOWS",
            "-DNOMINMAX"
        ])

        return args

    def _test_compile_header(self, header_path: Path, test_integration: bool) -> Dict[str, Any]:
        """Test header file compilation"""
        try:
            clang_cmd = self._get_clang_command()
            compile_args = self._build_compile_args_for_testing(test_integration)

            # For header files, we need to create a dummy source file that includes it
            dummy_source = header_path.parent / "dummy_test.cpp"
            with open(dummy_source, 'w') as f:
                f.write(f'#include "{header_path.name}"\nint main() {{ return 0; }}')

            cmd = [clang_cmd] + compile_args + [str(dummy_source)]

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)

            # Clean up dummy file
            dummy_source.unlink(missing_ok=True)

            return {
                "success": result.returncode == 0,
                "errors": self._parse_compiler_output(result.stderr, "error"),
                "warnings": self._parse_compiler_output(result.stderr, "warning")
            }

        except subprocess.TimeoutExpired:
            return {
                "success": False,
                "errors": ["Compilation timeout (>30 seconds)"],
                "warnings": []
            }
        except Exception as e:
            return {
                "success": False,
                "errors": [f"Header compilation test failed: {str(e)}"],
                "warnings": []
            }

    def _test_compile_source(self, source_path: Path, test_integration: bool) -> Dict[str, Any]:
        """Test source file compilation"""
        try:
            clang_cmd = self._get_clang_command()
            compile_args = self._build_compile_args_for_testing(test_integration)

            cmd = [clang_cmd] + compile_args + [str(source_path)]

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)

            return {
                "success": result.returncode == 0,
                "errors": self._parse_compiler_output(result.stderr, "error"),
                "warnings": self._parse_compiler_output(result.stderr, "warning")
            }

        except subprocess.TimeoutExpired:
            return {
                "success": False,
                "errors": ["Compilation timeout (>30 seconds)"],
                "warnings": []
            }
        except Exception as e:
            return {
                "success": False,
                "errors": [f"Source compilation test failed: {str(e)}"],
                "warnings": []
            }

    def _test_linking(self, source_path: Path, test_integration: bool) -> Dict[str, Any]:
        """Test linking with project (basic test)"""
        if not test_integration:
            return {"success": True, "errors": [], "warnings": []}

        try:
            clang_cmd = self._get_clang_command()
            compile_args = self._build_compile_args_for_testing(test_integration)

            # Remove -fsyntax-only for linking test
            compile_args = [arg for arg in compile_args if arg != "-fsyntax-only"]

            # Add output file
            output_path = source_path.parent / "test_output.exe"
            compile_args.extend(["-o", str(output_path)])

            cmd = [clang_cmd] + compile_args + [str(source_path)]

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)

            # Clean up output file
            output_path.unlink(missing_ok=True)

            return {
                "success": result.returncode == 0,
                "errors": self._parse_compiler_output(result.stderr, "error"),
                "warnings": self._parse_compiler_output(result.stderr, "warning")
            }

        except subprocess.TimeoutExpired:
            return {
                "success": False,
                "errors": ["Linking timeout (>30 seconds)"],
                "warnings": []
            }
        except Exception as e:
            return {
                "success": False,
                "errors": [f"Linking test failed: {str(e)}"],
                "warnings": []
            }

    def _parse_compiler_output(self, output: str, message_type: str) -> List[str]:
        """Parse compiler output for errors or warnings"""
        messages = []
        if not output:
            return messages

        lines = output.split('\n')
        for line in lines:
            line = line.strip()
            if message_type.lower() in line.lower() and line:
                # Clean up the message
                messages.append(line)

        return messages

    def _extract_missing_dependencies(self, errors: List[str]) -> List[str]:
        """Extract missing dependencies from error messages"""
        missing_deps = []

        for error in errors:
            # Look for include file not found errors
            if "fatal error:" in error and "file not found" in error:
                # Extract the header name
                import re
                match = re.search(r"'([^']+)'\s+file not found", error)
                if match:
                    missing_deps.append(match.group(1))

            # Look for undefined symbol errors
            elif "undefined reference" in error or "unresolved external symbol" in error:
                # Could extract symbol names here in the future
                pass

        return list(set(missing_deps))  # Remove duplicates

# Import the enhanced Python analyzer
try:
    # Try package import first (when run as module)
    from mcp_server.cpp_analyzer import CppAnalyzer as EnhancedCppAnalyzer
except ImportError:
    # Fall back to direct import (when run as script)
    from cpp_analyzer import CppAnalyzer as EnhancedCppAnalyzer

# Initialize analyzer
PROJECT_ROOT = os.environ.get('CPP_PROJECT_ROOT', None)

# Initialize analyzer as None - will be set when project directory is specified
analyzer = None

# Track if analyzer has been initialized with a valid project
analyzer_initialized = False

# MCP Server
server = Server("cpp-analyzer")

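# The tool handlers below operate on the module-level `analyzer`; it stays None until
# the set_project_directory tool (declared in list_tools below) points it at a project.
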
|
|
1278
|
+
@server.list_tools()
async def list_tools() -> List[Tool]:
    return [
        Tool(
            name="search_classes",
            description="Search for C++ classes by name pattern (regex supported)",
            inputSchema={
                "type": "object",
                "properties": {
                    "pattern": {
                        "type": "string",
                        "description": "Class name pattern to search for (supports regex)"
                    },
                    "project_only": {
                        "type": "boolean",
                        "description": "Only search project files (exclude dependencies like vcpkg, system headers). Default: true",
                        "default": True
                    }
                },
                "required": ["pattern"]
            }
        ),
        Tool(
            name="search_functions",
            description="Search for C++ functions by name pattern (regex supported)",
            inputSchema={
                "type": "object",
                "properties": {
                    "pattern": {
                        "type": "string",
                        "description": "Function name pattern to search for (supports regex)"
                    },
                    "project_only": {
                        "type": "boolean",
                        "description": "Only search project files (exclude dependencies like vcpkg, system headers). Default: true",
                        "default": True
                    },
                    "class_name": {
                        "type": "string",
                        "description": "Optional: search only for methods within this class"
                    }
                },
                "required": ["pattern"]
            }
        ),
        Tool(
            name="get_class_info",
            description="Get detailed information about a specific class",
            inputSchema={
                "type": "object",
                "properties": {
                    "class_name": {
                        "type": "string",
                        "description": "Exact class name to analyze"
                    }
                },
                "required": ["class_name"]
            }
        ),
        Tool(
            name="get_function_signature",
            description="Get signature and details for functions with given name",
            inputSchema={
                "type": "object",
                "properties": {
                    "function_name": {
                        "type": "string",
                        "description": "Exact function name to analyze"
                    },
                    "class_name": {
                        "type": "string",
                        "description": "Optional: specify class name to get method signatures only from that class"
                    }
                },
                "required": ["function_name"]
            }
        ),
        Tool(
            name="search_symbols",
            description="Search for all symbols (classes and functions) matching a pattern",
            inputSchema={
                "type": "object",
                "properties": {
                    "pattern": {
                        "type": "string",
                        "description": "Pattern to search for (supports regex)"
                    },
                    "project_only": {
                        "type": "boolean",
                        "description": "Only search project files (exclude dependencies). Default: true",
                        "default": True
                    },
                    "symbol_types": {
                        "type": "array",
                        "items": {
                            "type": "string",
                            "enum": ["class", "struct", "function", "method"]
                        },
                        "description": "Types of symbols to include. If not specified, includes all types"
                    }
                },
                "required": ["pattern"]
            }
        ),
        Tool(
            name="find_in_file",
            description="Search for symbols within a specific file",
            inputSchema={
                "type": "object",
                "properties": {
                    "file_path": {
                        "type": "string",
                        "description": "Relative path to file from project root"
                    },
                    "pattern": {
                        "type": "string",
                        "description": "Symbol pattern to search for in the file"
                    }
                },
                "required": ["file_path", "pattern"]
            }
        ),
        Tool(
            name="set_project_directory",
            description="Set the project directory to analyze (use this first before other commands)",
            inputSchema={
                "type": "object",
                "properties": {
                    "project_path": {
                        "type": "string",
                        "description": "Absolute path to the C++ project directory"
                    }
                },
                "required": ["project_path"]
            }
        ),
        Tool(
            name="refresh_project",
            description="Manually refresh/re-parse project files to detect changes",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="get_server_status",
            description="Get MCP server status including parsing progress and index stats",
            inputSchema={
                "type": "object",
                "properties": {},
                "required": []
            }
        ),
        Tool(
            name="get_class_hierarchy",
            description="Get complete inheritance hierarchy for a C++ class",
            inputSchema={
                "type": "object",
                "properties": {
                    "class_name": {
                        "type": "string",
                        "description": "Name of the class to analyze"
                    }
                },
                "required": ["class_name"]
            }
        ),
        Tool(
            name="get_derived_classes",
            description="Get all classes that inherit from a given base class",
            inputSchema={
                "type": "object",
                "properties": {
                    "class_name": {
                        "type": "string",
                        "description": "Name of the base class"
                    },
                    "project_only": {
                        "type": "boolean",
                        "description": "Only include project classes (exclude dependencies). Default: true",
                        "default": True
                    }
                },
                "required": ["class_name"]
            }
        ),
        Tool(
            name="find_callers",
            description="Find all functions that call a specific function",
            inputSchema={
                "type": "object",
                "properties": {
                    "function_name": {
                        "type": "string",
                        "description": "Name of the function to find callers for"
                    },
                    "class_name": {
                        "type": "string",
                        "description": "Optional: Class name if searching for a method",
                        "default": ""
                    }
                },
                "required": ["function_name"]
            }
        ),
        Tool(
            name="find_callees",
            description="Find all functions called by a specific function",
            inputSchema={
                "type": "object",
                "properties": {
                    "function_name": {
                        "type": "string",
                        "description": "Name of the function to find callees for"
                    },
                    "class_name": {
                        "type": "string",
                        "description": "Optional: Class name if searching for a method",
                        "default": ""
                    }
                },
                "required": ["function_name"]
            }
        ),
        Tool(
            name="get_call_path",
            description="Find call paths from one function to another",
            inputSchema={
                "type": "object",
                "properties": {
                    "from_function": {
                        "type": "string",
                        "description": "Starting function name"
                    },
                    "to_function": {
                        "type": "string",
                        "description": "Target function name"
                    },
                    "max_depth": {
                        "type": "integer",
                        "description": "Maximum search depth (default: 10)",
                        "default": 10
                    }
                },
                "required": ["from_function", "to_function"]
            }
        )
    ]

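# Illustrative aside (not part of the packaged module): what a client-side
# tools/call request for the definitions above might carry. Tool and argument
# names come from the schemas above; the paths and patterns are placeholders.
#
#   {"name": "set_project_directory",
#    "arguments": {"project_path": "/home/me/my_cpp_project"}}
#
#   {"name": "search_functions",
#    "arguments": {"pattern": "^Render", "project_only": True,
#                  "class_name": "Renderer"}}
#
# The dispatcher below receives exactly those two fields as `name` and
# `arguments`.
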
@server.call_tool()
async def call_tool(name: str, arguments: Dict[str, Any]) -> List[TextContent]:
    try:
        if name == "set_project_directory":
            project_path = arguments["project_path"]

            if not isinstance(project_path, str) or not project_path.strip():
                return [TextContent(type="text", text="Error: 'project_path' must be a non-empty string")]

            if project_path != project_path.strip():
                return [TextContent(type="text", text="Error: 'project_path' may not include leading or trailing whitespace")]

            project_path = project_path.strip()

            if not os.path.isabs(project_path):
                return [TextContent(type="text", text=f"Error: '{project_path}' is not an absolute path")]

            if not os.path.isdir(project_path):
                return [TextContent(type="text", text=f"Error: Directory '{project_path}' does not exist")]

            # Re-initialize analyzer with new path
            global analyzer, analyzer_initialized
            analyzer = EnhancedCppAnalyzer(project_path)
            analyzer_initialized = True

            # Start indexing in the background
            indexed_count = analyzer.index_project(force=False, include_dependencies=True)

            return [TextContent(type="text", text=f"Set project directory to: {project_path}\nIndexed {indexed_count} C++ files")]

        # Check if analyzer is initialized for all other commands
        if not analyzer_initialized or analyzer is None:
            return [TextContent(type="text", text="Error: Project directory not set. Please use 'set_project_directory' first with the path to your C++ project.")]

        if name == "search_classes":
            project_only = arguments.get("project_only", True)
            results = analyzer.search_classes(arguments["pattern"], project_only)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "search_functions":
            project_only = arguments.get("project_only", True)
            class_name = arguments.get("class_name", None)
            results = analyzer.search_functions(arguments["pattern"], project_only, class_name)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "get_class_info":
            result = analyzer.get_class_info(arguments["class_name"])
            if result:
                return [TextContent(type="text", text=json.dumps(result, indent=2))]
            else:
                return [TextContent(type="text", text=f"Class '{arguments['class_name']}' not found")]

        elif name == "get_function_signature":
            function_name = arguments["function_name"]
            class_name = arguments.get("class_name", None)
            results = analyzer.get_function_signature(function_name, class_name)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "search_symbols":
            pattern = arguments["pattern"]
            project_only = arguments.get("project_only", True)
            symbol_types = arguments.get("symbol_types", None)
            results = analyzer.search_symbols(pattern, project_only, symbol_types)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "find_in_file":
            results = analyzer.find_in_file(arguments["file_path"], arguments["pattern"])
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "refresh_project":
            modified_count = analyzer.refresh_if_needed()
            return [TextContent(type="text", text=f"Refreshed project. Re-parsed {modified_count} modified/new files.")]

        elif name == "get_server_status":
            # Determine analyzer type
            analyzer_type = "python_enhanced"

            status = {
                "analyzer_type": analyzer_type,
                "call_graph_enabled": True,
                "usr_tracking_enabled": True
            }

            # Add analyzer stats from enhanced Python analyzer
            status.update({
                "parsed_files": len(analyzer.file_index),
                "indexed_classes": len(analyzer.class_index),
                "indexed_functions": len(analyzer.function_index),
                "indexed_symbols": len(analyzer.usr_index),
                "call_graph_size": len(analyzer.call_graph_analyzer.call_graph),
                "project_files": sum(1 for symbols in analyzer.file_index.values()
                                     for s in symbols if s.is_project)
            })
            return [TextContent(type="text", text=json.dumps(status, indent=2))]

        elif name == "get_class_hierarchy":
            class_name = arguments["class_name"]
            hierarchy = analyzer.get_class_hierarchy(class_name)
            if hierarchy:
                return [TextContent(type="text", text=json.dumps(hierarchy, indent=2))]
            else:
                return [TextContent(type="text", text=f"Class '{class_name}' not found")]

        elif name == "get_derived_classes":
            class_name = arguments["class_name"]
            project_only = arguments.get("project_only", True)
            derived = analyzer.get_derived_classes(class_name, project_only)
            return [TextContent(type="text", text=json.dumps(derived, indent=2))]

        elif name == "find_callers":
            function_name = arguments["function_name"]
            class_name = arguments.get("class_name", "")
            results = analyzer.find_callers(function_name, class_name)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "find_callees":
            function_name = arguments["function_name"]
            class_name = arguments.get("class_name", "")
            results = analyzer.find_callees(function_name, class_name)
            return [TextContent(type="text", text=json.dumps(results, indent=2))]

        elif name == "get_call_path":
            from_function = arguments["from_function"]
            to_function = arguments["to_function"]
            max_depth = arguments.get("max_depth", 10)
            paths = analyzer.get_call_path(from_function, to_function, max_depth)
            return [TextContent(type="text", text=json.dumps(paths, indent=2))]

        else:
            return [TextContent(type="text", text=f"Unknown tool: {name}")]

    except Exception as e:
        return [TextContent(type="text", text=f"Error: {str(e)}")]

async def main():
    # Import here to avoid issues if mcp package not installed
    from mcp.server.stdio import stdio_server

    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())


def run_main():
    """Entry point for console script - wraps async main()"""
    asyncio.run(main())


if __name__ == "__main__":
    asyncio.run(main())
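
# Illustrative aside (not part of the packaged module): a minimal smoke-test
# sketch for the dispatcher, assuming this file is importable as
# mcp_server.cpp_mcp_server and that the MCP decorators return the wrapped
# coroutines unchanged; /abs/path/to/project is a placeholder.
#
#   import asyncio
#   from mcp_server.cpp_mcp_server import call_tool
#
#   async def smoke_test():
#       # Every other tool returns an error until a project directory is set.
#       print((await call_tool("set_project_directory",
#                              {"project_path": "/abs/path/to/project"}))[0].text)
#       print((await call_tool("get_server_status", {}))[0].text)
#
#   asyncio.run(smoke_test())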