IncludeCPP 3.7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of IncludeCPP might be problematic. Click here for more details.
- includecpp/__init__.py +59 -0
- includecpp/__init__.pyi +255 -0
- includecpp/__main__.py +4 -0
- includecpp/cli/__init__.py +4 -0
- includecpp/cli/commands.py +8270 -0
- includecpp/cli/config_parser.py +127 -0
- includecpp/core/__init__.py +19 -0
- includecpp/core/ai_integration.py +2132 -0
- includecpp/core/build_manager.py +2416 -0
- includecpp/core/cpp_api.py +376 -0
- includecpp/core/cpp_api.pyi +95 -0
- includecpp/core/cppy_converter.py +3448 -0
- includecpp/core/cssl/CSSL_DOCUMENTATION.md +2075 -0
- includecpp/core/cssl/__init__.py +42 -0
- includecpp/core/cssl/cssl_builtins.py +2271 -0
- includecpp/core/cssl/cssl_builtins.pyi +1393 -0
- includecpp/core/cssl/cssl_events.py +621 -0
- includecpp/core/cssl/cssl_modules.py +2803 -0
- includecpp/core/cssl/cssl_parser.py +2575 -0
- includecpp/core/cssl/cssl_runtime.py +3051 -0
- includecpp/core/cssl/cssl_syntax.py +488 -0
- includecpp/core/cssl/cssl_types.py +1512 -0
- includecpp/core/cssl_bridge.py +882 -0
- includecpp/core/cssl_bridge.pyi +488 -0
- includecpp/core/error_catalog.py +802 -0
- includecpp/core/error_formatter.py +1016 -0
- includecpp/core/exceptions.py +97 -0
- includecpp/core/path_discovery.py +77 -0
- includecpp/core/project_ui.py +3370 -0
- includecpp/core/settings_ui.py +326 -0
- includecpp/generator/__init__.py +1 -0
- includecpp/generator/parser.cpp +1903 -0
- includecpp/generator/parser.h +281 -0
- includecpp/generator/type_resolver.cpp +363 -0
- includecpp/generator/type_resolver.h +68 -0
- includecpp/py.typed +0 -0
- includecpp/templates/cpp.proj.template +18 -0
- includecpp/vscode/__init__.py +1 -0
- includecpp/vscode/cssl/__init__.py +1 -0
- includecpp/vscode/cssl/language-configuration.json +38 -0
- includecpp/vscode/cssl/package.json +50 -0
- includecpp/vscode/cssl/snippets/cssl.snippets.json +1080 -0
- includecpp/vscode/cssl/syntaxes/cssl.tmLanguage.json +341 -0
- includecpp-3.7.3.dist-info/METADATA +1076 -0
- includecpp-3.7.3.dist-info/RECORD +49 -0
- includecpp-3.7.3.dist-info/WHEEL +5 -0
- includecpp-3.7.3.dist-info/entry_points.txt +2 -0
- includecpp-3.7.3.dist-info/licenses/LICENSE +21 -0
- includecpp-3.7.3.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,2416 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
import os
|
|
3
|
+
import subprocess
|
|
4
|
+
import hashlib
|
|
5
|
+
import shutil
|
|
6
|
+
import json
|
|
7
|
+
import platform
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import List, Optional, Dict, Any
|
|
10
|
+
|
|
11
|
+
from .exceptions import CppBuildError, CppValidationError
|
|
12
|
+
|
|
13
|
+
class BuildManager:
    """Build orchestrator for PyPI-installed IncludeCPP packages.

    Compiles the bundled plugin generator, runs it over the user's .cp
    plugin files, and builds the resulting pybind11 module either via
    CMake or by invoking a compiler directly.
    """

    # Class-level cache for MSYS2 environment
    _msys2_env_cache = None
|
|
16
|
+
|
|
17
|
+
def __init__(self, project_root: Path, build_dir: Path, config):
|
|
18
|
+
"""Initialize BuildManager for PyPI-installed package.
|
|
19
|
+
|
|
20
|
+
Args:
|
|
21
|
+
project_root: User's project directory (with cpp.proj)
|
|
22
|
+
build_dir: AppData build directory
|
|
23
|
+
config: CppProjectConfig instance
|
|
24
|
+
"""
|
|
25
|
+
self.project_root = project_root
|
|
26
|
+
self.build_dir = build_dir
|
|
27
|
+
self.config = config
|
|
28
|
+
|
|
29
|
+
self.plugins_dir = config.plugins_dir
|
|
30
|
+
self.include_dir = config.include_dir
|
|
31
|
+
|
|
32
|
+
self.bin_dir = build_dir / "bin" / ".appc"
|
|
33
|
+
self.bindings_dir = build_dir / "bindings"
|
|
34
|
+
self.cmake_build_dir = build_dir / "build"
|
|
35
|
+
|
|
36
|
+
self.gen_exe = self.bin_dir / self._get_exe_name("plugin_gen")
|
|
37
|
+
self.registry_file = build_dir / ".module_registry.json"
|
|
38
|
+
|
|
39
|
+
self.bin_dir.mkdir(parents=True, exist_ok=True)
|
|
40
|
+
self.bindings_dir.mkdir(parents=True, exist_ok=True)
|
|
41
|
+
self.cmake_build_dir.mkdir(parents=True, exist_ok=True)
|
|
42
|
+
|
|
43
|
+
def _get_exe_name(self, base_name: str) -> str:
|
|
44
|
+
"""Get platform-specific executable name."""
|
|
45
|
+
if platform.system() == "Windows":
|
|
46
|
+
return f"{base_name}.exe"
|
|
47
|
+
return base_name
|
|
48
|
+
|
|
49
|
+
def _compute_hash(self, filepath: Path) -> str:
|
|
50
|
+
"""Compute SHA256 hash of file (full 64-char digest for v2.3.5+)."""
|
|
51
|
+
if not filepath.exists():
|
|
52
|
+
return "0"
|
|
53
|
+
try:
|
|
54
|
+
with open(filepath, 'rb') as f:
|
|
55
|
+
return hashlib.sha256(f.read()).hexdigest()
|
|
56
|
+
except Exception:
|
|
57
|
+
return "0"
|
|
58
|
+
|
|
59
|
+
def _compute_generator_hash(self, parser_cpp: Path, parser_h: Path) -> str:
|
|
60
|
+
"""Compute combined hash of generator source files."""
|
|
61
|
+
hasher = hashlib.sha256()
|
|
62
|
+
for filepath in [parser_cpp, parser_h]:
|
|
63
|
+
if filepath.exists():
|
|
64
|
+
try:
|
|
65
|
+
with open(filepath, 'rb') as f:
|
|
66
|
+
hasher.update(f.read())
|
|
67
|
+
except Exception:
|
|
68
|
+
pass
|
|
69
|
+
return hasher.hexdigest()
|
|
70
|
+
|
|
71
|
+
def _get_generator_source(self) -> Path:
    """Locate parser.cpp shipped inside the installed package.

    Returns:
        Path to ``generator/parser.cpp`` within the package.

    Raises:
        CppBuildError: If the file is absent (broken installation).
    """
    source = Path(__file__).parent.parent / "generator" / "parser.cpp"
    if source.exists():
        return source
    raise CppBuildError(
        f"Generator source not found: {source}\n"
        "This is a package installation error."
    )
|
|
83
|
+
|
|
84
|
+
def _build_generator(self, verbose: bool = False):
    """Compile plugin_gen.exe from parser.cpp with hash checking.

    Skips the build entirely when the stored hash of the generator
    sources matches the current one; otherwise compiles with whichever
    supported compiler is detected (g++, clang++, or MSVC cl) and
    stores the new hash for the next run.

    Args:
        verbose: Print progress and compiler commands.

    Raises:
        CppBuildError: If no compiler is found, compilation fails, or
            the executable was not produced.
    """
    parser_cpp = self._get_generator_source()
    parser_h = parser_cpp.with_suffix('.h')
    package_generator_dir = parser_cpp.parent

    # Check if rebuild needed via hash comparison
    gen_hash_file = self.bin_dir / ".generator_hash"
    if self.gen_exe.exists() and gen_hash_file.exists():
        try:
            stored_hash = gen_hash_file.read_text().strip()
            current_hash = self._compute_generator_hash(parser_cpp, parser_h)
            if stored_hash == current_hash:
                if verbose:
                    print(f"Generator up-to-date: {self.gen_exe}")
                return
            elif verbose:
                print("Generator source changed, rebuilding...")
        except Exception:
            pass  # Hash check failed, rebuild anyway

    if self.gen_exe.exists():
        if verbose:
            print(f"Generator exists but needs rebuild")
    else:
        if verbose:
            print(f"Generator not found, building...")

    if verbose:
        print(f"Compiling generator from: {parser_cpp}")

    compiler = self._detect_cpp_compiler(verbose=verbose)

    # Build the compiler-specific command line. g++ and clang++ accept
    # the same GCC-style flags; cl uses MSVC-style /flags.
    if compiler == "g++":
        cmd = [
            "g++",
            "-std=c++17",
            "-O2",
            f"-I{package_generator_dir}",
            str(parser_cpp),
            "-o",
            str(self.gen_exe)
        ]
    elif compiler == "clang++":
        cmd = [
            "clang++",
            "-std=c++17",
            "-O2",
            f"-I{package_generator_dir}",
            str(parser_cpp),
            "-o",
            str(self.gen_exe)
        ]
    elif compiler == "cl":
        cmd = [
            "cl",
            "/std:c++17",
            "/O2",
            f"/I{package_generator_dir}",
            str(parser_cpp),
            f"/Fe:{self.gen_exe}"
        ]
    else:
        # _detect_cpp_compiler returned None (or something unexpected).
        raise CppBuildError("No C++ compiler found (g++, clang++, or cl)")

    self._run_compiler_command(cmd, verbose=verbose)

    # The compiler may exit 0 without producing output in edge cases;
    # verify the artifact really exists before recording success.
    if not self.gen_exe.exists():
        raise CppBuildError(f"Generator executable not created: {self.gen_exe}")

    # Save hash for future comparisons
    try:
        current_hash = self._compute_generator_hash(parser_cpp, parser_h)
        gen_hash_file = self.bin_dir / ".generator_hash"
        gen_hash_file.write_text(current_hash)
    except Exception:
        pass  # Hash save failed, not critical

    if verbose:
        print(f"Generator compiled: {self.gen_exe}")
|
|
164
|
+
|
|
165
|
+
def _get_msys2_env(self) -> dict:
    """Get MSYS2 MINGW64 environment variables for g++ on Windows.

    Uses class-level caching to avoid repeated os.environ.copy() calls.

    Returns:
        Environment mapping to pass to subprocess calls. On non-Windows
        platforms this is just a snapshot of os.environ.

        NOTE(review): the cached dict is shared across all callers and
        all instances — mutating the returned mapping would leak into
        every later compiler invocation. It also freezes os.environ as
        of the first call.
    """
    if BuildManager._msys2_env_cache is not None:
        return BuildManager._msys2_env_cache

    env = os.environ.copy()
    if platform.system() == "Windows":
        # Set MSYS2 MINGW64 environment
        env["MINGW_PREFIX"] = "/mingw64"
        env["MSYSTEM"] = "MINGW64"
        env["PKG_CONFIG_PATH"] = "/mingw64/lib/pkgconfig:/mingw64/share/pkgconfig"

        # Prepend MSYS2 paths so its toolchain wins over other installs
        # (assumes the default C:/msys64 install location).
        msys_paths = [
            "C:/msys64/mingw64/bin",
            "C:/msys64/usr/local/bin",
            "C:/msys64/usr/bin",
            "C:/msys64/bin"
        ]
        existing_path = env.get("PATH", "")
        env["PATH"] = ";".join(msys_paths) + ";" + existing_path

    BuildManager._msys2_env_cache = env
    return env
|
|
192
|
+
|
|
193
|
+
def _run_compiler_command(self, cmd: list, verbose: bool = False, cwd: str = None):
    """Run compiler command with appropriate environment (MSYS2 on Windows).

    Args:
        cmd: Argument vector for the compiler invocation.
        verbose: Echo the command and its stdout.
        cwd: Optional working directory for the subprocess.

    Returns:
        The CompletedProcess on success.

    Raises:
        CppBuildError: When the compiler exits non-zero; the message
            embeds the command plus captured stderr/stdout.
    """
    env = self._get_msys2_env()

    if verbose:
        print(f"Running: {' '.join(cmd)}")
    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            encoding='utf-8',
            check=True,
            env=env,
            cwd=cwd,
        )
    except subprocess.CalledProcessError as e:
        raise CppBuildError(
            f"Compilation failed:\nCommand: {' '.join(cmd)}\nStderr: {e.stderr}\nStdout: {e.stdout}"
        ) from e

    if verbose and result.stdout:
        print(result.stdout)
    return result
|
|
208
|
+
|
|
209
|
+
def _detect_cpp_compiler(self, verbose: bool = False) -> str:
|
|
210
|
+
"""Detect available C++ compiler with caching.
|
|
211
|
+
|
|
212
|
+
Cache is stored in build_dir/.compiler_cache and cleared with --clean.
|
|
213
|
+
"""
|
|
214
|
+
cache_file = self.build_dir / ".compiler_cache"
|
|
215
|
+
|
|
216
|
+
# Check cache first
|
|
217
|
+
if cache_file.exists():
|
|
218
|
+
try:
|
|
219
|
+
cached = cache_file.read_text().strip()
|
|
220
|
+
if cached and shutil.which(cached):
|
|
221
|
+
if verbose:
|
|
222
|
+
print(f" Using cached compiler: {cached}")
|
|
223
|
+
return cached
|
|
224
|
+
except Exception:
|
|
225
|
+
pass # Cache read failed, detect fresh
|
|
226
|
+
|
|
227
|
+
# Detect compiler
|
|
228
|
+
if verbose:
|
|
229
|
+
print(" Detecting C++ compiler...")
|
|
230
|
+
for compiler in ['g++', 'clang++', 'cl']:
|
|
231
|
+
if shutil.which(compiler):
|
|
232
|
+
# Save to cache
|
|
233
|
+
try:
|
|
234
|
+
cache_file.write_text(compiler)
|
|
235
|
+
if verbose:
|
|
236
|
+
print(f" Found and cached: {compiler}")
|
|
237
|
+
except Exception:
|
|
238
|
+
pass # Cache write failed, not critical
|
|
239
|
+
return compiler
|
|
240
|
+
return None
|
|
241
|
+
|
|
242
|
+
def _clear_compiler_cache(self):
|
|
243
|
+
"""Clear all build caches (called with --clean)."""
|
|
244
|
+
# Clear compiler cache
|
|
245
|
+
cache_file = self.build_dir / ".compiler_cache"
|
|
246
|
+
if cache_file.exists():
|
|
247
|
+
try:
|
|
248
|
+
cache_file.unlink()
|
|
249
|
+
except Exception:
|
|
250
|
+
pass
|
|
251
|
+
|
|
252
|
+
# Clear CMake generator cache
|
|
253
|
+
generator_cache = self.build_dir / ".cmake_generator"
|
|
254
|
+
if generator_cache.exists():
|
|
255
|
+
try:
|
|
256
|
+
generator_cache.unlink()
|
|
257
|
+
except Exception:
|
|
258
|
+
pass
|
|
259
|
+
|
|
260
|
+
# Clear object cache
|
|
261
|
+
obj_cache_dir = self.build_dir / "obj_cache"
|
|
262
|
+
if obj_cache_dir.exists():
|
|
263
|
+
try:
|
|
264
|
+
shutil.rmtree(obj_cache_dir)
|
|
265
|
+
except Exception:
|
|
266
|
+
pass
|
|
267
|
+
|
|
268
|
+
def _scan_plugins(self, verbose: bool = False) -> List[Path]:
|
|
269
|
+
"""Scan user's plugins directory for .cp files."""
|
|
270
|
+
if not self.plugins_dir.exists():
|
|
271
|
+
raise CppBuildError(
|
|
272
|
+
f"Plugins directory not found: {self.plugins_dir}\n"
|
|
273
|
+
f"Create it with: mkdir {self.plugins_dir}"
|
|
274
|
+
)
|
|
275
|
+
|
|
276
|
+
cp_files = list(self.plugins_dir.glob("*.cp"))
|
|
277
|
+
|
|
278
|
+
if not cp_files:
|
|
279
|
+
raise CppBuildError(
|
|
280
|
+
f"No .cp files found in {self.plugins_dir}\n"
|
|
281
|
+
"Create plugin definitions first."
|
|
282
|
+
)
|
|
283
|
+
|
|
284
|
+
if verbose:
|
|
285
|
+
print(f"Found {len(cp_files)} user plugin(s): {[f.name for f in cp_files]}")
|
|
286
|
+
|
|
287
|
+
return cp_files
|
|
288
|
+
|
|
289
|
+
def _generate_bindings(self, verbose: bool = False):
    """Run plugin_gen.exe to generate bindings.cpp for user plugins.

    Invokes the previously-built generator over the whole plugins
    directory; the generator writes bindings.cpp, sources.txt and the
    module registry JSON.

    Args:
        verbose: Echo generator output and paths.

    Raises:
        CppBuildError: If no plugins exist, the generator exits
            non-zero, or bindings.cpp was not produced.
    """

    # Scan user plugins
    user_plugins = self._scan_plugins(verbose)

    # Defensive: _scan_plugins already raises on empty, but guard anyway.
    if len(user_plugins) == 0:
        raise CppBuildError("No plugins found in plugins directory")

    bindings_cpp = self.bindings_dir / "bindings.cpp"
    sources_txt = self.bindings_dir / "sources.txt"

    # Run generator on plugins directory; positional args are the
    # generator's CLI contract: <plugins_dir> <bindings.cpp> <sources.txt> <registry.json>
    cmd = [
        str(self.gen_exe),
        str(self.plugins_dir),
        str(bindings_cpp),
        str(sources_txt),
        str(self.registry_file)
    ]

    try:
        if verbose:
            print(f"Running generator on {len(user_plugins)} plugin(s)...")
            print(f" Plugins dir: {self.plugins_dir}")
            print(f" Bindings output: {bindings_cpp}")

        # Important: Run from project_root so relative paths in .cp files work
        result = subprocess.run(cmd, capture_output=True, text=True, encoding='utf-8', check=True, cwd=str(self.project_root))

        # ALWAYS print stdout/stderr in verbose mode, even on success
        if verbose:
            if result.stdout:
                print("Generator stdout:")
                print(result.stdout)
            if result.stderr:
                print("Generator stderr:")
                print(result.stderr)

    except subprocess.CalledProcessError as e:
        # Surface the full command plus both output streams — generator
        # failures are otherwise hard to diagnose.
        error_msg = f"Plugin generation failed (exit code {e.returncode}):\n"
        error_msg += f"Command: {' '.join(cmd)}\n"
        if e.stdout:
            error_msg += f"Stdout:\n{e.stdout}\n"
        if e.stderr:
            error_msg += f"Stderr:\n{e.stderr}\n"
        raise CppBuildError(error_msg) from e

    # Verify outputs were created — exit code 0 alone is not proof.
    if not bindings_cpp.exists():
        error_msg = f"bindings.cpp not generated: {bindings_cpp}\n"
        error_msg += f"Plugin generator ran successfully but did not create output file.\n"
        error_msg += f"This usually means no .cp files were found or parsed successfully.\n"
        error_msg += f"Total plugins: {len(user_plugins)}"
        raise CppBuildError(error_msg)

    if verbose:
        print(f"Generated: {bindings_cpp}")
        print(f"Generated: {sources_txt}")
        print(f"Generated: {self.registry_file}")
|
|
349
|
+
|
|
350
|
+
def _generate_cmake(self, verbose: bool = False):
|
|
351
|
+
"""Generate CMakeLists.txt in build directory."""
|
|
352
|
+
sources_txt = self.bindings_dir / "sources.txt"
|
|
353
|
+
|
|
354
|
+
if not sources_txt.exists():
|
|
355
|
+
raise CppBuildError(f"sources.txt not found: {sources_txt}")
|
|
356
|
+
|
|
357
|
+
with open(sources_txt, encoding='utf-8') as f:
|
|
358
|
+
sources = [line.strip() for line in f if line.strip()]
|
|
359
|
+
|
|
360
|
+
source_paths = []
|
|
361
|
+
for src in sources:
|
|
362
|
+
src_path = self.project_root / src
|
|
363
|
+
if not src_path.exists():
|
|
364
|
+
raise CppBuildError(f"Source file not found: {src_path}")
|
|
365
|
+
source_paths.append(str(src_path))
|
|
366
|
+
|
|
367
|
+
bindings_cpp = self.bindings_dir / "bindings.cpp"
|
|
368
|
+
|
|
369
|
+
cmake_content = f'''cmake_minimum_required(VERSION 3.15)
|
|
370
|
+
project(includecpp_api VERSION 1.0.0)
|
|
371
|
+
|
|
372
|
+
set(CMAKE_CXX_STANDARD 17)
|
|
373
|
+
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
|
374
|
+
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
|
|
375
|
+
|
|
376
|
+
find_package(Python COMPONENTS Interpreter Development REQUIRED)
|
|
377
|
+
find_package(pybind11 CONFIG REQUIRED)
|
|
378
|
+
|
|
379
|
+
pybind11_add_module(api
|
|
380
|
+
"{bindings_cpp}"
|
|
381
|
+
{chr(10).join(f' "{src}"' for src in source_paths)}
|
|
382
|
+
)
|
|
383
|
+
|
|
384
|
+
target_include_directories(api PRIVATE
|
|
385
|
+
"{self.project_root}"
|
|
386
|
+
"{self.include_dir}"
|
|
387
|
+
)
|
|
388
|
+
|
|
389
|
+
if(MSVC)
|
|
390
|
+
target_compile_options(api PRIVATE /W3 /O2 /EHsc /MT)
|
|
391
|
+
else()
|
|
392
|
+
target_compile_options(api PRIVATE -Wall -O3 -pthread)
|
|
393
|
+
# MinGW on Windows: static linking for MinGW runtime and pthread
|
|
394
|
+
if(WIN32)
|
|
395
|
+
target_link_options(api PRIVATE -static-libgcc -static-libstdc++ -Wl,-Bstatic -lpthread -Wl,-Bdynamic -lws2_32)
|
|
396
|
+
endif()
|
|
397
|
+
endif()
|
|
398
|
+
'''
|
|
399
|
+
|
|
400
|
+
cmake_file = self.build_dir / "CMakeLists.txt"
|
|
401
|
+
cmake_file.write_text(cmake_content)
|
|
402
|
+
|
|
403
|
+
if verbose:
|
|
404
|
+
print(f"Generated CMakeLists.txt with {len(source_paths)} source(s)")
|
|
405
|
+
|
|
406
|
+
def _configure_cmake(self, verbose: bool = False):
    """Configure CMake build with generator caching.

    Tries the cached generator first; if none is cached (or the cached
    one stopped working), walks a platform-specific preference list and
    caches the first generator that configures successfully. A cached
    "NONE" marker means configuration failed before, so the caller
    should fall back to direct compilation without retrying CMake.

    Args:
        verbose: Print configuration progress.

    Raises:
        CppBuildError: When CMake was previously marked unavailable, or
            every candidate generator fails.
    """
    generator_cache = self.build_dir / ".cmake_generator"
    if generator_cache.exists():
        cached = generator_cache.read_text().strip()
        if cached == "NONE":
            # No working generator found previously
            raise CppBuildError("CMake not available (cached). Use direct compilation.")
        if cached:
            # Try cached generator
            if verbose:
                print(f"Using cached CMake generator: {cached}")
            try:
                env = self._get_msys2_env()
                cmd = ["cmake", "-B", str(self.cmake_build_dir), "-S", str(self.build_dir), "-G", cached]
                subprocess.run(cmd, capture_output=True, text=True, encoding='utf-8', check=True, env=env)
                if verbose:
                    print(f"CMake configured with {cached}")
                return
            except subprocess.CalledProcessError:
                # Cached generator no longer works, re-detect
                generator_cache.unlink()
                if verbose:
                    print(f"Cached generator failed, re-detecting...")

    # Preference order: MinGW first on Windows (matches the MSYS2 env),
    # plain Makefiles first elsewhere.
    generators = []
    if platform.system() == "Windows":
        generators = ["MinGW Makefiles", "Ninja", "Visual Studio 17 2022", "Visual Studio 16 2019"]
    else:
        generators = ["Unix Makefiles", "Ninja"]

    env = self._get_msys2_env()
    last_error = None

    for generator in generators:
        try:
            cmd = ["cmake", "-B", str(self.cmake_build_dir), "-S", str(self.build_dir), "-G", generator]
            if verbose:
                print(f"Trying CMake generator: {generator}")

            subprocess.run(cmd, capture_output=True, text=True, encoding='utf-8', check=True, env=env)

            # Success - cache this generator
            generator_cache.write_text(generator)
            if verbose:
                print(f"CMake configured with {generator} (cached)")
            return

        except subprocess.CalledProcessError as e:
            last_error = e
            if verbose:
                print(f"Generator {generator} failed, trying next...")

            # Clean CMake cache before trying next generator — CMake
            # refuses to re-configure an existing build tree with a
            # different generator.
            cmake_cache = self.cmake_build_dir / "CMakeCache.txt"
            cmake_files = self.cmake_build_dir / "CMakeFiles"
            if cmake_cache.exists():
                cmake_cache.unlink()
            if cmake_files.exists():
                shutil.rmtree(cmake_files)

    # No generator worked - cache this result
    generator_cache.write_text("NONE")
    raise CppBuildError(
        f"CMake configuration failed with all generators.\n"
        f"Last error: {last_error.stderr if last_error else 'Unknown'}"
    ) from last_error
|
|
473
|
+
|
|
474
|
+
def _compile_cpp(self, verbose: bool = False):
    """Build the configured CMake project in Release mode.

    Raises:
        CppBuildError: When the CMake build exits non-zero.
    """
    # MSYS2 environment keeps g++/cmake behaving consistently on Windows.
    env = self._get_msys2_env()

    cmd = ["cmake", "--build", str(self.cmake_build_dir), "--config", "Release"]

    if verbose:
        print(f"Running: {' '.join(cmd)}")
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, encoding='utf-8', check=True, env=env)
    except subprocess.CalledProcessError as e:
        raise CppBuildError(
            f"C++ compilation failed:\n{e.stderr}"
        ) from e

    if verbose and result.stdout:
        print(result.stdout)
    if verbose:
        print("C++ compilation successful")
|
|
501
|
+
|
|
502
|
+
def _compile_direct(self, verbose: bool = False):
    """Direct compilation with g++/clang++ without CMake (fallback).

    Builds the bindings plus all user sources into a single shared
    module in one compiler invocation. Used when CMake is unavailable.

    Args:
        verbose: Print the full command and progress.

    Raises:
        CppBuildError: If no compiler is found, sources.txt is missing,
            or compilation fails.
    """
    compiler = self._detect_cpp_compiler(verbose=verbose)
    if not compiler:
        raise CppBuildError("No C++ compiler found (g++, clang++, or cl)")

    # Get source files
    sources_txt = self.bindings_dir / "sources.txt"
    if not sources_txt.exists():
        raise CppBuildError(f"sources.txt not found: {sources_txt}")

    with open(sources_txt, encoding='utf-8') as f:
        sources = [line.strip() for line in f if line.strip()]

    source_paths = [str(self.project_root / src) for src in sources]
    bindings_cpp = str(self.bindings_dir / "bindings.cpp")

    # Get Python include and lib paths by querying the interpreter —
    # this keeps the build tied to the exact running Python.
    python_include = subprocess.check_output(
        [sys.executable, "-c", "import sysconfig; print(sysconfig.get_path('include'))"],
        text=True, encoding='utf-8'
    ).strip()

    pybind11_include = subprocess.check_output(
        [sys.executable, "-c", "import pybind11; print(pybind11.get_include())"],
        text=True, encoding='utf-8'
    ).strip()

    # Output file (extension from _get_pyd_suffix, e.g. .pyd/.so)
    output_file = str(self.bindings_dir / f"api{self._get_pyd_suffix()}")

    # Build command
    if compiler in ['g++', 'clang++']:
        cmd = [
            compiler,
            "-O3",
            "-Wall",
            "-shared",
            "-std=c++17",
            "-fPIC",
            f"-I{python_include}",
            f"-I{pybind11_include}",
            f"-I{self.project_root}",
            f"-I{self.include_dir}",
            bindings_cpp,
            *source_paths,
            "-o", output_file
        ]

        # Platform-specific flags
        if platform.system() == "Windows":
            # MinGW on Windows: Need to link Python library for symbols
            # Find Python libs directory (LIBDIR when set, else <prefix>/libs)
            python_libs_cmd = [sys.executable, "-c",
                "import sysconfig; import os; "
                "libdir = sysconfig.get_config_var('LIBDIR'); "
                "prefix = sysconfig.get_config_var('prefix'); "
                "print(libdir if libdir and os.path.exists(libdir) else os.path.join(prefix, 'libs'))"]
            python_libs_dir = subprocess.check_output(python_libs_cmd, text=True, encoding='utf-8').strip()

            # Get Python library name (e.g. python312)
            py_version = f"python{sys.version_info.major}{sys.version_info.minor}"

            # Static MinGW runtime + static pthread so the module does
            # not depend on MSYS2 DLLs being on PATH at import time.
            cmd.extend([
                "-static-libgcc",
                "-static-libstdc++",
                "-Wl,-Bstatic",
                "-lpthread",
                "-Wl,-Bdynamic",
                "-lws2_32",
                f"-L{python_libs_dir}",
                f"-l{py_version}"
            ])
            if verbose:
                print(f"Linking with {py_version} from {python_libs_dir}")
        else:
            # Linux/macOS
            cmd.append("-pthread")

    elif compiler == 'cl':  # MSVC
        cmd = [
            "cl",
            "/O2",
            "/EHsc",
            "/std:c++17",
            "/LD",  # Create DLL
            f"/I{python_include}",
            f"/I{pybind11_include}",
            f"/I{self.project_root}",
            f"/I{self.include_dir}",
            bindings_cpp,
            *source_paths,
            f"/Fe:{output_file}"
        ]
    else:
        raise CppBuildError(f"Unsupported compiler: {compiler}")

    if verbose:
        print(f"Direct compilation with {compiler}")
        print(f"Command: {' '.join(cmd)}")

    self._run_compiler_command(cmd, verbose=verbose, cwd=str(self.bindings_dir))

    if verbose:
        print(f"Module compiled: {output_file}")

    # Copy MinGW DLLs if on Windows (after direct compilation)
    self._copy_mingw_dlls(verbose=verbose)
|
|
610
|
+
|
|
611
|
+
def _compile_direct_incremental(self, verbose: bool = False):
    """Incremental direct compilation - only recompile changed .o files.

    Compiles each source to an object file in build_dir/obj_cache,
    skipping any whose .o is newer than the source, then links them all
    into the final module. MSVC falls back to the non-incremental path.

    NOTE(review): staleness is judged by source mtime only — a changed
    header does not trigger recompilation of files that include it;
    --clean (which wipes obj_cache) is the recovery path. Confirm this
    is the intended trade-off.

    Args:
        verbose: Print per-file progress.

    Raises:
        CppBuildError: If no compiler is found, sources.txt is missing,
            or any compile/link step fails.
    """
    compiler = self._detect_cpp_compiler(verbose=verbose)
    if not compiler:
        raise CppBuildError("No C++ compiler found (g++, clang++, or cl)")

    if compiler == 'cl':
        # MSVC doesn't support our incremental approach, fall back
        return self._compile_direct(verbose=verbose)

    # Create object cache directory
    obj_cache_dir = self.build_dir / "obj_cache"
    obj_cache_dir.mkdir(parents=True, exist_ok=True)

    # Get source files
    sources_txt = self.bindings_dir / "sources.txt"
    if not sources_txt.exists():
        raise CppBuildError(f"sources.txt not found: {sources_txt}")

    with open(sources_txt, encoding='utf-8') as f:
        sources = [line.strip() for line in f if line.strip()]

    source_paths = [self.project_root / src for src in sources]
    bindings_cpp = self.bindings_dir / "bindings.cpp"

    # Get includes (query the running interpreter, as in _compile_direct)
    python_include = subprocess.check_output(
        [sys.executable, "-c", "import sysconfig; print(sysconfig.get_path('include'))"],
        text=True, encoding='utf-8'
    ).strip()
    pybind11_include = subprocess.check_output(
        [sys.executable, "-c", "import pybind11; print(pybind11.get_include())"],
        text=True, encoding='utf-8'
    ).strip()

    include_flags = [
        f"-I{python_include}",
        f"-I{pybind11_include}",
        f"-I{self.project_root}",
        f"-I{self.include_dir}",
    ]

    # Compile each source to .o if needed
    objects_to_link = []
    recompiled = 0

    # Compile bindings.cpp (recompile when the .o is missing or stale)
    bindings_obj = obj_cache_dir / "bindings.o"
    if not bindings_obj.exists() or bindings_cpp.stat().st_mtime > bindings_obj.stat().st_mtime:
        if verbose:
            print(f" Compiling bindings.cpp...")
        cmd = [compiler, "-c", "-O3", "-std=c++17", "-fPIC", *include_flags,
            str(bindings_cpp), "-o", str(bindings_obj)]
        self._run_compiler_command(cmd, verbose=verbose, cwd=str(self.bindings_dir))
        recompiled += 1
    objects_to_link.append(str(bindings_obj))

    # Compile source files
    for src_path in source_paths:
        # NOTE(review): missing sources are silently skipped here,
        # whereas _generate_cmake raises — confirm intended.
        if not src_path.exists():
            continue
        obj_file = obj_cache_dir / (src_path.stem + ".o")

        if not obj_file.exists() or src_path.stat().st_mtime > obj_file.stat().st_mtime:
            if verbose:
                print(f" Compiling {src_path.name}...")
            cmd = [compiler, "-c", "-O3", "-std=c++17", "-fPIC", *include_flags,
                str(src_path), "-o", str(obj_file)]
            self._run_compiler_command(cmd, verbose=verbose, cwd=str(self.bindings_dir))
            recompiled += 1

        objects_to_link.append(str(obj_file))

    if verbose:
        print(f" Recompiled {recompiled} file(s), linking...")

    # Link all .o files (link always runs, even when nothing recompiled)
    output_file = str(self.bindings_dir / f"api{self._get_pyd_suffix()}")
    link_cmd = [compiler, "-shared", "-o", output_file, *objects_to_link]

    # Platform-specific link flags
    if platform.system() == "Windows":
        python_libs_cmd = [sys.executable, "-c",
            "import sysconfig; import os; "
            "libdir = sysconfig.get_config_var('LIBDIR'); "
            "prefix = sysconfig.get_config_var('prefix'); "
            "print(libdir if libdir and os.path.exists(libdir) else os.path.join(prefix, 'libs'))"]
        python_libs_dir = subprocess.check_output(python_libs_cmd, text=True, encoding='utf-8').strip()
        py_version = f"python{sys.version_info.major}{sys.version_info.minor}"

        link_cmd.extend([
            "-static-libgcc", "-static-libstdc++",
            "-Wl,-Bstatic", "-lpthread", "-Wl,-Bdynamic",
            "-lws2_32", f"-L{python_libs_dir}", f"-l{py_version}"
        ])
    else:
        link_cmd.append("-pthread")

    self._run_compiler_command(link_cmd, verbose=verbose, cwd=str(self.bindings_dir))

    if verbose:
        print(f"Module compiled: {output_file}")

    self._copy_mingw_dlls(verbose=verbose)
|
|
715
|
+
|
|
716
|
+
def _copy_mingw_dlls(self, verbose: bool = False):
|
|
717
|
+
"""Copy required MinGW DLLs to bindings directory on Windows."""
|
|
718
|
+
if platform.system() != "Windows":
|
|
719
|
+
return
|
|
720
|
+
|
|
721
|
+
msys_bin = Path("C:/msys64/mingw64/bin")
|
|
722
|
+
if not msys_bin.exists():
|
|
723
|
+
return
|
|
724
|
+
|
|
725
|
+
required_dlls = ["libwinpthread-1.dll", "libgcc_s_seh-1.dll", "libstdc++-6.dll"]
|
|
726
|
+
|
|
727
|
+
for dll_name in required_dlls:
|
|
728
|
+
source = msys_bin / dll_name
|
|
729
|
+
dest = self.bindings_dir / dll_name
|
|
730
|
+
|
|
731
|
+
if source.exists() and not dest.exists():
|
|
732
|
+
shutil.copy2(str(source), str(dest))
|
|
733
|
+
if verbose:
|
|
734
|
+
print(f"Copied {dll_name} to bindings directory")
|
|
735
|
+
|
|
736
|
+
    def _generate_cpp_api_extensions(self, modules: Dict, verbose: bool = False):
        """Generate cpp_api_extensions.pyi in package directory for VSCode IntelliSense.

        This is the critical file that makes VSCode autocomplete work!
        It provides type hints for the ModuleWrapper class that show what
        attributes will be available for each module.

        Args:
            modules: Registry mapping of module name -> module descriptor
                (keys used here: 'structs', 'classes', 'functions').
            verbose: Print the path of the generated stub file.
        """
        # Find package directory: this file lives under <package>/core/, so
        # two .parent hops reach the package root.
        package_root = Path(__file__).parent.parent
        pyi_file = package_root / "core" / "cpp_api_extensions.pyi"

        with open(pyi_file, 'w', encoding='utf-8') as f:
            f.write('"""Auto-generated type stubs for IncludeCPP module wrappers.\n\n')
            f.write('This file enables VSCode IntelliSense autocomplete for C++ modules.\n')
            f.write('DO NOT EDIT - Auto-generated by IncludeCPP build system.\n')
            f.write('"""\n\n')
            f.write('from typing import Any, List, Dict, Optional, Union, Protocol, overload\n\n')

            # Generate a Protocol/class for each module's wrapper
            for module_name, module_info in modules.items():
                class_name = f"{module_name.capitalize()}ModuleWrapper"

                f.write(f'class {class_name}(Protocol):\n')
                f.write(f'    """Type hints for {module_name} module wrapper (VSCode autocomplete support)."""\n\n')

                # Add getInfo method (from ModuleWrapper base class)
                f.write('    def getInfo(self) -> Dict[str, Any]:\n')
                f.write(f'        """Get {module_name} module information."""\n')
                f.write('        ...\n\n')

                # Generate STRUCT types (v2.0+)
                structs = module_info.get('structs', [])
                for struct in structs:
                    struct_name = struct.get('name', '')
                    is_template = struct.get('is_template', False)
                    template_types = struct.get('template_types', [])
                    # 'fields' and 'doc' are unpacked here but only consumed
                    # by _write_struct_protocol via the 'struct' dict itself.
                    fields = struct.get('fields', [])
                    doc = struct.get('doc', '')

                    if is_template:
                        # Generate struct for each template type
                        # (name-mangled as <StructName>_<type>).
                        for ttype in template_types:
                            full_name = f"{struct_name}_{ttype}"
                            self._write_struct_protocol(f, full_name, struct, ttype)
                    else:
                        # Non-template struct
                        self._write_struct_protocol(f, struct_name, struct, None)

                # Generate classes as nested class attributes
                classes = module_info.get('classes', [])
                for cls in classes:
                    class_name_inner = cls.get('name', '')
                    class_doc = cls.get('doc', '')

                    f.write(f'    class {class_name_inner}:\n')
                    if class_doc:
                        f.write(f'        """{class_doc}"""\n\n')
                    else:
                        f.write(f'        """C++ class: {class_name_inner}"""\n\n')

                    constructors = cls.get('constructors', [])
                    if constructors and len(constructors) > 1:
                        # Multiple constructors - use @overload
                        for ctor in constructors:
                            param_types = ctor.get('params', [])
                            f.write('        @overload\n')
                            if param_types:
                                # Parameter names are not recorded for
                                # constructors, so emit positional arg0..argN.
                                param_list = ['self']
                                for i, ptype in enumerate(param_types):
                                    py_type = self._cpp_to_python_type(ptype)
                                    param_list.append(f'arg{i}: {py_type}')
                                params_str = ', '.join(param_list)
                                f.write(f'        def __init__({params_str}) -> None: ...\n')
                            else:
                                f.write(f'        def __init__(self) -> None: ...\n')
                        f.write('\n')
                        # Actual implementation signature
                        f.write('        def __init__(self, *args: Any, **kwargs: Any) -> None:\n')
                        f.write(f'            """Initialize {class_name_inner} instance"""\n')
                        f.write('            ...\n\n')
                    elif constructors and len(constructors) == 1:
                        # Single constructor
                        param_types = constructors[0].get('params', [])
                        if param_types:
                            param_list = ['self']
                            for i, ptype in enumerate(param_types):
                                py_type = self._cpp_to_python_type(ptype)
                                param_list.append(f'arg{i}: {py_type}')
                            params_str = ', '.join(param_list)
                            f.write(f'        def __init__({params_str}) -> None:\n')
                        else:
                            f.write(f'        def __init__(self) -> None:\n')
                        f.write(f'            """Initialize {class_name_inner} instance"""\n')
                        f.write('            ...\n\n')
                    else:
                        # Fallback - generic constructor
                        f.write('        def __init__(self, *args: Any, **kwargs: Any) -> None:\n')
                        f.write(f'            """Initialize {class_name_inner} instance"""\n')
                        f.write('            ...\n\n')

                    # Generate methods (registry may store either dicts with
                    # metadata or plain name strings).
                    methods = cls.get('methods', [])
                    if isinstance(methods, list):
                        for method in methods:
                            if isinstance(method, dict):
                                method_name = method.get('name', '')
                                method_doc = method.get('doc', '')
                            else:
                                method_name = str(method)
                                method_doc = ''

                            if method_name:
                                f.write(f'        def {method_name}(self, *args: Any, **kwargs: Any) -> Any:\n')
                                if method_doc:
                                    f.write(f'            """{method_doc}"""\n')
                                else:
                                    f.write(f'            """C++ method: {method_name}"""\n')
                                f.write('            ...\n\n')

                    # Generate fields as properties
                    fields_list = cls.get('fields', [])
                    for field in fields_list:
                        if isinstance(field, dict):
                            field_name = field.get('name', '')
                            field_type = self._cpp_to_python_type(field.get('type', 'Any'))
                        else:
                            field_name = str(field)
                            field_type = 'Any'

                        if field_name:
                            f.write(f'        {field_name}: {field_type}\n')

                    if not methods and not fields_list:
                        # Keep the emitted class body syntactically valid.
                        f.write('        pass\n')

                    f.write('\n')

                # Generate module-level functions as methods
                functions = module_info.get('functions', [])
                for func in functions:
                    if isinstance(func, dict):
                        func_name = func.get('name', '')
                        func_doc = func.get('doc', '')
                    else:
                        func_name = str(func)
                        func_doc = ''

                    if func_name:
                        # Module-level functions are callable directly on the wrapper
                        f.write(f'    def {func_name}(self, *args: Any, **kwargs: Any) -> Any:\n')
                        if func_doc:
                            f.write(f'        """{func_doc}"""\n')
                        else:
                            f.write(f'        """C++ function: {func_name}"""\n')
                        f.write('        ...\n\n')

                if not classes and not functions and not structs:
                    # Empty module: getInfo alone is enough, but pass is harmless.
                    f.write('    pass\n')

                f.write('\n\n')

            f.write('# CppApi with typed include() overloads for each module\n')
            f.write('class CppApi:\n')
            f.write('    """C++ API Manager with typed module loading.\n\n')
            f.write('    The include() method returns a module wrapper with full type hints\n')
            f.write('    for VSCode/PyCharm autocomplete support.\n')
            f.write('    """\n\n')

            f.write('    def __init__(self, project_root: Optional[str] = None, auto_update: bool = True) -> None:\n')
            f.write('        """Initialize CppApi.\n\n')
            f.write('        Args:\n')
            f.write('            project_root: Path to project root (default: auto-detect)\n')
            f.write('            auto_update: Whether to auto-rebuild on source changes\n')
            f.write('        """\n')
            f.write('        ...\n\n')

            # Generate overloaded include() methods for each module.
            # NOTE(review): a plain `str` default does not discriminate
            # overloads for type checkers — typing.Literal["<name>"] would;
            # confirm whether Pylance resolves these as intended.
            for module_name, _ in modules.items():
                wrapper_class = f"{module_name.capitalize()}ModuleWrapper"
                f.write('    @overload\n')
                f.write(f'    def include(self, module_name: str = "{module_name}", auto_update: Optional[bool] = None) -> {wrapper_class}: ...\n\n')

            # Fallback overload for unknown modules
            f.write('    @overload\n')
            f.write('    def include(self, module_name: str, auto_update: Optional[bool] = None) -> Any: ...\n\n')

            # Actual implementation signature
            f.write('    def include(self, module_name: str, auto_update: Optional[bool] = None) -> Any:\n')
            f.write('        """Load a C++ module.\n\n')
            f.write('        Args:\n')
            f.write('            module_name: Name of the module to load\n')
            f.write('            auto_update: Override auto-update setting for this module\n\n')
            f.write('        Returns:\n')
            f.write('            ModuleWrapper with access to C++ classes, functions, and structs\n')
            f.write('        """\n')
            f.write('        ...\n\n')

            f.write('    def rebuild(self, verbose: bool = False) -> bool:\n')
            f.write('        """Rebuild all C++ modules."""\n')
            f.write('        ...\n\n')

            f.write('    def list_modules(self) -> List[str]:\n')
            f.write('        """List available modules."""\n')
            f.write('        ...\n')

        if verbose:
            print(f"Generated VSCode IntelliSense stubs: {pyi_file}")
    def _generate_init_pyi_overloads(self, modules: Dict, verbose: bool = False):
        """Generate module declarations in __init__.pyi for dynamic module imports.

        This enables VSCode autocomplete for: from includecpp import fast_list
        By adding explicit module variable declarations.

        Args:
            modules: Registry mapping of module name -> module descriptor.
            verbose: Print progress / skip reasons.
        """
        package_root = Path(__file__).parent.parent
        init_pyi = package_root / "__init__.pyi"

        if not init_pyi.exists():
            if verbose:
                print(f"__init__.pyi not found at {init_pyi}")
            return

        content = init_pyi.read_text(encoding='utf-8')

        # Markers for the auto-generated section
        start_marker = "# Dynamic module access via: from includecpp import <module_name>"
        auto_gen_marker = "# Auto-generated module declarations"
        end_marker = "def __dir__"

        if start_marker not in content:
            if verbose:
                print("Marker not found in __init__.pyi, skipping module generation")
            return

        # Generate module declarations - these are the KEY for IDE autocomplete!
        # Each declares "<module>: <Module>ModuleWrapper" at package level.
        declarations = []
        declarations.append(auto_gen_marker)
        declarations.append("# These allow: from includecpp import <module_name>")
        for module_name in modules.keys():
            class_name = f"{module_name.capitalize()}ModuleWrapper"
            declarations.append(f'{module_name}: {class_name}')
        declarations.append("")

        # Parse and rebuild the file content.
        # Line-by-line state machine: drop any previous auto-generated
        # section, then splice the fresh declarations immediately before
        # the "def __dir__" line.
        lines = content.split('\n')
        new_lines = []
        in_auto_section = False
        found_start_marker = False
        declarations_inserted = False

        for line in lines:
            # Start of auto-generated section (skip old content)
            if auto_gen_marker in line:
                in_auto_section = True
                continue

            # Skip lines in auto-generated section until we hit end marker
            if in_auto_section:
                if end_marker in line:
                    # End of auto section, insert new declarations
                    in_auto_section = False
                    new_lines.extend(declarations)
                    new_lines.append(line)
                    declarations_inserted = True
                    continue
                # Skip old auto-generated lines (module declarations)
                if ':' in line and 'ModuleWrapper' in line:
                    continue
                # NOTE(review): assumes any comment mentioning "module"
                # inside this section is auto-generated — confirm no
                # hand-written comments can match here.
                if line.strip().startswith('#') and 'module' in line.lower():
                    continue
                # Keep other lines
                # NOTE(review): blank lines inside the auto section are
                # intentionally dropped by this check.
                if line.strip():
                    new_lines.append(line)
                continue

            # Found the section marker
            if start_marker in line:
                new_lines.append(line)
                found_start_marker = True
                continue

            # First run: insert declarations before def __dir__ if not already done
            if found_start_marker and not declarations_inserted and end_marker in line:
                new_lines.extend(declarations)
                declarations_inserted = True

            new_lines.append(line)

        init_pyi.write_text('\n'.join(new_lines), encoding='utf-8')

        if verbose:
            print(f"Updated __init__.pyi with {len(modules)} module declarations")
def _write_struct_protocol(self, f, struct_name: str, struct: Dict, template_type: Optional[str]):
|
|
1030
|
+
"""Write struct class to protocol file for VSCode autocomplete."""
|
|
1031
|
+
f.write(f' class {struct_name}:\n')
|
|
1032
|
+
|
|
1033
|
+
doc = struct.get('doc', f'Struct: {struct["name"]}')
|
|
1034
|
+
if template_type:
|
|
1035
|
+
doc += f'<{template_type}>'
|
|
1036
|
+
f.write(f' """{doc}"""\n\n')
|
|
1037
|
+
|
|
1038
|
+
# Constructor
|
|
1039
|
+
f.write(' def __init__(self')
|
|
1040
|
+
for field in struct.get('fields', []):
|
|
1041
|
+
field_type = field.get('type', 'Any')
|
|
1042
|
+
field_name = field.get('name', '')
|
|
1043
|
+
|
|
1044
|
+
# Substitute template parameter
|
|
1045
|
+
if template_type and field_type == 'T':
|
|
1046
|
+
field_type = self._cpp_to_python_type(template_type)
|
|
1047
|
+
else:
|
|
1048
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1049
|
+
|
|
1050
|
+
f.write(f', {field_name}: {field_type} = ...')
|
|
1051
|
+
f.write(') -> None:\n')
|
|
1052
|
+
f.write(f' """Initialize {struct_name}"""\n')
|
|
1053
|
+
f.write(' ...\n\n')
|
|
1054
|
+
|
|
1055
|
+
# Fields with actual types
|
|
1056
|
+
for field in struct.get('fields', []):
|
|
1057
|
+
field_type = field.get('type', 'Any')
|
|
1058
|
+
field_name = field.get('name', '')
|
|
1059
|
+
if template_type and field_type == 'T':
|
|
1060
|
+
field_type = self._cpp_to_python_type(template_type)
|
|
1061
|
+
else:
|
|
1062
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1063
|
+
f.write(f' {field_name}: {field_type}\n')
|
|
1064
|
+
f.write('\n')
|
|
1065
|
+
|
|
1066
|
+
# to_dict method
|
|
1067
|
+
f.write(' def to_dict(self) -> Dict[str, Any]:\n')
|
|
1068
|
+
f.write(' """Convert struct to dictionary"""\n')
|
|
1069
|
+
f.write(' ...\n\n')
|
|
1070
|
+
|
|
1071
|
+
# from_dict static method
|
|
1072
|
+
f.write(' @staticmethod\n')
|
|
1073
|
+
f.write(f' def from_dict(d: Dict[str, Any]) -> "{struct_name}":\n')
|
|
1074
|
+
f.write(' """Create struct from dictionary"""\n')
|
|
1075
|
+
f.write(' ...\n\n')
|
|
1076
|
+
|
|
1077
|
+
    def _generate_module_pyi(self, module_name: str, module_info: Dict, verbose: bool = False):
        """Generate individual .pyi stub file for a C++ module.

        This creates {module_name}.pyi in the bindings directory alongside api.pyd.
        VSCode will use this for autocomplete when the module is imported.

        Args:
            module_name: Name of the module (e.g., 'geometry')
            module_info: Module descriptor from registry
            verbose: Print progress
        """
        pyi_file = self.bindings_dir / f"{module_name}.pyi"

        with open(pyi_file, 'w', encoding='utf-8') as f:
            f.write(f'"""Type stubs for {module_name} C++ module.\n\n')
            f.write('Auto-generated by IncludeCPP - DO NOT EDIT.\n')
            f.write('Provides VSCode/PyCharm autocomplete for C++ bindings.\n')
            f.write('"""\n\n')
            f.write('from typing import Any, List, Dict, Optional, Union, overload\n\n')

            # Generate classes (top level of the stub, unlike the nested
            # protocol classes in cpp_api_extensions.pyi).
            classes = module_info.get('classes', [])
            for cls in classes:
                class_name = cls.get('name', '')
                class_doc = cls.get('doc', '')

                f.write(f'class {class_name}:\n')
                if class_doc:
                    f.write(f'    """{class_doc}"""\n\n')
                else:
                    f.write(f'    """C++ class: {class_name}"""\n\n')

                constructors = cls.get('constructors', [])
                if constructors and len(constructors) > 1:
                    # Multiple constructors - use @overload
                    for ctor in constructors:
                        param_types = ctor.get('params', [])
                        f.write('    @overload\n')
                        if param_types:
                            # Constructor parameter names are not recorded;
                            # emit positional arg0..argN.
                            param_list = ['self']
                            for i, ptype in enumerate(param_types):
                                py_type = self._cpp_to_python_type(ptype)
                                param_list.append(f'arg{i}: {py_type}')
                            params_str = ', '.join(param_list)
                            f.write(f'    def __init__({params_str}) -> None: ...\n')
                        else:
                            f.write(f'    def __init__(self) -> None: ...\n')
                    f.write('\n')
                    # Actual implementation signature
                    f.write(f'    def __init__(self, *args: Any, **kwargs: Any) -> None:\n')
                    f.write(f'        """Initialize {class_name} instance."""\n')
                    f.write('        ...\n\n')
                elif constructors and len(constructors) == 1:
                    # Single constructor
                    param_types = constructors[0].get('params', [])
                    if param_types:
                        param_list = ['self']
                        for i, ptype in enumerate(param_types):
                            py_type = self._cpp_to_python_type(ptype)
                            param_list.append(f'arg{i}: {py_type}')
                        params_str = ', '.join(param_list)
                        f.write(f'    def __init__({params_str}) -> None:\n')
                    else:
                        f.write(f'    def __init__(self) -> None:\n')
                    f.write(f'        """Initialize {class_name} instance."""\n')
                    f.write('        ...\n\n')
                else:
                    # Fallback: legacy format or no constructor info
                    constructor_params = cls.get('constructor_params', [])
                    if constructor_params:
                        # Legacy descriptors do carry names/defaults.
                        param_list = ['self']
                        for param in constructor_params:
                            param_name = param.get('name', 'arg')
                            param_type = self._cpp_to_python_type(param.get('type', 'Any'))
                            param_default = param.get('default', None)
                            if param_default:
                                py_default = self._convert_cpp_default(param_default, param_type)
                                param_list.append(f'{param_name}: {param_type} = {py_default}')
                            else:
                                param_list.append(f'{param_name}: {param_type}')
                        params_str = ', '.join(param_list)
                        f.write(f'    def __init__({params_str}) -> None:\n')
                    else:
                        f.write(f'    def __init__(self, *args: Any, **kwargs: Any) -> None:\n')
                    f.write(f'        """Initialize {class_name} instance."""\n')
                    f.write('        ...\n\n')

                # Methods (dicts with metadata, or plain name strings)
                methods = cls.get('methods', [])
                if isinstance(methods, list):
                    for method in methods:
                        if isinstance(method, dict):
                            method_name = method.get('name', '')
                            method_doc = method.get('doc', '')
                            return_type = method.get('return_type', 'Any')
                            parameters = method.get('parameters', [])
                            is_static = method.get('static', False)
                        else:
                            method_name = str(method)
                            method_doc = ''
                            return_type = 'Any'
                            parameters = []
                            is_static = False

                        if method_name:
                            if is_static:
                                f.write('    @staticmethod\n')

                            if parameters:
                                # Static methods take no self parameter.
                                param_list = [] if is_static else ['self']
                                for param in parameters:
                                    param_name = param.get('name', 'arg')
                                    param_type = self._cpp_to_python_type(param.get('type', 'Any'))
                                    param_default = param.get('default', None)
                                    if param_default:
                                        py_default = self._convert_cpp_default(param_default, param_type)
                                        param_list.append(f'{param_name}: {param_type} = {py_default}')
                                    else:
                                        param_list.append(f'{param_name}: {param_type}')
                                params_str = ', '.join(param_list)
                            else:
                                params_str = 'self' if not is_static else ''

                            py_return_type = self._cpp_to_python_type(return_type)
                            f.write(f'    def {method_name}({params_str}) -> {py_return_type}:\n')
                            if method_doc:
                                f.write(f'        """{method_doc}"""\n')
                            else:
                                f.write(f'        """C++ method: {method_name}"""\n')
                            f.write('        ...\n\n')

                # Fields as class attributes
                fields = cls.get('fields', [])
                for field in fields:
                    if isinstance(field, dict):
                        field_name = field.get('name', '')
                        field_type = self._cpp_to_python_type(field.get('type', 'Any'))
                    else:
                        field_name = str(field)
                        field_type = 'Any'
                    if field_name:
                        f.write(f'    {field_name}: {field_type}\n')

                if not methods and not fields:
                    # Keep the emitted class body syntactically valid.
                    f.write('    pass\n')
                f.write('\n')

            # Generate structs
            structs = module_info.get('structs', [])
            for struct in structs:
                struct_name = struct.get('name', '')
                is_template = struct.get('is_template', False)
                template_types = struct.get('template_types', [])
                # 'fields' and 'doc' are unpacked here but only consumed by
                # _write_struct_to_pyi via the 'struct' dict itself.
                fields = struct.get('fields', [])
                doc = struct.get('doc', '')

                if is_template:
                    # One mangled class per template instantiation.
                    for ttype in template_types:
                        full_name = f"{struct_name}_{ttype}"
                        self._write_struct_to_pyi(f, full_name, struct, ttype)
                else:
                    self._write_struct_to_pyi(f, struct_name, struct, None)

            # Generate module-level functions
            functions = module_info.get('functions', [])
            for func in functions:
                if isinstance(func, dict):
                    func_name = func.get('name', '')
                    func_doc = func.get('doc', '')
                    return_type = func.get('return_type', 'Any')
                    parameters = func.get('parameters', [])
                else:
                    func_name = str(func)
                    func_doc = ''
                    return_type = 'Any'
                    parameters = []

                if func_name:
                    if parameters:
                        param_list = []
                        for param in parameters:
                            param_name = param.get('name', 'arg')
                            param_type = self._cpp_to_python_type(param.get('type', 'Any'))
                            param_default = param.get('default', None)
                            if param_default:
                                py_default = self._convert_cpp_default(param_default, param_type)
                                param_list.append(f'{param_name}: {param_type} = {py_default}')
                            else:
                                param_list.append(f'{param_name}: {param_type}')
                        params_str = ', '.join(param_list)
                    else:
                        params_str = '*args: Any, **kwargs: Any'

                    py_return_type = self._cpp_to_python_type(return_type)
                    f.write(f'def {func_name}({params_str}) -> {py_return_type}:\n')
                    if func_doc:
                        f.write(f'    """{func_doc}"""\n')
                    else:
                        f.write(f'    """C++ function: {func_name}"""\n')
                    f.write('    ...\n\n')

        if verbose:
            print(f"  Generated: {pyi_file.name}")
def _write_struct_to_pyi(self, f, struct_name: str, struct: Dict, template_type: Optional[str]):
|
|
1282
|
+
"""Write a struct class definition to a .pyi file."""
|
|
1283
|
+
doc = struct.get('doc', f'C++ struct: {struct["name"]}')
|
|
1284
|
+
if template_type:
|
|
1285
|
+
doc += f'<{template_type}>'
|
|
1286
|
+
|
|
1287
|
+
f.write(f'class {struct_name}:\n')
|
|
1288
|
+
f.write(f' """{doc}"""\n\n')
|
|
1289
|
+
|
|
1290
|
+
# Constructor with fields
|
|
1291
|
+
fields = struct.get('fields', [])
|
|
1292
|
+
f.write(' def __init__(self')
|
|
1293
|
+
for field in fields:
|
|
1294
|
+
field_type = field.get('type', 'Any')
|
|
1295
|
+
field_name = field.get('name', '')
|
|
1296
|
+
if template_type and field_type == 'T':
|
|
1297
|
+
field_type = self._cpp_to_python_type(template_type)
|
|
1298
|
+
else:
|
|
1299
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1300
|
+
f.write(f', {field_name}: {field_type} = ...')
|
|
1301
|
+
f.write(') -> None:\n')
|
|
1302
|
+
f.write(f' """Initialize {struct_name}."""\n')
|
|
1303
|
+
f.write(' ...\n\n')
|
|
1304
|
+
|
|
1305
|
+
# Fields as attributes
|
|
1306
|
+
for field in fields:
|
|
1307
|
+
field_type = field.get('type', 'Any')
|
|
1308
|
+
field_name = field.get('name', '')
|
|
1309
|
+
if template_type and field_type == 'T':
|
|
1310
|
+
field_type = self._cpp_to_python_type(template_type)
|
|
1311
|
+
else:
|
|
1312
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1313
|
+
f.write(f' {field_name}: {field_type}\n')
|
|
1314
|
+
|
|
1315
|
+
f.write('\n')
|
|
1316
|
+
f.write(' def to_dict(self) -> Dict[str, Any]:\n')
|
|
1317
|
+
f.write(' """Convert struct to dictionary."""\n')
|
|
1318
|
+
f.write(' ...\n\n')
|
|
1319
|
+
|
|
1320
|
+
f.write(' @staticmethod\n')
|
|
1321
|
+
f.write(f' def from_dict(d: Dict[str, Any]) -> "{struct_name}":\n')
|
|
1322
|
+
f.write(' """Create struct from dictionary."""\n')
|
|
1323
|
+
f.write(' ...\n\n')
|
|
1324
|
+
|
|
1325
|
+
def _configure_vscode_autocomplete(self, verbose: bool = False):
|
|
1326
|
+
"""Auto-configure VSCode for C++ module autocomplete.
|
|
1327
|
+
|
|
1328
|
+
Creates or updates .vscode/settings.json to include the bindings
|
|
1329
|
+
directory in python.analysis.extraPaths for Pylance autocomplete.
|
|
1330
|
+
"""
|
|
1331
|
+
vscode_dir = self.project_root / ".vscode"
|
|
1332
|
+
settings_file = vscode_dir / "settings.json"
|
|
1333
|
+
|
|
1334
|
+
# Path to bindings directory (use forward slashes for JSON)
|
|
1335
|
+
bindings_path = str(self.bindings_dir).replace("\\", "/")
|
|
1336
|
+
|
|
1337
|
+
try:
|
|
1338
|
+
# Load existing settings or create new
|
|
1339
|
+
if settings_file.exists():
|
|
1340
|
+
with open(settings_file, 'r', encoding='utf-8') as f:
|
|
1341
|
+
try:
|
|
1342
|
+
settings = json.load(f)
|
|
1343
|
+
except json.JSONDecodeError:
|
|
1344
|
+
settings = {}
|
|
1345
|
+
else:
|
|
1346
|
+
settings = {}
|
|
1347
|
+
vscode_dir.mkdir(parents=True, exist_ok=True)
|
|
1348
|
+
|
|
1349
|
+
# Get or create extraPaths list
|
|
1350
|
+
extra_paths = settings.get("python.analysis.extraPaths", [])
|
|
1351
|
+
if not isinstance(extra_paths, list):
|
|
1352
|
+
extra_paths = []
|
|
1353
|
+
|
|
1354
|
+
# Add bindings path if not already present
|
|
1355
|
+
if bindings_path not in extra_paths:
|
|
1356
|
+
extra_paths.append(bindings_path)
|
|
1357
|
+
settings["python.analysis.extraPaths"] = extra_paths
|
|
1358
|
+
|
|
1359
|
+
# Write updated settings
|
|
1360
|
+
with open(settings_file, 'w', encoding='utf-8') as f:
|
|
1361
|
+
json.dump(settings, f, indent=4)
|
|
1362
|
+
|
|
1363
|
+
if verbose:
|
|
1364
|
+
print(f"\nVSCode configured for autocomplete:")
|
|
1365
|
+
print(f" Updated: {settings_file}")
|
|
1366
|
+
print(f" Added path: {bindings_path}")
|
|
1367
|
+
elif verbose:
|
|
1368
|
+
print(f"\nVSCode autocomplete already configured")
|
|
1369
|
+
|
|
1370
|
+
except Exception as e:
|
|
1371
|
+
if verbose:
|
|
1372
|
+
print(f"Warning: Could not configure VSCode: {e}")
|
|
1373
|
+
|
|
1374
|
+
def _generate_all_module_pyi(self, verbose: bool = False):
|
|
1375
|
+
"""Generate .pyi stub files for all C++ modules in bindings directory.
|
|
1376
|
+
|
|
1377
|
+
This is called after a successful build to create VSCode-compatible
|
|
1378
|
+
type stubs for each module.
|
|
1379
|
+
"""
|
|
1380
|
+
if not self.registry_file.exists():
|
|
1381
|
+
if verbose:
|
|
1382
|
+
print("No registry file found, skipping module .pyi generation")
|
|
1383
|
+
return
|
|
1384
|
+
|
|
1385
|
+
try:
|
|
1386
|
+
with open(self.registry_file, 'r', encoding='utf-8') as f:
|
|
1387
|
+
registry = json.load(f)
|
|
1388
|
+
except Exception as e:
|
|
1389
|
+
if verbose:
|
|
1390
|
+
print(f"Failed to load registry for module .pyi generation: {e}")
|
|
1391
|
+
return
|
|
1392
|
+
|
|
1393
|
+
modules = registry.get('modules', registry)
|
|
1394
|
+
|
|
1395
|
+
if verbose:
|
|
1396
|
+
print(f"\nGenerating .pyi stubs for {len(modules)} module(s)...")
|
|
1397
|
+
|
|
1398
|
+
for module_name, module_info in modules.items():
|
|
1399
|
+
try:
|
|
1400
|
+
self._generate_module_pyi(module_name, module_info, verbose=verbose)
|
|
1401
|
+
except Exception as e:
|
|
1402
|
+
if verbose:
|
|
1403
|
+
print(f" Warning: Failed to generate {module_name}.pyi: {e}")
|
|
1404
|
+
|
|
1405
|
+
def _generate_pyi_stub(self, verbose: bool = False):
|
|
1406
|
+
"""Generate .pyi stub file for VSCode IntelliSense with CORRECT module structure.
|
|
1407
|
+
|
|
1408
|
+
This generates TWO types of stubs:
|
|
1409
|
+
1. Module wrapper stubs in package directory for VSCode autocomplete
|
|
1410
|
+
2. API module stub in bindings directory for runtime type checking
|
|
1411
|
+
3. Individual module .pyi files for direct module imports
|
|
1412
|
+
"""
|
|
1413
|
+
if not self.registry_file.exists():
|
|
1414
|
+
if verbose:
|
|
1415
|
+
print("No registry file found, skipping .pyi generation")
|
|
1416
|
+
return
|
|
1417
|
+
|
|
1418
|
+
try:
|
|
1419
|
+
with open(self.registry_file, 'r', encoding='utf-8') as f:
|
|
1420
|
+
registry = json.load(f)
|
|
1421
|
+
except Exception as e:
|
|
1422
|
+
if verbose:
|
|
1423
|
+
print(f"Failed to load registry for .pyi generation: {e}")
|
|
1424
|
+
return
|
|
1425
|
+
|
|
1426
|
+
modules = registry.get('modules', registry)
|
|
1427
|
+
|
|
1428
|
+
# Generate cpp_api_extensions.pyi in package directory for VSCode IntelliSense
|
|
1429
|
+
# This is the key to making autocomplete work!
|
|
1430
|
+
self._generate_cpp_api_extensions(modules, verbose)
|
|
1431
|
+
|
|
1432
|
+
# Generate __getattr__ overloads in __init__.pyi for dynamic imports
|
|
1433
|
+
# This enables: from includecpp import fast_list
|
|
1434
|
+
self._generate_init_pyi_overloads(modules, verbose)
|
|
1435
|
+
|
|
1436
|
+
# Also generate api.pyi in bindings for completeness
|
|
1437
|
+
pyi_file = self.bindings_dir / "api.pyi"
|
|
1438
|
+
|
|
1439
|
+
with open(pyi_file, 'w', encoding='utf-8') as f:
|
|
1440
|
+
f.write('"""Auto-generated type stubs for IncludeCPP C++ bindings\n\n')
|
|
1441
|
+
f.write('This file describes the raw C++ module structure.\n')
|
|
1442
|
+
f.write('For VSCode autocomplete, see cpp_api_extensions.pyi\n')
|
|
1443
|
+
f.write('"""\n')
|
|
1444
|
+
f.write('from typing import Any, List, Dict, Optional, Union, overload, Sequence\n\n')
|
|
1445
|
+
|
|
1446
|
+
# Generate stubs for each module
|
|
1447
|
+
for module_name, module_info in modules.items():
|
|
1448
|
+
f.write(f'class {module_name}:\n')
|
|
1449
|
+
f.write(f' """Module: {module_name}\n\n')
|
|
1450
|
+
|
|
1451
|
+
# Add module docstring with summary
|
|
1452
|
+
sources = module_info.get('sources', [])
|
|
1453
|
+
if sources:
|
|
1454
|
+
f.write(f' Sources: {", ".join(sources)}\n')
|
|
1455
|
+
|
|
1456
|
+
deps = module_info.get('dependencies', [])
|
|
1457
|
+
if deps:
|
|
1458
|
+
dep_names = [d.get('target', '?') for d in deps]
|
|
1459
|
+
f.write(f' Dependencies: {", ".join(dep_names)}\n')
|
|
1460
|
+
|
|
1461
|
+
f.write(' """\n\n')
|
|
1462
|
+
|
|
1463
|
+
# Generate STRUCT types (v2.0+)
|
|
1464
|
+
structs = module_info.get('structs', [])
|
|
1465
|
+
for struct in structs:
|
|
1466
|
+
struct_name = struct.get('name', '')
|
|
1467
|
+
is_template = struct.get('is_template', False)
|
|
1468
|
+
template_types = struct.get('template_types', [])
|
|
1469
|
+
fields = struct.get('fields', [])
|
|
1470
|
+
doc = struct.get('doc', '')
|
|
1471
|
+
|
|
1472
|
+
if is_template:
|
|
1473
|
+
# Generate struct for each template type
|
|
1474
|
+
for ttype in template_types:
|
|
1475
|
+
full_name = f"{struct_name}_{ttype}"
|
|
1476
|
+
f.write(f' class {full_name}:\n')
|
|
1477
|
+
if doc:
|
|
1478
|
+
f.write(f' """{doc}\n\n')
|
|
1479
|
+
f.write(f' Template instantiation: {struct_name}<{ttype}>\n')
|
|
1480
|
+
f.write(' """\n\n')
|
|
1481
|
+
else:
|
|
1482
|
+
f.write(f' """POD struct: {struct_name}<{ttype}>"""\n\n')
|
|
1483
|
+
|
|
1484
|
+
# Constructor
|
|
1485
|
+
f.write(f' def __init__(self')
|
|
1486
|
+
for field in fields:
|
|
1487
|
+
field_type = field.get('type', 'Any')
|
|
1488
|
+
field_name = field.get('name', '')
|
|
1489
|
+
# Replace template parameter T
|
|
1490
|
+
if field_type == 'T':
|
|
1491
|
+
field_type = self._cpp_to_python_type(ttype)
|
|
1492
|
+
else:
|
|
1493
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1494
|
+
f.write(f', {field_name}: {field_type} = ...')
|
|
1495
|
+
f.write(') -> None:\n')
|
|
1496
|
+
f.write(f' """Initialize {full_name}"""\n')
|
|
1497
|
+
f.write(' ...\n\n')
|
|
1498
|
+
|
|
1499
|
+
# Fields with actual types
|
|
1500
|
+
for field in fields:
|
|
1501
|
+
field_type = field.get('type', 'Any')
|
|
1502
|
+
field_name = field.get('name', '')
|
|
1503
|
+
if field_type == 'T':
|
|
1504
|
+
field_type = self._cpp_to_python_type(ttype)
|
|
1505
|
+
else:
|
|
1506
|
+
field_type = self._cpp_to_python_type(field_type)
|
|
1507
|
+
f.write(f' {field_name}: {field_type}\n')
|
|
1508
|
+
f.write('\n')
|
|
1509
|
+
|
|
1510
|
+
# to_dict method
|
|
1511
|
+
f.write(' def to_dict(self) -> Dict[str, Any]:\n')
|
|
1512
|
+
f.write(' """Convert struct to dictionary"""\n')
|
|
1513
|
+
f.write(' ...\n\n')
|
|
1514
|
+
|
|
1515
|
+
# from_dict static method
|
|
1516
|
+
f.write(' @staticmethod\n')
|
|
1517
|
+
f.write(f' def from_dict(d: Dict[str, Any]) -> "{module_name}.{full_name}":\n')
|
|
1518
|
+
f.write(' """Create struct from dictionary"""\n')
|
|
1519
|
+
f.write(' ...\n\n')
|
|
1520
|
+
else:
|
|
1521
|
+
# Non-template struct
|
|
1522
|
+
f.write(f' class {struct_name}:\n')
|
|
1523
|
+
if doc:
|
|
1524
|
+
f.write(f' """{doc}"""\n\n')
|
|
1525
|
+
else:
|
|
1526
|
+
f.write(f' """POD struct: {struct_name}"""\n\n')
|
|
1527
|
+
|
|
1528
|
+
# Constructor
|
|
1529
|
+
f.write(f' def __init__(self')
|
|
1530
|
+
for field in fields:
|
|
1531
|
+
field_type = self._cpp_to_python_type(field.get('type', 'Any'))
|
|
1532
|
+
field_name = field.get('name', '')
|
|
1533
|
+
f.write(f', {field_name}: {field_type} = ...')
|
|
1534
|
+
f.write(') -> None:\n')
|
|
1535
|
+
f.write(f' """Initialize {struct_name}"""\n')
|
|
1536
|
+
f.write(' ...\n\n')
|
|
1537
|
+
|
|
1538
|
+
# Fields
|
|
1539
|
+
for field in fields:
|
|
1540
|
+
field_type = self._cpp_to_python_type(field.get('type', 'Any'))
|
|
1541
|
+
field_name = field.get('name', '')
|
|
1542
|
+
f.write(f' {field_name}: {field_type}\n')
|
|
1543
|
+
f.write('\n')
|
|
1544
|
+
|
|
1545
|
+
# to_dict method
|
|
1546
|
+
f.write(' def to_dict(self) -> Dict[str, Any]:\n')
|
|
1547
|
+
f.write(' """Convert struct to dictionary"""\n')
|
|
1548
|
+
f.write(' ...\n\n')
|
|
1549
|
+
|
|
1550
|
+
# from_dict static method
|
|
1551
|
+
f.write(' @staticmethod\n')
|
|
1552
|
+
f.write(f' def from_dict(d: Dict[str, Any]) -> "{module_name}.{struct_name}":\n')
|
|
1553
|
+
f.write(' """Create struct from dictionary"""\n')
|
|
1554
|
+
f.write(' ...\n\n')
|
|
1555
|
+
|
|
1556
|
+
# Generate classes
|
|
1557
|
+
classes = module_info.get('classes', [])
|
|
1558
|
+
for cls in classes:
|
|
1559
|
+
class_name = cls.get('name', '')
|
|
1560
|
+
class_doc = cls.get('doc', '')
|
|
1561
|
+
|
|
1562
|
+
f.write(f' class {class_name}:\n')
|
|
1563
|
+
if class_doc:
|
|
1564
|
+
f.write(f' """{class_doc}"""\n\n')
|
|
1565
|
+
else:
|
|
1566
|
+
f.write(f' """C++ class: {class_name}"""\n\n')
|
|
1567
|
+
|
|
1568
|
+
# Constructor
|
|
1569
|
+
f.write(f' def __init__(self, *args: Any, **kwargs: Any) -> None:\n')
|
|
1570
|
+
f.write(f' """Initialize {class_name} instance"""\n')
|
|
1571
|
+
f.write(f' ...\n\n')
|
|
1572
|
+
|
|
1573
|
+
# Initialize method (factory method)
|
|
1574
|
+
f.write(f' @staticmethod\n')
|
|
1575
|
+
f.write(f' def Initialize(*args: Any, **kwargs: Any) -> "{module_name}.{class_name}":\n')
|
|
1576
|
+
f.write(f' """Create and initialize a new {class_name} instance"""\n')
|
|
1577
|
+
f.write(f' ...\n\n')
|
|
1578
|
+
|
|
1579
|
+
# Generate methods with real signatures (v2.3.5)
|
|
1580
|
+
methods = cls.get('methods', [])
|
|
1581
|
+
if isinstance(methods, list):
|
|
1582
|
+
for method in methods:
|
|
1583
|
+
if isinstance(method, dict):
|
|
1584
|
+
method_name = method.get('name', '')
|
|
1585
|
+
method_doc = method.get('doc', '')
|
|
1586
|
+
return_type = method.get('return_type', 'Any')
|
|
1587
|
+
parameters = method.get('parameters', [])
|
|
1588
|
+
is_const = method.get('const', False)
|
|
1589
|
+
is_static = method.get('static', False)
|
|
1590
|
+
else:
|
|
1591
|
+
method_name = str(method)
|
|
1592
|
+
method_doc = ''
|
|
1593
|
+
return_type = 'Any'
|
|
1594
|
+
parameters = []
|
|
1595
|
+
is_const = False
|
|
1596
|
+
is_static = False
|
|
1597
|
+
|
|
1598
|
+
if method_name:
|
|
1599
|
+
if is_static:
|
|
1600
|
+
f.write(f' @staticmethod\n')
|
|
1601
|
+
|
|
1602
|
+
if parameters:
|
|
1603
|
+
param_list = ['self'] if not is_static else []
|
|
1604
|
+
for param in parameters:
|
|
1605
|
+
param_name = param.get('name', 'arg')
|
|
1606
|
+
param_type = self._cpp_to_python_type(param.get('type', 'Any'))
|
|
1607
|
+
param_default = param.get('default', None)
|
|
1608
|
+
|
|
1609
|
+
if param_default:
|
|
1610
|
+
py_default = self._convert_cpp_default(param_default, param_type)
|
|
1611
|
+
param_list.append(f'{param_name}: {param_type} = {py_default}')
|
|
1612
|
+
else:
|
|
1613
|
+
param_list.append(f'{param_name}: {param_type}')
|
|
1614
|
+
|
|
1615
|
+
params_str = ', '.join(param_list)
|
|
1616
|
+
else:
|
|
1617
|
+
params_str = 'self' if not is_static else ''
|
|
1618
|
+
|
|
1619
|
+
py_return_type = self._cpp_to_python_type(return_type)
|
|
1620
|
+
|
|
1621
|
+
f.write(f' def {method_name}({params_str}) -> {py_return_type}:\n')
|
|
1622
|
+
if method_doc:
|
|
1623
|
+
f.write(f' """{method_doc}"""\n')
|
|
1624
|
+
else:
|
|
1625
|
+
f.write(f' """C++ method: {method_name}"""\n')
|
|
1626
|
+
f.write(f' ...\n\n')
|
|
1627
|
+
|
|
1628
|
+
# Generate fields as properties
|
|
1629
|
+
fields = cls.get('fields', [])
|
|
1630
|
+
for field in fields:
|
|
1631
|
+
if isinstance(field, dict):
|
|
1632
|
+
field_name = field.get('name', '')
|
|
1633
|
+
field_type = self._cpp_to_python_type(field.get('type', 'Any'))
|
|
1634
|
+
else:
|
|
1635
|
+
field_name = str(field)
|
|
1636
|
+
field_type = 'Any'
|
|
1637
|
+
|
|
1638
|
+
if field_name:
|
|
1639
|
+
f.write(f' {field_name}: {field_type}\n')
|
|
1640
|
+
|
|
1641
|
+
if not methods and not fields:
|
|
1642
|
+
f.write(f' pass\n\n')
|
|
1643
|
+
|
|
1644
|
+
# Generate functions with real signatures (v2.3.5)
|
|
1645
|
+
functions = module_info.get('functions', [])
|
|
1646
|
+
for func in functions:
|
|
1647
|
+
if isinstance(func, dict):
|
|
1648
|
+
func_name = func.get('name', '')
|
|
1649
|
+
func_doc = func.get('doc', '')
|
|
1650
|
+
return_type = func.get('return_type', 'Any')
|
|
1651
|
+
parameters = func.get('parameters', [])
|
|
1652
|
+
is_static = func.get('static', False)
|
|
1653
|
+
else:
|
|
1654
|
+
func_name = str(func)
|
|
1655
|
+
func_doc = ''
|
|
1656
|
+
return_type = 'Any'
|
|
1657
|
+
parameters = []
|
|
1658
|
+
is_static = False
|
|
1659
|
+
|
|
1660
|
+
if func_name:
|
|
1661
|
+
f.write(f' @staticmethod\n')
|
|
1662
|
+
|
|
1663
|
+
if parameters:
|
|
1664
|
+
param_list = []
|
|
1665
|
+
for param in parameters:
|
|
1666
|
+
param_name = param.get('name', 'arg')
|
|
1667
|
+
param_type = self._cpp_to_python_type(param.get('type', 'Any'))
|
|
1668
|
+
param_default = param.get('default', None)
|
|
1669
|
+
|
|
1670
|
+
if param_default:
|
|
1671
|
+
py_default = self._convert_cpp_default(param_default, param_type)
|
|
1672
|
+
param_list.append(f'{param_name}: {param_type} = {py_default}')
|
|
1673
|
+
else:
|
|
1674
|
+
param_list.append(f'{param_name}: {param_type}')
|
|
1675
|
+
|
|
1676
|
+
params_str = ', '.join(param_list)
|
|
1677
|
+
else:
|
|
1678
|
+
params_str = ''
|
|
1679
|
+
|
|
1680
|
+
py_return_type = self._cpp_to_python_type(return_type)
|
|
1681
|
+
|
|
1682
|
+
f.write(f' def {func_name}({params_str}) -> {py_return_type}:\n')
|
|
1683
|
+
|
|
1684
|
+
if func_doc:
|
|
1685
|
+
f.write(f' """{func_doc}"""\n')
|
|
1686
|
+
else:
|
|
1687
|
+
f.write(f' """C++ function: {func_name}"""\n')
|
|
1688
|
+
f.write(f' ...\n\n')
|
|
1689
|
+
|
|
1690
|
+
if not classes and not functions and not structs:
|
|
1691
|
+
f.write(f' pass\n')
|
|
1692
|
+
|
|
1693
|
+
f.write('\n')
|
|
1694
|
+
|
|
1695
|
+
if verbose:
|
|
1696
|
+
print(f"Generated type stub: {pyi_file}")
|
|
1697
|
+
|
|
1698
|
+
def _update_source_hashes(self, verbose: bool = False):
|
|
1699
|
+
"""Update source_hashes in registry after successful build (v2.3.5 format)."""
|
|
1700
|
+
if not self.registry_file.exists():
|
|
1701
|
+
if verbose:
|
|
1702
|
+
print("No registry file found, skipping hash update")
|
|
1703
|
+
return
|
|
1704
|
+
|
|
1705
|
+
try:
|
|
1706
|
+
with open(self.registry_file, 'r', encoding='utf-8') as f:
|
|
1707
|
+
registry = json.load(f)
|
|
1708
|
+
except Exception as e:
|
|
1709
|
+
if verbose:
|
|
1710
|
+
print(f"Failed to load registry for hash update: {e}")
|
|
1711
|
+
return
|
|
1712
|
+
|
|
1713
|
+
modules = registry.get('modules', registry)
|
|
1714
|
+
|
|
1715
|
+
for module_name, module_info in modules.items():
|
|
1716
|
+
sources = module_info.get('sources', [])
|
|
1717
|
+
source_hashes = {}
|
|
1718
|
+
|
|
1719
|
+
# Hash all source files
|
|
1720
|
+
for source in sources:
|
|
1721
|
+
source_path = self.project_root / source
|
|
1722
|
+
hash_value = self._compute_hash(source_path)
|
|
1723
|
+
source_hashes[source] = hash_value
|
|
1724
|
+
|
|
1725
|
+
# Also hash the .cp file
|
|
1726
|
+
cp_file = module_info.get('cp_file', '')
|
|
1727
|
+
if cp_file:
|
|
1728
|
+
cp_path = Path(cp_file)
|
|
1729
|
+
if cp_path.exists():
|
|
1730
|
+
cp_hash = self._compute_hash(cp_path)
|
|
1731
|
+
source_hashes[f"{module_name}.cp"] = cp_hash
|
|
1732
|
+
|
|
1733
|
+
# Store in v2.3.5 format (source_hashes field)
|
|
1734
|
+
module_info['source_hashes'] = source_hashes
|
|
1735
|
+
|
|
1736
|
+
# Remove old v1.6 'hashes' field if exists
|
|
1737
|
+
if 'hashes' in module_info:
|
|
1738
|
+
del module_info['hashes']
|
|
1739
|
+
|
|
1740
|
+
if 'modules' in registry:
|
|
1741
|
+
registry['modules'] = modules
|
|
1742
|
+
else:
|
|
1743
|
+
registry = modules
|
|
1744
|
+
|
|
1745
|
+
try:
|
|
1746
|
+
with open(self.registry_file, 'w', encoding='utf-8') as f:
|
|
1747
|
+
json.dump(registry, f, indent=2)
|
|
1748
|
+
if verbose:
|
|
1749
|
+
print(f"Updated source hashes in registry (v2.3.5 format)")
|
|
1750
|
+
except Exception as e:
|
|
1751
|
+
if verbose:
|
|
1752
|
+
print(f"Failed to save registry with hashes: {e}")
|
|
1753
|
+
|
|
1754
|
+
def _cpp_to_python_type(self, cpp_type: str) -> str:
|
|
1755
|
+
"""Convert C++ type to Python type hint."""
|
|
1756
|
+
# Basic type mappings
|
|
1757
|
+
type_map = {
|
|
1758
|
+
'int': 'int',
|
|
1759
|
+
'long': 'int',
|
|
1760
|
+
'short': 'int',
|
|
1761
|
+
'float': 'float',
|
|
1762
|
+
'double': 'float',
|
|
1763
|
+
'bool': 'bool',
|
|
1764
|
+
'string': 'str',
|
|
1765
|
+
'std::string': 'str',
|
|
1766
|
+
'void': 'None',
|
|
1767
|
+
'char': 'str',
|
|
1768
|
+
}
|
|
1769
|
+
|
|
1770
|
+
# Remove const, &, *
|
|
1771
|
+
clean_type = cpp_type.strip()
|
|
1772
|
+
clean_type = clean_type.replace('const', '').strip()
|
|
1773
|
+
clean_type = clean_type.rstrip('&*').strip()
|
|
1774
|
+
|
|
1775
|
+
# Check basic types
|
|
1776
|
+
if clean_type in type_map:
|
|
1777
|
+
return type_map[clean_type]
|
|
1778
|
+
|
|
1779
|
+
# Handle vector<T>
|
|
1780
|
+
if 'vector<' in clean_type or 'std::vector<' in clean_type:
|
|
1781
|
+
start = clean_type.find('<') + 1
|
|
1782
|
+
end = clean_type.rfind('>')
|
|
1783
|
+
if start > 0 and end > start:
|
|
1784
|
+
element_type = clean_type[start:end].strip()
|
|
1785
|
+
return f'List[{self._cpp_to_python_type(element_type)}]'
|
|
1786
|
+
|
|
1787
|
+
# Handle map<K,V>
|
|
1788
|
+
if 'map<' in clean_type or 'std::map<' in clean_type:
|
|
1789
|
+
start = clean_type.find('<') + 1
|
|
1790
|
+
end = clean_type.rfind('>')
|
|
1791
|
+
if start > 0 and end > start:
|
|
1792
|
+
inner = clean_type[start:end]
|
|
1793
|
+
# Simple split by comma (doesn't handle nested templates perfectly)
|
|
1794
|
+
parts = [p.strip() for p in inner.split(',', 1)]
|
|
1795
|
+
if len(parts) == 2:
|
|
1796
|
+
key_type = self._cpp_to_python_type(parts[0])
|
|
1797
|
+
val_type = self._cpp_to_python_type(parts[1])
|
|
1798
|
+
return f'Dict[{key_type}, {val_type}]'
|
|
1799
|
+
|
|
1800
|
+
# Unknown type - return as is
|
|
1801
|
+
return 'Any'
|
|
1802
|
+
|
|
1803
|
+
def _convert_cpp_default(self, cpp_default: str, param_type: str) -> str:
|
|
1804
|
+
"""Convert C++ default value to Python equivalent.
|
|
1805
|
+
|
|
1806
|
+
Args:
|
|
1807
|
+
cpp_default: C++ default value (e.g., "nullptr", "true", "0")
|
|
1808
|
+
param_type: Python type hint for the parameter
|
|
1809
|
+
|
|
1810
|
+
Returns:
|
|
1811
|
+
Python default value string
|
|
1812
|
+
"""
|
|
1813
|
+
cpp_default = cpp_default.strip()
|
|
1814
|
+
|
|
1815
|
+
if cpp_default == "nullptr" or cpp_default == "NULL":
|
|
1816
|
+
return "None"
|
|
1817
|
+
|
|
1818
|
+
if cpp_default == "true":
|
|
1819
|
+
return "True"
|
|
1820
|
+
if cpp_default == "false":
|
|
1821
|
+
return "False"
|
|
1822
|
+
|
|
1823
|
+
if cpp_default.startswith('"') and cpp_default.endswith('"'):
|
|
1824
|
+
return cpp_default
|
|
1825
|
+
if cpp_default.startswith("'") and cpp_default.endswith("'"):
|
|
1826
|
+
return f'"{cpp_default[1:-1]}"'
|
|
1827
|
+
|
|
1828
|
+
if cpp_default.replace('.', '', 1).replace('-', '', 1).replace('+', '', 1).isdigit():
|
|
1829
|
+
return cpp_default
|
|
1830
|
+
|
|
1831
|
+
if cpp_default == '""' or cpp_default == "''":
|
|
1832
|
+
return '""'
|
|
1833
|
+
|
|
1834
|
+
if cpp_default.startswith('{') and cpp_default.endswith('}'):
|
|
1835
|
+
return cpp_default.replace('{', '[').replace('}', ']')
|
|
1836
|
+
|
|
1837
|
+
return cpp_default
|
|
1838
|
+
|
|
1839
|
+
def _install_module(self, verbose: bool = False, skip_if_exists: bool = False):
    """Copy the compiled api extension module into the bindings directory.

    Args:
        verbose: Print progress messages.
        skip_if_exists: Leave an already-installed module untouched
            (direct-compilation builds write straight into bindings_dir).

    Raises:
        CppBuildError: When no compiled module is found in the CMake output.
    """
    suffix = self._get_pyd_suffix()
    dest = self.bindings_dir / f"api{suffix}"

    # Direct compilation already placed the module; only the runtime DLLs
    # still need copying in that case.
    if skip_if_exists and dest.exists():
        if verbose:
            print(f"Module already in place: {dest}")
        self._copy_mingw_dlls(verbose=verbose)
        return

    # CMake emits either into a Release subdirectory (multi-config
    # generators) or the build root (single-config).
    pyd_locations = [
        self.cmake_build_dir / "Release" / f"api{suffix}",
        self.cmake_build_dir / f"api{suffix}",
    ]
    api_pyd = next((loc for loc in pyd_locations if loc.exists()), None)

    if not api_pyd:
        raise CppBuildError(
            f"Compiled module not found. Searched:\n" +
            "\n".join(f"  - {loc}" for loc in pyd_locations)
        )

    # Keep the previous binary around as a .backup before overwriting.
    if dest.exists():
        backup = dest.with_suffix(dest.suffix + ".backup")
        shutil.move(str(dest), str(backup))
        if verbose:
            print(f"Backed up old module: {backup}")

    shutil.copy2(str(api_pyd), str(dest))
    if verbose:
        print(f"Installed module: {dest}")

    # Copy MinGW runtime DLLs if on Windows.
    self._copy_mingw_dlls(verbose=verbose)
|
|
1881
|
+
|
|
1882
|
+
def _get_pyd_suffix(self) -> str:
|
|
1883
|
+
"""Get platform-specific Python extension suffix."""
|
|
1884
|
+
if platform.system() == "Windows":
|
|
1885
|
+
return ".pyd"
|
|
1886
|
+
else:
|
|
1887
|
+
return ".so"
|
|
1888
|
+
|
|
1889
|
+
def _validate_module(self, verbose: bool = False):
    """Smoke-test that the freshly built 'api' extension can be imported.

    Raises:
        CppValidationError: When the import fails (missing or broken binary).
    """
    bindings_path = str(self.bindings_dir)
    if bindings_path not in sys.path:
        sys.path.insert(0, bindings_path)

    try:
        # Drop any stale cached module so the new binary is actually loaded.
        sys.modules.pop('api', None)

        import api

        if verbose:
            print(f"Module validated: {api.__name__}")
            if hasattr(api, '__doc__'):
                print(f"  Doc: {api.__doc__}")

    except ImportError as e:
        raise CppValidationError(
            f"Module import failed: {e}\n"
            f"Check that api{self._get_pyd_suffix()} exists in {self.bindings_dir}"
        ) from e
|
|
1912
|
+
|
|
1913
|
+
# ========================================================================
|
|
1914
|
+
|
|
1915
|
+
def _load_registry(self) -> Dict[str, Any]:
|
|
1916
|
+
"""Load module registry (v1.6 or v2.0 format)."""
|
|
1917
|
+
if not self.registry_file.exists():
|
|
1918
|
+
return {"schema_version": "2.0", "modules": {}}
|
|
1919
|
+
|
|
1920
|
+
try:
|
|
1921
|
+
with open(self.registry_file, 'r', encoding='utf-8') as f:
|
|
1922
|
+
data = json.load(f)
|
|
1923
|
+
|
|
1924
|
+
# Detect version: v1.6.0 has modules at root, v2.0 has "modules" key
|
|
1925
|
+
if "schema_version" not in data:
|
|
1926
|
+
# v1.6.0 format - convert to v2.0
|
|
1927
|
+
return {"schema_version": "1.6", "modules": data}
|
|
1928
|
+
|
|
1929
|
+
return data
|
|
1930
|
+
|
|
1931
|
+
except json.JSONDecodeError:
|
|
1932
|
+
return {"schema_version": "2.0", "modules": {}}
|
|
1933
|
+
|
|
1934
|
+
def _parse_all_modules(self, verbose: bool = False) -> Dict[str, Dict]:
|
|
1935
|
+
"""Parse all .cp files and return module descriptors.
|
|
1936
|
+
|
|
1937
|
+
Returns:
|
|
1938
|
+
Dict mapping module_name -> module_descriptor (from registry JSON)
|
|
1939
|
+
"""
|
|
1940
|
+
# Run plugin_gen to generate registry
|
|
1941
|
+
self._generate_bindings(verbose=verbose)
|
|
1942
|
+
|
|
1943
|
+
# Load registry to get module info
|
|
1944
|
+
registry = self._load_registry()
|
|
1945
|
+
return registry.get("modules", {})
|
|
1946
|
+
|
|
1947
|
+
def _build_dependency_graph(self, modules: Dict[str, Dict]) -> List[List[str]]:
|
|
1948
|
+
"""Build topological dependency order using Kahn's algorithm.
|
|
1949
|
+
|
|
1950
|
+
Args:
|
|
1951
|
+
modules: Dict of module_name -> module_descriptor
|
|
1952
|
+
|
|
1953
|
+
Returns:
|
|
1954
|
+
List of dependency levels: [[level0_modules], [level1_modules], ...]
|
|
1955
|
+
Each level can be compiled in parallel.
|
|
1956
|
+
|
|
1957
|
+
Raises:
|
|
1958
|
+
CppBuildError: If circular dependency detected
|
|
1959
|
+
"""
|
|
1960
|
+
# Build adjacency list
|
|
1961
|
+
graph = {}
|
|
1962
|
+
in_degree = {}
|
|
1963
|
+
|
|
1964
|
+
for module_name in modules:
|
|
1965
|
+
graph[module_name] = []
|
|
1966
|
+
in_degree[module_name] = 0
|
|
1967
|
+
|
|
1968
|
+
# Build edges: dependency -> module (reversed for build order)
|
|
1969
|
+
for module_name, descriptor in modules.items():
|
|
1970
|
+
for dep in descriptor.get('dependencies', []):
|
|
1971
|
+
dep_module = dep.get('target')
|
|
1972
|
+
if dep_module not in modules:
|
|
1973
|
+
raise CppBuildError(
|
|
1974
|
+
f"Module '{module_name}' depends on unknown module '{dep_module}'"
|
|
1975
|
+
)
|
|
1976
|
+
|
|
1977
|
+
# dep_module must be built before module_name
|
|
1978
|
+
graph[dep_module].append(module_name)
|
|
1979
|
+
in_degree[module_name] += 1
|
|
1980
|
+
|
|
1981
|
+
# Kahn's algorithm for topological sort with levels
|
|
1982
|
+
levels = []
|
|
1983
|
+
visited = set()
|
|
1984
|
+
|
|
1985
|
+
while len(visited) < len(modules):
|
|
1986
|
+
# Find all nodes with in_degree 0 (no dependencies)
|
|
1987
|
+
current_level = []
|
|
1988
|
+
for module in modules:
|
|
1989
|
+
if module not in visited and in_degree[module] == 0:
|
|
1990
|
+
current_level.append(module)
|
|
1991
|
+
|
|
1992
|
+
if not current_level:
|
|
1993
|
+
# Circular dependency detected
|
|
1994
|
+
remaining = set(modules.keys()) - visited
|
|
1995
|
+
raise CppBuildError(
|
|
1996
|
+
f"Circular dependency detected among: {', '.join(remaining)}\n"
|
|
1997
|
+
f"Cannot determine build order."
|
|
1998
|
+
)
|
|
1999
|
+
|
|
2000
|
+
levels.append(current_level)
|
|
2001
|
+
|
|
2002
|
+
# Update in_degrees
|
|
2003
|
+
for module in current_level:
|
|
2004
|
+
visited.add(module)
|
|
2005
|
+
for neighbor in graph.get(module, []):
|
|
2006
|
+
in_degree[neighbor] -= 1
|
|
2007
|
+
|
|
2008
|
+
return levels
|
|
2009
|
+
|
|
2010
|
+
def _get_affected_modules(self, module_name: str, dep_levels: List[List[str]],
|
|
2011
|
+
all_modules: Dict[str, Dict]) -> List[str]:
|
|
2012
|
+
"""Get list of modules affected by changes to given module.
|
|
2013
|
+
|
|
2014
|
+
This includes the module itself and all modules that depend on it.
|
|
2015
|
+
|
|
2016
|
+
Args:
|
|
2017
|
+
module_name: Module that changed
|
|
2018
|
+
dep_levels: Dependency levels from _build_dependency_graph
|
|
2019
|
+
all_modules: All module descriptors
|
|
2020
|
+
|
|
2021
|
+
Returns:
|
|
2022
|
+
List of affected module names in build order
|
|
2023
|
+
"""
|
|
2024
|
+
affected = set()
|
|
2025
|
+
affected.add(module_name)
|
|
2026
|
+
|
|
2027
|
+
dependents = {}
|
|
2028
|
+
for mod_name in all_modules:
|
|
2029
|
+
dependents[mod_name] = []
|
|
2030
|
+
|
|
2031
|
+
for mod_name, mod_info in all_modules.items():
|
|
2032
|
+
for dep in mod_info.get('dependencies', []):
|
|
2033
|
+
dep_target = dep.get('target')
|
|
2034
|
+
if dep_target in dependents:
|
|
2035
|
+
dependents[dep_target].append(mod_name)
|
|
2036
|
+
|
|
2037
|
+
queue = [module_name]
|
|
2038
|
+
while queue:
|
|
2039
|
+
current = queue.pop(0)
|
|
2040
|
+
for dependent in dependents.get(current, []):
|
|
2041
|
+
if dependent not in affected:
|
|
2042
|
+
affected.add(dependent)
|
|
2043
|
+
queue.append(dependent)
|
|
2044
|
+
|
|
2045
|
+
result = []
|
|
2046
|
+
for level in dep_levels:
|
|
2047
|
+
for mod in level:
|
|
2048
|
+
if mod in affected:
|
|
2049
|
+
result.append(mod)
|
|
2050
|
+
|
|
2051
|
+
return result
|
|
2052
|
+
|
|
2053
|
+
def _module_needs_rebuild(self, module_name: str, module_info: Dict, registry: Dict) -> tuple[bool, str]:
|
|
2054
|
+
"""Check if module needs rebuild using hashes.
|
|
2055
|
+
|
|
2056
|
+
Supports both v2.0 (source_hashes) and v1.6 (hashes) registry formats.
|
|
2057
|
+
|
|
2058
|
+
Returns:
|
|
2059
|
+
(needs_rebuild: bool, reason: str)
|
|
2060
|
+
"""
|
|
2061
|
+
pyd_name = f"api_{module_name}"
|
|
2062
|
+
pyd_path = self.bindings_dir / f"{pyd_name}{self._get_pyd_suffix()}"
|
|
2063
|
+
|
|
2064
|
+
# Check if .pyd exists
|
|
2065
|
+
if not pyd_path.exists():
|
|
2066
|
+
return (True, f".pyd not found: {pyd_path.name}")
|
|
2067
|
+
|
|
2068
|
+
# Get stored hashes with v2.0/v1.6 compatibility
|
|
2069
|
+
old_registry = registry.get('modules', {}).get(module_name, {})
|
|
2070
|
+
stored_hashes = old_registry.get('source_hashes', old_registry.get('hashes', {}))
|
|
2071
|
+
|
|
2072
|
+
# If no hashes stored at all, rebuild needed
|
|
2073
|
+
if not stored_hashes:
|
|
2074
|
+
return (True, "No hash history found")
|
|
2075
|
+
|
|
2076
|
+
# Check source file hashes
|
|
2077
|
+
for source_file in module_info.get('sources', []):
|
|
2078
|
+
source_path = self.project_root / source_file
|
|
2079
|
+
if not source_path.exists():
|
|
2080
|
+
source_path = self.include_dir / source_file
|
|
2081
|
+
|
|
2082
|
+
if source_path.exists():
|
|
2083
|
+
current_hash = self._compute_hash(source_path)
|
|
2084
|
+
|
|
2085
|
+
# Try full path first, then filename only (v1.6 compatibility)
|
|
2086
|
+
stored_hash = stored_hashes.get(source_file, stored_hashes.get(source_path.name, None))
|
|
2087
|
+
|
|
2088
|
+
if stored_hash is None:
|
|
2089
|
+
return (True, f"No hash for: {source_path.name}")
|
|
2090
|
+
|
|
2091
|
+
# Handle hash length mismatch (v1.6 = 16 chars, v2.0+ = 64 chars)
|
|
2092
|
+
if len(stored_hash) != len(current_hash):
|
|
2093
|
+
min_len = min(len(stored_hash), len(current_hash))
|
|
2094
|
+
if current_hash[:min_len] != stored_hash[:min_len]:
|
|
2095
|
+
return (True, f"Source changed: {source_path.name}")
|
|
2096
|
+
elif current_hash != stored_hash:
|
|
2097
|
+
return (True, f"Source changed: {source_path.name}")
|
|
2098
|
+
|
|
2099
|
+
# Check .cp file hash
|
|
2100
|
+
cp_file = module_info.get('cp_file', '')
|
|
2101
|
+
if cp_file:
|
|
2102
|
+
cp_path = Path(cp_file)
|
|
2103
|
+
if cp_path.exists():
|
|
2104
|
+
current_hash = self._compute_hash(cp_path)
|
|
2105
|
+
cp_key = f"{module_name}.cp"
|
|
2106
|
+
stored_hash = stored_hashes.get(cp_key, None)
|
|
2107
|
+
|
|
2108
|
+
if stored_hash is None:
|
|
2109
|
+
return (True, ".cp file hash missing")
|
|
2110
|
+
|
|
2111
|
+
# Handle hash length mismatch
|
|
2112
|
+
if len(stored_hash) != len(current_hash):
|
|
2113
|
+
min_len = min(len(stored_hash), len(current_hash))
|
|
2114
|
+
if current_hash[:min_len] != stored_hash[:min_len]:
|
|
2115
|
+
return (True, ".cp file changed")
|
|
2116
|
+
elif current_hash != stored_hash:
|
|
2117
|
+
return (True, ".cp file changed")
|
|
2118
|
+
|
|
2119
|
+
# Check if dependencies were rebuilt more recently
|
|
2120
|
+
for dep in module_info.get('dependencies', []):
|
|
2121
|
+
dep_module = dep.get('target')
|
|
2122
|
+
dep_info = registry.get('modules', {}).get(dep_module, {})
|
|
2123
|
+
|
|
2124
|
+
dep_built = dep_info.get('last_built', '')
|
|
2125
|
+
self_built = old_registry.get('last_built', '')
|
|
2126
|
+
|
|
2127
|
+
if dep_built > self_built:
|
|
2128
|
+
return (True, f"Dependency rebuilt: {dep_module}")
|
|
2129
|
+
|
|
2130
|
+
return (False, "Up to date")
|
|
2131
|
+
|
|
2132
|
+
def rebuild(self,
            modules: Optional[List[str]] = None,
            incremental: bool = True,
            parallel: bool = False,
            clean: bool = False,
            verbose: bool = False,
            fast: bool = False) -> bool:
    """v2.0: Rebuild modules with incremental and per-module support.

    Orchestrates the five build phases: generator build, .cp parsing,
    rebuild-target selection, dependency ordering, and compilation.

    Args:
        modules: List of specific modules to rebuild (None = all)
        incremental: Use incremental compilation (skip unchanged)
        parallel: Compile independent modules in parallel (not implemented yet)
        clean: Force clean rebuild (ignore incremental)
        verbose: Print detailed output
        fast: Ultra-fast mode - skip unnecessary checks, assume generator is up to date

    Returns:
        True if build succeeded
    """
    # Clear caches on clean build
    if clean:
        self._clear_compiler_cache()

    # Fast mode: delegate to the trimmed-down path (unless a clean build
    # was also requested, which takes precedence).
    if fast and not clean:
        return self._rebuild_fast(modules=modules, verbose=verbose)

    if verbose:
        print("=" * 60)
        print("IncludeCPP v2.0 Build System")
        print("=" * 60)
        print(f"Project: {self.config.config.get('project', 'unnamed')}")
        print(f"Incremental: {incremental and not clean}")
        print("=" * 60)

    # Phase 1: Build generator
    if verbose:
        print("\n[1/5] Building plugin generator...")
    self._build_generator(verbose=verbose)

    # Phase 2: Parse all .cp files (side effect: regenerates the registry)
    if verbose:
        print("\n[2/5] Parsing .cp files...")
    all_modules = self._parse_all_modules(verbose=verbose)

    if not all_modules:
        raise CppBuildError("No modules found in plugins directory")

    # Phase 3: Load existing registry for incremental check
    if verbose:
        print("\n[3/5] Determining rebuild targets...")

    registry = self._load_registry()

    # Determine what to rebuild.  Priority: clean > explicit module list
    # > incremental hash check > full rebuild.
    if clean:
        to_rebuild = list(all_modules.keys())
        if verbose:
            print(f"  Clean build: rebuilding all {len(to_rebuild)} modules")

    elif modules is not None:
        # User specified modules; silently drop names that don't exist.
        to_rebuild = [m for m in modules if m in all_modules]
        if verbose:
            print(f"  User specified: {', '.join(to_rebuild)}")

    elif incremental:
        # Incremental: only rebuild modules whose hashes changed.
        to_rebuild = []
        for module_name, module_info in all_modules.items():
            needs_rebuild, reason = self._module_needs_rebuild(module_name, module_info, registry)
            if needs_rebuild:
                to_rebuild.append(module_name)
                if verbose:
                    print(f"  -> {module_name}: {reason}")

        if not to_rebuild:
            if verbose:
                print("  All modules up to date!")
            return True

    else:
        # Full rebuild
        to_rebuild = list(all_modules.keys())
        if verbose:
            print(f"  Full rebuild: {len(to_rebuild)} modules")

    # Phase 4: Build dependency order
    if verbose:
        print(f"\n[4/5] Building {len(to_rebuild)} module(s)...")

    try:
        dep_levels = self._build_dependency_graph(all_modules)

        # Filter the levels down to only modules we need to rebuild.
        # NOTE: filtered_levels is computed for diagnostics only — the
        # actual build below is still monolithic (see Phase 5 comment).
        filtered_levels = []
        for level in dep_levels:
            filtered = [m for m in level if m in to_rebuild]
            if filtered:
                filtered_levels.append(filtered)

        if verbose and len(filtered_levels) > 1:
            print(f"  Build order: {len(filtered_levels)} dependency level(s)")

    except CppBuildError as e:
        raise CppBuildError(f"Dependency resolution failed: {e}") from e

    # Phase 5: Build modules (sequential for now, parallel in future)
    # For v2.0, we still use monolithic build but with per-module tracking.
    # True per-module builds will come later in implementation.
    success = self.rebuild_all(verbose=verbose)

    if verbose:
        print("\n" + "=" * 60)
        print("BUILD COMPLETED")
        print("=" * 60)

    return success
|
|
2251
|
+
|
|
2252
|
+
def _rebuild_fast(self, modules: Optional[List[str]] = None, verbose: bool = False) -> bool:
    """Ultra-fast rebuild - skip unnecessary steps.

    Checks modification times (not content hashes) to skip work when
    nothing changed.  Typically runs in <3 seconds for no-change rebuilds.

    Args:
        modules: Accepted for interface parity with rebuild(), but
            currently unused — fast mode always scans every plugin.
        verbose: Print per-step cache/rebuild status.

    Returns:
        True (failures surface as exceptions from the helpers).
    """
    import time
    start = time.time()

    # Build the generator only if the executable is missing; fast mode
    # assumes an existing generator binary is up to date.
    if not self.gen_exe.exists():
        if verbose:
            print("Generator missing, building...")
        self._build_generator(verbose=verbose)
    elif verbose:
        print("Generator: cached")

    # Regenerate bindings only when missing or stale.
    bindings_cpp = self.bindings_dir / "bindings.cpp"
    needs_bindings = not bindings_cpp.exists()

    if not needs_bindings:
        # Stale if any .cp file is newer than bindings.cpp (mtime check).
        bindings_mtime = bindings_cpp.stat().st_mtime
        for cp in self.plugins_dir.glob("*.cp"):
            if cp.stat().st_mtime > bindings_mtime:
                needs_bindings = True
                if verbose:
                    print(f"  {cp.name} changed")
                break

    if needs_bindings:
        if verbose:
            print("Regenerating bindings...")
        self._generate_bindings(verbose=verbose)
    elif verbose:
        print("Bindings: cached")

    # Compile with per-object caching (only changed translation units).
    if verbose:
        print("Compiling (incremental direct)...")
    self._compile_direct_incremental(verbose=verbose)

    # Quick validation: import the freshly built module.
    self._validate_module(verbose=verbose)

    # Stub generation is best-effort: a failure here must not fail an
    # otherwise successful build, so errors are deliberately swallowed.
    try:
        self._generate_pyi_stub(verbose=verbose)
    except Exception:
        pass

    elapsed = time.time() - start
    if verbose:
        print(f"Fast rebuild completed in {elapsed:.2f}s")

    return True
|
|
2309
|
+
|
|
2310
|
+
def rebuild_all(self, verbose: bool = False) -> bool:
    """Complete build process with CMake fallback to direct compilation.

    Pipeline:
        1. Build/update plugin_gen.exe
        2. Generate bindings.cpp
        3. Try CMake build; fall back to direct incremental g++ on failure
        4. Validate the built module
        5. Generate .pyi stubs, update source hashes, configure VSCode

    Args:
        verbose: When True, print step-by-step progress.

    Returns:
        True on success.

    Raises:
        CppBuildError: If a mandatory phase fails, including when both
            CMake and the direct-compilation fallback fail.
    """
    use_direct = False  # set only when the direct-compilation fallback succeeds

    # Phase 1: steps that are always required.
    basic_steps = [
        ("Building plugin generator", self._build_generator),
        ("Generating bindings", self._generate_bindings),
    ]

    for i, (desc, func) in enumerate(basic_steps):
        if verbose:
            print(f"\n[{i+1}/7] {desc}...")
        try:
            func(verbose=verbose)
        except Exception as e:
            raise CppBuildError(f"{desc} failed: {e}") from e

    # Phase 2: try CMake; fall back to direct compilation if it fails.
    step_num = len(basic_steps) + 1

    try:
        if verbose:
            print(f"\n[{step_num}/7] Generating CMake config...")
        self._generate_cmake(verbose=verbose)

        if verbose:
            print(f"\n[{step_num+1}/7] Configuring CMake...")
        self._configure_cmake(verbose=verbose)

        if verbose:
            print(f"\n[{step_num+2}/7] Compiling C++...")
        self._compile_cpp(verbose=verbose)

        if verbose:
            print(f"\n[{step_num+3}/7] Installing module...")
        self._install_module(verbose=verbose)

    except CppBuildError as e:
        if verbose:
            print(f"\nCMake build failed: {e}")
            print("Falling back to direct incremental compilation...")

        # Direct incremental compilation fallback (v2.9.8: with object caching)
        if verbose:
            print(f"\n[{step_num}/7] Compiling with direct g++ (incremental)...")
        try:
            self._compile_direct_incremental(verbose=verbose)
            # Module is already in bindings_dir, skip install
            use_direct = True
        except Exception as e2:
            raise CppBuildError(f"Both CMake and direct compilation failed.\nCMake: {e}\nDirect: {e2}") from e2

    # Phase 3: validate the built extension module.
    if verbose:
        # Plain string: the original used an f-string with no placeholders.
        print("\n[7/7] Validating module...")
    try:
        self._validate_module(verbose=verbose)
    except Exception as e:
        raise CppBuildError(f"Module validation failed: {e}") from e

    # Phase 4: best-effort .pyi stub for VSCode IntelliSense.
    try:
        self._generate_pyi_stub(verbose=verbose)
    except Exception as e:
        if verbose:
            print(f"Warning: .pyi generation failed: {e}")

    # Phase 4b: best-effort per-module .pyi files for autocomplete.
    try:
        self._generate_all_module_pyi(verbose=verbose)
    except Exception as e:
        if verbose:
            print(f"Warning: Module .pyi generation failed: {e}")

    # Phase 5: record source hashes so incremental builds can skip work.
    self._update_source_hashes(verbose=verbose)

    # Auto-configure VSCode for autocomplete.
    self._configure_vscode_autocomplete(verbose=verbose)

    if verbose:
        print(f"\n{'='*60}")
        print("BUILD SUCCESSFUL!")
        if use_direct:
            print("(Used direct compilation fallback)")
        print(f"{'='*60}")
        print(f"Module: {self.bindings_dir / ('api' + self._get_pyd_suffix())}")
        print(f"Registry: {self.registry_file}")
        print(f"Type Stubs: {self.bindings_dir / 'api.pyi'}")

        # Show module stubs info (every generated .pyi except the main api stub).
        pyi_files = list(self.bindings_dir.glob("*.pyi"))
        module_pyis = [f for f in pyi_files if f.name != "api.pyi"]
        if module_pyis:
            print(f"\nModule Stubs ({len(module_pyis)}):")
            for pyi in module_pyis:
                print(f" - {pyi.name}")

    return True