comfy-env: comfy_env-0.0.64-py3-none-any.whl → comfy_env-0.0.66-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +70 -122
- comfy_env/cli.py +78 -7
- comfy_env/config/__init__.py +19 -0
- comfy_env/config/parser.py +151 -0
- comfy_env/config/types.py +64 -0
- comfy_env/install.py +83 -361
- comfy_env/isolation/__init__.py +9 -0
- comfy_env/isolation/wrap.py +351 -0
- comfy_env/nodes.py +2 -2
- comfy_env/pixi/__init__.py +48 -0
- comfy_env/pixi/core.py +356 -0
- comfy_env/{resolver.py → pixi/resolver.py} +1 -14
- comfy_env/prestartup.py +60 -0
- comfy_env/templates/comfy-env-instructions.txt +30 -87
- comfy_env/templates/comfy-env.toml +68 -136
- comfy_env/workers/__init__.py +21 -32
- comfy_env/workers/base.py +1 -1
- comfy_env/workers/{torch_mp.py → mp.py} +47 -14
- comfy_env/workers/{venv.py → subprocess.py} +405 -441
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/METADATA +2 -1
- comfy_env-0.0.66.dist-info/RECORD +34 -0
- comfy_env/decorator.py +0 -700
- comfy_env/env/__init__.py +0 -47
- comfy_env/env/config.py +0 -201
- comfy_env/env/config_file.py +0 -740
- comfy_env/env/manager.py +0 -636
- comfy_env/env/security.py +0 -267
- comfy_env/ipc/__init__.py +0 -55
- comfy_env/ipc/bridge.py +0 -476
- comfy_env/ipc/protocol.py +0 -265
- comfy_env/ipc/tensor.py +0 -371
- comfy_env/ipc/torch_bridge.py +0 -401
- comfy_env/ipc/transport.py +0 -318
- comfy_env/ipc/worker.py +0 -221
- comfy_env/isolation.py +0 -310
- comfy_env/pixi.py +0 -760
- comfy_env/stub_imports.py +0 -270
- comfy_env/stubs/__init__.py +0 -1
- comfy_env/stubs/comfy/__init__.py +0 -6
- comfy_env/stubs/comfy/model_management.py +0 -58
- comfy_env/stubs/comfy/utils.py +0 -29
- comfy_env/stubs/folder_paths.py +0 -71
- comfy_env/workers/pool.py +0 -241
- comfy_env-0.0.64.dist-info/RECORD +0 -48
- /comfy_env/{env/cuda_gpu_detection.py → pixi/cuda_detection.py} +0 -0
- /comfy_env/{env → pixi}/platform/__init__.py +0 -0
- /comfy_env/{env → pixi}/platform/base.py +0 -0
- /comfy_env/{env → pixi}/platform/darwin.py +0 -0
- /comfy_env/{env → pixi}/platform/linux.py +0 -0
- /comfy_env/{env → pixi}/platform/windows.py +0 -0
- /comfy_env/{registry.py → pixi/registry.py} +0 -0
- /comfy_env/{wheel_sources.yml → pixi/wheel_sources.yml} +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/licenses/LICENSE +0 -0
comfy_env/isolation/wrap.py
ADDED

@@ -0,0 +1,351 @@
"""
Process isolation for ComfyUI node packs.

This module provides wrap_isolated_nodes() which wraps node classes
to run their FUNCTION methods in an isolated Python environment.

Usage:
    # In your node pack's __init__.py:
    from pathlib import Path
    from comfy_env import wrap_isolated_nodes

    NODE_CLASS_MAPPINGS = {}

    # Main nodes (no isolation)
    from .nodes.main import NODE_CLASS_MAPPINGS as main_nodes
    NODE_CLASS_MAPPINGS.update(main_nodes)

    # Isolated nodes (has comfy-env.toml in that directory)
    from .nodes.isolated import NODE_CLASS_MAPPINGS as isolated_nodes
    NODE_CLASS_MAPPINGS.update(
        wrap_isolated_nodes(isolated_nodes, Path(__file__).parent / "nodes/isolated")
    )
"""

import atexit
import inspect
import os
import sys
import threading
from functools import wraps
from pathlib import Path
from typing import Any, Dict, Optional

# Debug logging (set COMFY_ENV_DEBUG=1 to enable)
_DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")

# Global worker cache (one per isolated environment)
_workers: Dict[str, Any] = {}
_workers_lock = threading.Lock()


def _get_isolated_python_version(env_dir: Path) -> Optional[str]:
    """Get Python version from isolated environment."""
    if sys.platform == "win32":
        python_path = env_dir / "python.exe"
    else:
        python_path = env_dir / "bin" / "python"

    if not python_path.exists():
        return None

    import subprocess
    try:
        result = subprocess.run(
            [str(python_path), "-c", "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"],
            capture_output=True, text=True, timeout=5
        )
        if result.returncode == 0:
            return result.stdout.strip()
    except Exception:
        pass
    return None


def _get_worker(
    env_dir: Path,
    working_dir: Path,
    sys_path: list[str],
    lib_path: Optional[str] = None,
):
    """Get or create a persistent worker for the isolated environment."""
    cache_key = str(env_dir)

    with _workers_lock:
        if cache_key in _workers:
            worker = _workers[cache_key]
            if worker.is_alive():
                return worker
            # Worker died, will recreate

        # Check if Python versions match
        host_version = f"{sys.version_info.major}.{sys.version_info.minor}"
        isolated_version = _get_isolated_python_version(env_dir)

        if isolated_version and isolated_version != host_version:
            # Different Python version - must use SubprocessWorker
            from ..workers.subprocess import SubprocessWorker

            if sys.platform == "win32":
                python_path = env_dir / "python.exe"
            else:
                python_path = env_dir / "bin" / "python"

            print(f"[comfy-env] Starting isolated worker (SubprocessWorker)")
            print(f"[comfy-env] Python: {python_path} ({isolated_version} vs host {host_version})")

            worker = SubprocessWorker(
                python=str(python_path),
                working_dir=working_dir,
                sys_path=sys_path,
                name=working_dir.name,
            )
        else:
            # Same Python version - use MPWorker (faster)
            from ..workers.mp import MPWorker

            print(f"[comfy-env] Starting isolated worker (MPWorker)")
            print(f"[comfy-env] Env: {env_dir}")

            worker = MPWorker(
                name=working_dir.name,
                sys_path=sys_path,
                lib_path=lib_path,
            )

        _workers[cache_key] = worker
        return worker


def _shutdown_workers():
    """Shutdown all cached workers. Called at exit."""
    with _workers_lock:
        for name, worker in _workers.items():
            try:
                worker.shutdown()
            except Exception:
                pass
        _workers.clear()


atexit.register(_shutdown_workers)


def _find_env_paths(node_dir: Path) -> tuple[Optional[Path], Optional[Path]]:
    """
    Find site-packages and lib directories for the isolated environment.

    Returns:
        (site_packages, lib_dir) - lib_dir is for LD_LIBRARY_PATH
    """
    import glob

    # Check pixi environment first
    pixi_env = node_dir / ".pixi" / "envs" / "default"
    if pixi_env.exists():
        # Find site-packages (pythonX.Y varies)
        if sys.platform == "win32":
            site_packages = pixi_env / "Lib" / "site-packages"
            lib_dir = pixi_env / "Library" / "bin"  # Windows DLLs
        else:
            pattern = str(pixi_env / "lib" / "python*" / "site-packages")
            matches = glob.glob(pattern)
            site_packages = Path(matches[0]) if matches else None
            lib_dir = pixi_env / "lib"
        if site_packages and site_packages.exists():
            return site_packages, lib_dir if lib_dir.exists() else None

    # Check .venv directory
    venv_dir = node_dir / ".venv"
    if venv_dir.exists():
        if sys.platform == "win32":
            site_packages = venv_dir / "Lib" / "site-packages"
        else:
            pattern = str(venv_dir / "lib" / "python*" / "site-packages")
            matches = glob.glob(pattern)
            site_packages = Path(matches[0]) if matches else None
        if site_packages and site_packages.exists():
            return site_packages, None  # venvs don't have separate lib

    return None, None


def _find_env_dir(node_dir: Path) -> Optional[Path]:
    """Find the environment directory (for cache key)."""
    pixi_env = node_dir / ".pixi" / "envs" / "default"
    if pixi_env.exists():
        return pixi_env
    venv_dir = node_dir / ".venv"
    if venv_dir.exists():
        return venv_dir
    return None


def _wrap_node_class(
    cls: type,
    env_dir: Path,
    working_dir: Path,
    sys_path: list[str],
    lib_path: Optional[str] = None,
) -> type:
    """
    Wrap a node class so its FUNCTION method runs in the isolated environment.

    Args:
        cls: The node class to wrap
        env_dir: Path to the isolated environment directory
        working_dir: Working directory for the worker
        sys_path: Additional paths to add to sys.path in the worker
        lib_path: Path to add to LD_LIBRARY_PATH for conda libraries

    Returns:
        The wrapped class (modified in place)
    """
    func_name = getattr(cls, "FUNCTION", None)
    if not func_name:
        return cls  # Not a valid ComfyUI node class

    original_method = getattr(cls, func_name, None)
    if original_method is None:
        return cls

    # Get source file for the class
    try:
        source_file = Path(inspect.getfile(cls)).resolve()
    except (TypeError, OSError):
        # Can't get source file, skip wrapping
        return cls

    # Compute relative module path from working_dir
    # e.g., /path/to/nodes/io/load_mesh.py -> nodes.io.load_mesh
    try:
        relative_path = source_file.relative_to(working_dir)
        # Convert path to module: nodes/io/load_mesh.py -> nodes.io.load_mesh
        module_name = str(relative_path.with_suffix("")).replace("/", ".").replace("\\", ".")
    except ValueError:
        # File not under working_dir, use stem as fallback
        module_name = source_file.stem

    @wraps(original_method)
    def proxy(self, **kwargs):
        if _DEBUG:
            print(f"[comfy-env] PROXY CALLED: {cls.__name__}.{func_name}", flush=True)
            print(f"[comfy-env] kwargs keys: {list(kwargs.keys())}", flush=True)

        worker = _get_worker(env_dir, working_dir, sys_path, lib_path)
        if _DEBUG:
            print(f"[comfy-env] worker alive: {worker.is_alive()}", flush=True)

        # Clone tensors for IPC if needed
        try:
            from ..workers.tensor_utils import prepare_for_ipc_recursive

            kwargs = {k: prepare_for_ipc_recursive(v) for k, v in kwargs.items()}
        except ImportError:
            pass  # No torch available, skip cloning

        if _DEBUG:
            print(f"[comfy-env] calling worker.call_method...", flush=True)
        result = worker.call_method(
            module_name=module_name,
            class_name=cls.__name__,
            method_name=func_name,
            self_state=self.__dict__.copy() if hasattr(self, "__dict__") else None,
            kwargs=kwargs,
            timeout=600.0,
        )
        if _DEBUG:
            print(f"[comfy-env] call_method returned", flush=True)

        # Clone result tensors
        try:
            from ..workers.tensor_utils import prepare_for_ipc_recursive

            result = prepare_for_ipc_recursive(result)
        except ImportError:
            pass

        return result

    # Replace the method
    setattr(cls, func_name, proxy)

    # Mark as isolated for debugging
    cls._comfy_env_isolated = True

    return cls


def wrap_isolated_nodes(
    node_class_mappings: Dict[str, type],
    nodes_dir: Path,
) -> Dict[str, type]:
    """
    Wrap nodes from a directory that has a comfy-env.toml.

    This is the directory-based isolation API. Call it for each subdirectory
    of nodes/ that has a comfy-env.toml.

    Args:
        node_class_mappings: The NODE_CLASS_MAPPINGS dict from the nodes in this dir.
        nodes_dir: The directory containing comfy-env.toml and the node files.

    Returns:
        The same dict with node classes wrapped for isolation.

    Example:
        # __init__.py
        from comfy_env import wrap_isolated_nodes
        from pathlib import Path

        NODE_CLASS_MAPPINGS = {}

        # Native nodes (no isolation)
        from .nodes.main import NODE_CLASS_MAPPINGS as main_nodes
        NODE_CLASS_MAPPINGS.update(main_nodes)

        # Isolated nodes (has comfy-env.toml)
        from .nodes.cgal import NODE_CLASS_MAPPINGS as cgal_nodes
        NODE_CLASS_MAPPINGS.update(
            wrap_isolated_nodes(cgal_nodes, Path(__file__).parent / "nodes/cgal")
        )
    """
    # Skip if running inside worker subprocess
    if os.environ.get("COMFYUI_ISOLATION_WORKER") == "1":
        return node_class_mappings

    nodes_dir = Path(nodes_dir).resolve()

    # Check for comfy-env.toml
    config_file = nodes_dir / "comfy-env.toml"
    if not config_file.exists():
        print(f"[comfy-env] Warning: No comfy-env.toml in {nodes_dir}")
        return node_class_mappings

    # Find environment directory and paths
    env_dir = _find_env_dir(nodes_dir)
    site_packages, lib_dir = _find_env_paths(nodes_dir)

    if not env_dir or not site_packages:
        print(f"[comfy-env] Warning: Isolated environment not found")
        print(f"[comfy-env] Expected: .pixi/envs/default or .venv")
        print(f"[comfy-env] Run 'comfy-env install' in {nodes_dir}")
        return node_class_mappings

    # Build sys.path for the worker - site-packages first, then node dir
    sys_path = [str(site_packages), str(nodes_dir)]

    # lib_dir for LD_LIBRARY_PATH (conda libraries)
    lib_path = str(lib_dir) if lib_dir else None

    print(f"[comfy-env] Wrapping {len(node_class_mappings)} nodes from {nodes_dir.name}")
    print(f"[comfy-env] site-packages: {site_packages}")
    if lib_path:
        print(f"[comfy-env] lib: {lib_path}")

    # Wrap all node classes
    for node_name, node_cls in node_class_mappings.items():
        if hasattr(node_cls, "FUNCTION"):
            _wrap_node_class(node_cls, env_dir, nodes_dir, sys_path, lib_path)

    return node_class_mappings
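The proxy installed by _wrap_node_class only relies on a small worker surface: is_alive(), shutdown(), and call_method(module_name, class_name, method_name, self_state, kwargs, timeout). The shipped implementations are MPWorker (comfy_env/workers/mp.py) and SubprocessWorker (comfy_env/workers/subprocess.py), which execute inside the isolated interpreter. The stand-in below is only a hypothetical in-process sketch of that contract, useful for seeing what the proxy sends across; it is not part of the package.

# Hypothetical in-process stand-in for the worker contract assumed by wrap.py.
# The real workers run in a separate process with COMFYUI_ISOLATION_WORKER=1,
# so the module they import is not re-wrapped; an in-process stand-in like this
# would re-enter the proxy unless wrapping is skipped the same way.
import importlib
from typing import Any, Dict, Optional


class InlineWorker:
    """Resolves module.Class, restores instance state, and calls the method."""

    def __init__(self, name: str) -> None:
        self.name = name
        self._alive = True

    def is_alive(self) -> bool:
        return self._alive

    def call_method(
        self,
        module_name: str,
        class_name: str,
        method_name: str,
        self_state: Optional[Dict[str, Any]],
        kwargs: Dict[str, Any],
        timeout: float = 600.0,  # ignored in this stand-in
    ) -> Any:
        module = importlib.import_module(module_name)
        node_cls = getattr(module, class_name)
        instance = node_cls()  # assumes a no-argument constructor, as ComfyUI nodes typically have
        if self_state:
            instance.__dict__.update(self_state)  # mirror host-side instance state
        return getattr(instance, method_name)(**kwargs)

    def shutdown(self) -> None:
        self._alive = False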
comfy_env/nodes.py
CHANGED
@@ -16,7 +16,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Callable, List, Set

 if TYPE_CHECKING:
-    from .
+    from .config.types import NodeReq


 def normalize_repo_url(repo: str) -> str:
@@ -111,7 +111,7 @@ def install_node_deps(
         log: Logging callback
         visited: Set of already-processed node names (for cycle detection)
     """
-    from .
+    from .config.parser import discover_config

     for req in node_reqs:
         # Skip if already visited (cycle detection)
comfy_env/pixi/__init__.py
ADDED

@@ -0,0 +1,48 @@
"""
Pixi integration for comfy-env.

All dependencies go through pixi for unified management.
"""

from .core import (
    ensure_pixi,
    get_pixi_path,
    get_pixi_python,
    pixi_run,
    pixi_install,
    clean_pixi_artifacts,
    CUDA_WHEELS_INDEX,
)
from .registry import PACKAGE_REGISTRY
from .cuda_detection import (
    detect_cuda_version,
    detect_cuda_environment,
    detect_gpu_info,
    detect_gpus,
    get_gpu_summary,
    get_recommended_cuda_version,
    GPUInfo,
    CUDAEnvironment,
)

__all__ = [
    # Core pixi functions
    "ensure_pixi",
    "get_pixi_path",
    "get_pixi_python",
    "pixi_run",
    "pixi_install",
    "clean_pixi_artifacts",
    "CUDA_WHEELS_INDEX",
    # Registry
    "PACKAGE_REGISTRY",
    # CUDA detection
    "detect_cuda_version",
    "detect_cuda_environment",
    "detect_gpu_info",
    "detect_gpus",
    "get_gpu_summary",
    "get_recommended_cuda_version",
    "GPUInfo",
    "CUDAEnvironment",
]
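With these re-exports, comfy_env.pixi becomes the single import point for both pixi management and GPU/CUDA detection. A minimal usage sketch follows; the zero-argument call shapes and return values are assumptions, not documented behavior (the actual signatures live in comfy_env/pixi/core.py and comfy_env/pixi/cuda_detection.py):

# Assumed call shapes -- verify against comfy_env/pixi/core.py and
# comfy_env/pixi/cuda_detection.py before relying on them.
from comfy_env.pixi import ensure_pixi, detect_cuda_version, get_gpu_summary

pixi_path = ensure_pixi()             # assumption: installs/locates pixi and returns its path
cuda_version = detect_cuda_version()  # assumption: e.g. "12.4", or None without a usable GPU
print(f"pixi: {pixi_path}")
print(f"CUDA: {cuda_version}")
print(get_gpu_summary())              # assumption: human-readable GPU/CUDA summary string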