comfy-env 0.1.15__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. comfy_env/__init__.py +116 -41
  2. comfy_env/cli.py +89 -317
  3. comfy_env/config/__init__.py +18 -6
  4. comfy_env/config/parser.py +22 -76
  5. comfy_env/config/types.py +37 -0
  6. comfy_env/detection/__init__.py +77 -0
  7. comfy_env/detection/cuda.py +61 -0
  8. comfy_env/detection/gpu.py +230 -0
  9. comfy_env/detection/platform.py +70 -0
  10. comfy_env/detection/runtime.py +103 -0
  11. comfy_env/environment/__init__.py +53 -0
  12. comfy_env/environment/cache.py +141 -0
  13. comfy_env/environment/libomp.py +41 -0
  14. comfy_env/environment/paths.py +38 -0
  15. comfy_env/environment/setup.py +88 -0
  16. comfy_env/install.py +127 -329
  17. comfy_env/isolation/__init__.py +32 -2
  18. comfy_env/isolation/tensor_utils.py +83 -0
  19. comfy_env/isolation/workers/__init__.py +16 -0
  20. comfy_env/{workers → isolation/workers}/mp.py +1 -1
  21. comfy_env/{workers → isolation/workers}/subprocess.py +1 -1
  22. comfy_env/isolation/wrap.py +128 -509
  23. comfy_env/packages/__init__.py +60 -0
  24. comfy_env/packages/apt.py +36 -0
  25. comfy_env/packages/cuda_wheels.py +97 -0
  26. comfy_env/packages/node_dependencies.py +77 -0
  27. comfy_env/packages/pixi.py +85 -0
  28. comfy_env/packages/toml_generator.py +88 -0
  29. comfy_env-0.1.16.dist-info/METADATA +279 -0
  30. comfy_env-0.1.16.dist-info/RECORD +36 -0
  31. comfy_env/cache.py +0 -203
  32. comfy_env/nodes.py +0 -187
  33. comfy_env/pixi/__init__.py +0 -48
  34. comfy_env/pixi/core.py +0 -587
  35. comfy_env/pixi/cuda_detection.py +0 -303
  36. comfy_env/pixi/platform/__init__.py +0 -21
  37. comfy_env/pixi/platform/base.py +0 -96
  38. comfy_env/pixi/platform/darwin.py +0 -53
  39. comfy_env/pixi/platform/linux.py +0 -68
  40. comfy_env/pixi/platform/windows.py +0 -284
  41. comfy_env/pixi/resolver.py +0 -198
  42. comfy_env/prestartup.py +0 -208
  43. comfy_env/workers/__init__.py +0 -38
  44. comfy_env/workers/tensor_utils.py +0 -188
  45. comfy_env-0.1.15.dist-info/METADATA +0 -291
  46. comfy_env-0.1.15.dist-info/RECORD +0 -31
  47. comfy_env/{workers → isolation/workers}/base.py +0 -0
  48. {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/WHEEL +0 -0
  49. {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/entry_points.txt +0 -0
  50. {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/licenses/LICENSE +0 -0
comfy_env/install.py CHANGED
@@ -5,17 +5,13 @@ import os
  from pathlib import Path
  from typing import Callable, List, Optional, Set, Union

- from .config.parser import ComfyEnvConfig, NodeReq, load_config, discover_config
+ from .config import ComfyEnvConfig, NodeDependency, load_config, discover_config, CONFIG_FILE_NAME

-
- # Environment variable to disable comfy-env isolation
  USE_COMFY_ENV_VAR = "USE_COMFY_ENV"


  def _is_comfy_env_enabled() -> bool:
-     """Check if isolation is enabled."""
-     val = os.environ.get(USE_COMFY_ENV_VAR, "1").lower()
-     return val not in ("0", "false", "no", "off")
+     return os.environ.get(USE_COMFY_ENV_VAR, "1").lower() not in ("0", "false", "no", "off")


  def install(
@@ -25,15 +21,11 @@ def install(
      dry_run: bool = False,
  ) -> bool:
      """Install dependencies from comfy-env.toml."""
-     # Auto-discover caller's directory if not provided
      if node_dir is None:
-         frame = inspect.stack()[1]
-         caller_file = frame.filename
-         node_dir = Path(caller_file).parent.resolve()
+         node_dir = Path(inspect.stack()[1].filename).parent.resolve()

      log = log_callback or print

-     # Load config
      if config is not None:
          config_path = Path(config)
          if not config_path.is_absolute():
@@ -43,32 +35,16 @@ def install(
          cfg = discover_config(node_dir)

      if cfg is None:
-         raise FileNotFoundError(
-             f"No comfy-env.toml found in {node_dir}. "
-             "Create comfy-env.toml to define dependencies."
-         )
-
-     # Install apt packages first (Linux only)
-     if cfg.apt_packages:
-         _install_apt_packages(cfg.apt_packages, log, dry_run)
+         raise FileNotFoundError(f"No comfy-env.toml found in {node_dir}")

-     # Set persistent env vars (for OpenMP settings, etc.)
-     if cfg.env_vars:
-         _set_persistent_env_vars(cfg.env_vars, log, dry_run)
+     if cfg.apt_packages: _install_apt_packages(cfg.apt_packages, log, dry_run)
+     if cfg.env_vars: _set_persistent_env_vars(cfg.env_vars, log, dry_run)
+     if cfg.node_reqs: _install_node_dependencies(cfg.node_reqs, node_dir, log, dry_run)

-     # Install node dependencies
-     if cfg.node_reqs:
-         _install_node_dependencies(cfg.node_reqs, node_dir, log, dry_run)
-
-     # Check if isolation is enabled
      if _is_comfy_env_enabled():
-         # Install everything via pixi (isolated environment)
          _install_via_pixi(cfg, node_dir, log, dry_run)
-
-         # Auto-discover and install isolated subdirectory environments
          _install_isolated_subdirs(node_dir, log, dry_run)
      else:
-         # Install directly to host Python (no isolation)
          log("\n[comfy-env] Isolation disabled (USE_COMFY_ENV=0)")
          _install_to_host_python(cfg, node_dir, log, dry_run)

@@ -76,369 +52,191 @@ def install(
      return True


- def _install_apt_packages(
-     packages: List[str],
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Install apt packages (Linux only)."""
-     import os
+ def _install_apt_packages(packages: List[str], log: Callable[[str], None], dry_run: bool) -> None:
+     from .packages.apt import apt_install
      import platform
-     import shutil
-     import subprocess
-
      if platform.system() != "Linux":
-         log(f"[apt] Skipping apt packages (not Linux)")
-         return
-
-     log(f"\n[apt] Installing {len(packages)} system package(s):")
-     for pkg in packages:
-         log(f" - {pkg}")
-
-     if dry_run:
-         log(" (dry run - no changes made)")
          return
+     log(f"\n[apt] Installing: {', '.join(packages)}")
+     if not dry_run:
+         apt_install(packages, log)

-     # Determine if we need sudo
-     is_root = os.geteuid() == 0
-     has_sudo = shutil.which("sudo") is not None
-     use_sudo = not is_root and has_sudo
-     prefix = ["sudo"] if use_sudo else []
-
-     if not is_root and not has_sudo:
-         log(f"[apt] Warning: No root access. Install manually:")
-         log(f" sudo apt-get update && sudo apt-get install -y {' '.join(packages)}")
-         return

-     # Run apt-get update (suppress output, just show errors)
-     log("[apt] Updating package lists...")
-     result = subprocess.run(
-         prefix + ["apt-get", "update"],
-         capture_output=True,
-         text=True,
-     )
-     if result.returncode != 0:
-         log(f"[apt] Warning: apt-get update failed: {result.stderr.strip()}")
-
-     # Install each package individually (some may not exist on all distros)
-     log("[apt] Installing packages...")
-     installed = []
-     skipped = []
-     for pkg in packages:
-         result = subprocess.run(
-             prefix + ["apt-get", "install", "-y", pkg],
-             capture_output=True,
-             text=True,
-         )
-         if result.returncode == 0:
-             installed.append(pkg)
-             log(f" [apt] Installed {pkg}")
-         else:
-             skipped.append(pkg)
-             log(f" [apt] Skipped {pkg} (not available)")
-
-     if installed:
-         log(f"[apt] Installed {len(installed)} package(s)")
-     if skipped:
-         log(f"[apt] Skipped {len(skipped)} unavailable package(s)")
-
-
- def _set_persistent_env_vars(
-     env_vars: dict,
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Set env vars permanently (survives restarts)."""
-     import os
-     import platform
-     import subprocess
-     from pathlib import Path
+ def _set_persistent_env_vars(env_vars: dict, log: Callable[[str], None], dry_run: bool) -> None:
+     import platform, subprocess
+     if not env_vars: return

-     if not env_vars:
-         return
+     log(f"\n[env] Setting {len(env_vars)} env var(s)")
+     for k, v in env_vars.items(): log(f" {k}={v}")
+     if dry_run: return

      system = platform.system()
-     log(f"\n[env] Setting {len(env_vars)} persistent environment variable(s)...")
-
-     for key, value in env_vars.items():
-         log(f" - {key}={value}")
-
-     if dry_run:
-         log(" (dry run - no changes made)")
-         return
-
      if system == "Windows":
-         # Windows: use setx (writes to registry)
-         for key, value in env_vars.items():
-             result = subprocess.run(
-                 ["setx", key, value],
-                 capture_output=True, text=True
-             )
-             if result.returncode == 0:
-                 log(f" [env] Set {key} (Windows registry)")
-             else:
-                 log(f" [env] Warning: Failed to set {key}: {result.stderr.strip()}")
-         log("[env] Restart terminal/ComfyUI for changes to take effect")
-
-     elif system == "Darwin": # macOS
-         # macOS: launchctl for GUI apps + zshrc for terminal
-         for key, value in env_vars.items():
-             subprocess.run(["launchctl", "setenv", key, value], capture_output=True)
-             log(f" [env] Set {key} (launchctl)")
-
-         # Also add to zshrc for terminal (zsh is default on macOS)
+         for k, v in env_vars.items():
+             subprocess.run(["setx", k, v], capture_output=True)
+     elif system == "Darwin":
+         for k, v in env_vars.items():
+             subprocess.run(["launchctl", "setenv", k, v], capture_output=True)
          _add_to_shell_profile(env_vars, log)
-
-     else: # Linux
+     else:
          _add_to_shell_profile(env_vars, log)


- def _add_to_shell_profile(
-     env_vars: dict,
-     log: Callable[[str], None],
- ) -> None:
-     """Add env vars to shell profile (Linux/macOS)."""
-     import os
-     from pathlib import Path
-
-     # Determine shell profile
+ def _add_to_shell_profile(env_vars: dict, log: Callable[[str], None]) -> None:
      shell = os.environ.get("SHELL", "/bin/bash")
-     if "zsh" in shell:
-         rc_file = Path.home() / ".zshrc"
-     else:
-         rc_file = Path.home() / ".bashrc"
-
+     rc_file = Path.home() / (".zshrc" if "zsh" in shell else ".bashrc")
      profile_file = Path.home() / ".comfy-env-profile"

-     # Write env vars to our dedicated file
      with open(profile_file, "w") as f:
-         f.write("# Generated by comfy-env - do not edit manually\n")
-         for key, value in env_vars.items():
-             f.write(f'export {key}="{value}"\n')
-     log(f" [env] Wrote {profile_file}")
+         f.write("# Generated by comfy-env\n")
+         for k, v in env_vars.items():
+             f.write(f'export {k}="{v}"\n')

-     # Add source line to shell rc (only once)
      source_line = f'source "{profile_file}"'
      existing = rc_file.read_text() if rc_file.exists() else ""
-
-     if source_line not in existing and str(profile_file) not in existing:
+     if source_line not in existing:
          with open(rc_file, "a") as f:
-             f.write(f'\n# comfy-env environment variables\n')
-             f.write(f'{source_line}\n')
-         log(f" [env] Added source line to {rc_file}")
-     else:
-         log(f" [env] Already configured in {rc_file}")
-
-     log("[env] Restart terminal/ComfyUI for changes to take effect")
-
+             f.write(f'\n# comfy-env\n{source_line}\n')
+     log(f" [env] Wrote {profile_file}")


- def _install_node_dependencies(
-     node_reqs: List[NodeReq],
-     node_dir: Path,
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Install node dependencies (other ComfyUI custom nodes)."""
-     from .nodes import install_node_deps

+ def _install_node_dependencies(node_reqs: List[NodeDependency], node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
+     from .packages.node_dependencies import install_node_dependencies
      custom_nodes_dir = node_dir.parent
      log(f"\nInstalling {len(node_reqs)} node dependencies...")
-
      if dry_run:
          for req in node_reqs:
-             node_path = custom_nodes_dir / req.name
-             status = "exists" if node_path.exists() else "would clone"
-             log(f" {req.name}: {status}")
+             log(f" {req.name}: {'exists' if (custom_nodes_dir / req.name).exists() else 'would clone'}")
          return
-
-     visited: Set[str] = {node_dir.name}
-     install_node_deps(node_reqs, custom_nodes_dir, log, visited)
+     install_node_dependencies(node_reqs, custom_nodes_dir, log, {node_dir.name})


- def _install_via_pixi(
-     cfg: ComfyEnvConfig,
-     node_dir: Path,
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Install all packages via pixi."""
-     from .pixi import pixi_install
+ def _install_via_pixi(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
+     from .packages.pixi import ensure_pixi, get_pixi_python, pixi_clean
+     from .packages.toml_generator import write_pixi_toml
+     from .packages.cuda_wheels import get_wheel_url, CUDA_TORCH_MAP
+     from .detection import get_recommended_cuda_version
+     from .environment.cache import get_central_env_path, write_marker, write_env_metadata, MARKER_FILE, get_cache_dir
+     import shutil, subprocess, sys

-     # Count what we're installing
-     cuda_count = len(cfg.cuda_packages)
-
-     # Count from passthrough (pixi-native format)
      deps = cfg.pixi_passthrough.get("dependencies", {})
      pypi_deps = cfg.pixi_passthrough.get("pypi-dependencies", {})
-
-     if cuda_count == 0 and not deps and not pypi_deps:
+     if not cfg.cuda_packages and not deps and not pypi_deps:
          log("No packages to install")
          return

      log(f"\nInstalling via pixi:")
-     if cuda_count:
-         log(f" CUDA packages: {', '.join(cfg.cuda_packages)}")
-     if deps:
-         log(f" Conda packages: {len(deps)}")
-     if pypi_deps:
-         log(f" PyPI packages: {len(pypi_deps)}")
-
-     if dry_run:
-         log("\n(dry run - no changes made)")
-         return
+     if cfg.cuda_packages: log(f" CUDA: {', '.join(cfg.cuda_packages)}")
+     if deps: log(f" Conda: {len(deps)}")
+     if pypi_deps: log(f" PyPI: {len(pypi_deps)}")
+     if dry_run: return
+
+     pixi_clean(node_dir, log)
+     (node_dir / ".pixi").mkdir(parents=True, exist_ok=True)
+     (node_dir / ".pixi" / "config.toml").write_text("detached-environments = false\n")
+
+     pixi_path = ensure_pixi(log=log)
+     cuda_version = torch_version = None
+     if cfg.has_cuda and sys.platform != "darwin":
+         cuda_version = get_recommended_cuda_version()
+         if cuda_version:
+             torch_version = CUDA_TORCH_MAP.get(".".join(cuda_version.split(".")[:2]), "2.8")

-     pixi_install(cfg, node_dir, log)
+     write_pixi_toml(cfg, node_dir, log)
+     log("Running pixi install...")
+     result = subprocess.run([str(pixi_path), "install"], cwd=node_dir, capture_output=True, text=True)
+     if result.returncode != 0:
+         raise RuntimeError(f"pixi install failed: {result.stderr}")

+     if cfg.cuda_packages and cuda_version:
+         log(f"Installing CUDA packages...")
+         python_path = get_pixi_python(node_dir)
+         if not python_path:
+             raise RuntimeError("No Python in pixi env")

- def _install_to_host_python(
-     cfg: ComfyEnvConfig,
-     node_dir: Path,
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Install packages directly to host Python (no isolation)."""
-     import shutil
-     import subprocess
-     import sys
+         result = subprocess.run([str(python_path), "-c", "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"],
+                                 capture_output=True, text=True)
+         py_version = result.stdout.strip() if result.returncode == 0 else f"{sys.version_info.major}.{sys.version_info.minor}"

-     from .pixi import CUDA_WHEELS_INDEX, find_wheel_url
-     from .pixi.cuda_detection import get_recommended_cuda_version
+         for package in cfg.cuda_packages:
+             wheel_url = get_wheel_url(package, torch_version, cuda_version, py_version)
+             if not wheel_url:
+                 raise RuntimeError(f"No wheel for {package}")
+             log(f" {package}")
+             result = subprocess.run([str(python_path), "-m", "pip", "install", "--no-deps", "--no-cache-dir", wheel_url],
+                                     capture_output=True, text=True)
+             if result.returncode != 0:
+                 raise RuntimeError(f"Failed: {result.stderr}")
+
+     old_env = node_dir / ".pixi" / "envs" / "default"
+     config_path = node_dir / "comfy-env.toml"
+     main_node_dir = node_dir
+     for parent in node_dir.parents:
+         if parent.parent.name == "custom_nodes":
+             main_node_dir = parent
+             break
+
+     central_env = get_central_env_path(main_node_dir, config_path)
+     if old_env.exists():
+         get_cache_dir()
+         if central_env.exists(): shutil.rmtree(central_env)
+         shutil.move(str(old_env), str(central_env))
+         write_marker(config_path, central_env)
+         write_env_metadata(central_env, config_path.parent / MARKER_FILE)
+         shutil.rmtree(node_dir / ".pixi", ignore_errors=True)
+     log(f"Env: {central_env}")
+
+
+ def _install_to_host_python(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
+     import shutil, subprocess, sys
+     from .packages.cuda_wheels import get_wheel_url, CUDA_TORCH_MAP
+     from .detection import get_recommended_cuda_version

-     # Collect packages to install
      pypi_deps = cfg.pixi_passthrough.get("pypi-dependencies", {})
-     conda_deps = cfg.pixi_passthrough.get("dependencies", {})
-
-     # Warn about conda dependencies (can't install without pixi)
-     # Filter out 'python' and 'pip' which are meta-dependencies
-     real_conda_deps = {k: v for k, v in conda_deps.items() if k not in ("python", "pip")}
-     if real_conda_deps:
-         log(f"\n[warning] Cannot install conda packages without isolation:")
-         for pkg in real_conda_deps:
-             log(f" - {pkg}")
-         log(" Set USE_COMFY_ENV=1 to enable isolated environments")
-
-     # Nothing to install?
      if not pypi_deps and not cfg.cuda_packages:
          log("No packages to install")
          return

-     # Build pip install command
      pip_packages = []
-
-     # Add pypi dependencies
      for pkg, spec in pypi_deps.items():
          if isinstance(spec, str):
-             if spec == "*":
-                 pip_packages.append(pkg)
-             else:
-                 pip_packages.append(f"{pkg}{spec}")
+             pip_packages.append(pkg if spec == "*" else f"{pkg}{spec}")
          elif isinstance(spec, dict):
-             version = spec.get("version", "*")
              extras = spec.get("extras", [])
-             if extras:
-                 pkg_with_extras = f"{pkg}[{','.join(extras)}]"
-             else:
-                 pkg_with_extras = pkg
-             if version == "*":
-                 pip_packages.append(pkg_with_extras)
-             else:
-                 pip_packages.append(f"{pkg_with_extras}{version}")
-
-     log(f"\nInstalling to host Python ({sys.executable}):")
-     if pip_packages:
-         log(f" PyPI packages: {len(pip_packages)}")
-     if cfg.cuda_packages:
-         log(f" CUDA packages: {', '.join(cfg.cuda_packages)}")
-
-     if dry_run:
-         if pip_packages:
-             log(f" Would install: {', '.join(pip_packages)}")
-         log("\n(dry run - no changes made)")
-         return
+             version = spec.get("version", "*")
+             name = f"{pkg}[{','.join(extras)}]" if extras else pkg
+             pip_packages.append(name if version == "*" else f"{name}{version}")

-     # Use uv if available, otherwise pip
-     use_uv = shutil.which("uv") is not None
+     log(f"\nInstalling to {sys.executable}")
+     if dry_run: return

-     # Install regular PyPI packages
+     use_uv = shutil.which("uv")
      if pip_packages:
-         if use_uv:
-             cmd = ["uv", "pip", "install", "--python", sys.executable] + pip_packages
-         else:
-             cmd = [sys.executable, "-m", "pip", "install"] + pip_packages
-
-         log(f" Running: {' '.join(cmd[:4])}...")
-         result = subprocess.run(cmd, capture_output=True, text=True)
-         if result.returncode != 0:
-             log(f" [error] pip install failed: {result.stderr.strip()[:200]}")
-         else:
-             log(f" Installed {len(pip_packages)} package(s)")
-
-     # Install CUDA packages from cuda-wheels
+         cmd = ["uv", "pip", "install", "--python", sys.executable] + pip_packages if use_uv else [sys.executable, "-m", "pip", "install"] + pip_packages
+         subprocess.run(cmd, capture_output=True)
+
      if cfg.cuda_packages:
          cuda_version = get_recommended_cuda_version()
-         if not cuda_version:
-             log(" [warning] No CUDA detected, skipping CUDA packages")
-             return
-
-         # Get torch version for wheel matching
-         cuda_mm = ".".join(cuda_version.split(".")[:2])
-         from .pixi.core import CUDA_TORCH_MAP
-         torch_version = CUDA_TORCH_MAP.get(cuda_mm, "2.8")
-
+         if not cuda_version: return
+         torch_version = CUDA_TORCH_MAP.get(".".join(cuda_version.split(".")[:2]), "2.8")
          py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
-         log(f" CUDA {cuda_version}, PyTorch {torch_version}, Python {py_version}")

          for package in cfg.cuda_packages:
-             wheel_url = find_wheel_url(package, torch_version, cuda_version, py_version)
-             if not wheel_url:
-                 log(f" [error] No wheel found for {package}")
-                 continue
+             wheel_url = get_wheel_url(package, torch_version, cuda_version, py_version)
+             if wheel_url:
+                 cmd = ["uv", "pip", "install", "--python", sys.executable, "--no-deps", wheel_url] if use_uv else [sys.executable, "-m", "pip", "install", "--no-deps", wheel_url]
+                 subprocess.run(cmd, capture_output=True)

-             log(f" Installing {package}...")
-             if use_uv:
-                 cmd = ["uv", "pip", "install", "--python", sys.executable, "--no-deps", wheel_url]
-             else:
-                 cmd = [sys.executable, "-m", "pip", "install", "--no-deps", wheel_url]
-
-             result = subprocess.run(cmd, capture_output=True, text=True)
-             if result.returncode != 0:
-                 log(f" [error] Failed to install {package}: {result.stderr.strip()[:200]}")
-             else:
-                 log(f" Installed {package}")

-
- def _install_isolated_subdirs(
-     node_dir: Path,
-     log: Callable[[str], None],
-     dry_run: bool,
- ) -> None:
-     """Find and install comfy-env.toml in subdirectories."""
-     from .pixi import pixi_install
-     from .config.parser import CONFIG_FILE_NAME
-
-     # Find all comfy-env.toml files in subdirectories (not root)
+ def _install_isolated_subdirs(node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
      for config_file in node_dir.rglob(CONFIG_FILE_NAME):
-         if config_file.parent == node_dir:
-             continue # Skip root (already installed)
-
-         sub_dir = config_file.parent
-         relative = sub_dir.relative_to(node_dir)
-
-         log(f"\n[isolated] Installing: {relative}")
-         sub_cfg = load_config(config_file)
+         if config_file.parent == node_dir: continue
+         log(f"\n[isolated] {config_file.parent.relative_to(node_dir)}")
+         if not dry_run:
+             _install_via_pixi(load_config(config_file), config_file.parent, log, dry_run)

-         if dry_run:
-             log(f" (dry run)")
-             continue

-         pixi_install(sub_cfg, sub_dir, log)
-
-
- def verify_installation(
-     packages: List[str],
-     log: Callable[[str], None] = print,
- ) -> bool:
-     """Verify that packages are importable."""
+ def verify_installation(packages: List[str], log: Callable[[str], None] = print) -> bool:
      all_ok = True
      for package in packages:
          import_name = package.replace("-", "_").split("[")[0]
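The module's public entry points keep their names through the rewrite: install() (accepting node_dir, config, log_callback and dry_run, returning bool) and verify_installation(packages, log=print), with isolation gated by the USE_COMFY_ENV environment variable. A minimal usage sketch, assuming only the signatures visible in the diff above (the package list passed to verify_installation is a placeholder):

    # Sketch only - based on the install.py signatures shown above, not on package docs.
    import os
    from pathlib import Path

    from comfy_env.install import install, verify_installation

    # USE_COMFY_ENV=0 skips the pixi-isolated path and installs into the host Python.
    os.environ.setdefault("USE_COMFY_ENV", "1")

    # dry_run=True only logs what would be installed from comfy-env.toml.
    if install(node_dir=Path("."), dry_run=True):
        verify_installation(["numpy"])  # placeholder package list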
comfy_env/isolation/__init__.py CHANGED
@@ -1,10 +1,40 @@
  """
- Isolation module for wrapping ComfyUI nodes to run in isolated environments.
+ Isolation layer - Process isolation for node execution.
+
+ Wraps node classes to execute in isolated subprocess environments.
  """

- from .wrap import wrap_isolated_nodes, wrap_nodes
+ from .wrap import (
+     wrap_isolated_nodes,
+     wrap_nodes,
+ )
+ from .workers import (
+     Worker,
+     WorkerError,
+     MPWorker,
+     SubprocessWorker,
+ )
+ from .tensor_utils import (
+     TensorKeeper,
+     keep_tensor,
+     keep_tensors_recursive,
+     prepare_tensor_for_ipc,
+     prepare_for_ipc_recursive,
+ )

  __all__ = [
+     # Node wrapping
      "wrap_isolated_nodes",
      "wrap_nodes",
+     # Workers
+     "Worker",
+     "WorkerError",
+     "MPWorker",
+     "SubprocessWorker",
+     # Tensor utilities
+     "TensorKeeper",
+     "keep_tensor",
+     "keep_tensors_recursive",
+     "prepare_tensor_for_ipc",
+     "prepare_for_ipc_recursive",
  ]
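Together with the moved worker modules listed above ({workers → isolation/workers}), this consolidates the isolation API under comfy_env.isolation. A short import sketch using only names from the new __all__ (the call signature of wrap_isolated_nodes is not part of this diff, so no call is shown):

    # Names re-exported by comfy_env.isolation as of 0.1.16 (taken from the __all__ above).
    from comfy_env.isolation import (
        wrap_isolated_nodes,        # node wrapping entry point
        SubprocessWorker,           # worker class, previously under comfy_env.workers
        prepare_for_ipc_recursive,  # tensor IPC helper
    )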