comfy-env 0.1.14__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. comfy_env/__init__.py +115 -62
  2. comfy_env/cli.py +89 -319
  3. comfy_env/config/__init__.py +18 -8
  4. comfy_env/config/parser.py +21 -122
  5. comfy_env/config/types.py +37 -70
  6. comfy_env/detection/__init__.py +77 -0
  7. comfy_env/detection/cuda.py +61 -0
  8. comfy_env/detection/gpu.py +230 -0
  9. comfy_env/detection/platform.py +70 -0
  10. comfy_env/detection/runtime.py +103 -0
  11. comfy_env/environment/__init__.py +53 -0
  12. comfy_env/environment/cache.py +141 -0
  13. comfy_env/environment/libomp.py +41 -0
  14. comfy_env/environment/paths.py +38 -0
  15. comfy_env/environment/setup.py +88 -0
  16. comfy_env/install.py +163 -249
  17. comfy_env/isolation/__init__.py +33 -2
  18. comfy_env/isolation/tensor_utils.py +83 -0
  19. comfy_env/isolation/workers/__init__.py +16 -0
  20. comfy_env/{workers → isolation/workers}/mp.py +1 -1
  21. comfy_env/{workers → isolation/workers}/subprocess.py +2 -2
  22. comfy_env/isolation/wrap.py +149 -409
  23. comfy_env/packages/__init__.py +60 -0
  24. comfy_env/packages/apt.py +36 -0
  25. comfy_env/packages/cuda_wheels.py +97 -0
  26. comfy_env/packages/node_dependencies.py +77 -0
  27. comfy_env/packages/pixi.py +85 -0
  28. comfy_env/packages/toml_generator.py +88 -0
  29. comfy_env-0.1.16.dist-info/METADATA +279 -0
  30. comfy_env-0.1.16.dist-info/RECORD +36 -0
  31. comfy_env/cache.py +0 -331
  32. comfy_env/errors.py +0 -293
  33. comfy_env/nodes.py +0 -187
  34. comfy_env/pixi/__init__.py +0 -48
  35. comfy_env/pixi/core.py +0 -588
  36. comfy_env/pixi/cuda_detection.py +0 -303
  37. comfy_env/pixi/platform/__init__.py +0 -21
  38. comfy_env/pixi/platform/base.py +0 -96
  39. comfy_env/pixi/platform/darwin.py +0 -53
  40. comfy_env/pixi/platform/linux.py +0 -68
  41. comfy_env/pixi/platform/windows.py +0 -284
  42. comfy_env/pixi/resolver.py +0 -198
  43. comfy_env/prestartup.py +0 -192
  44. comfy_env/workers/__init__.py +0 -38
  45. comfy_env/workers/tensor_utils.py +0 -188
  46. comfy_env-0.1.14.dist-info/METADATA +0 -291
  47. comfy_env-0.1.14.dist-info/RECORD +0 -33
  48. /comfy_env/{workers → isolation/workers}/base.py +0 -0
  49. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/WHEEL +0 -0
  50. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/entry_points.txt +0 -0
  51. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/licenses/LICENSE +0 -0
comfy_env/install.py CHANGED
@@ -1,17 +1,17 @@
1
- """
2
- Installation API for comfy-env.
3
-
4
- Example:
5
- from comfy_env import install
6
- install() # Auto-discovers comfy-env.toml and installs
7
- """
1
+ """Installation API for comfy-env."""
8
2
 
9
3
  import inspect
4
+ import os
10
5
  from pathlib import Path
11
6
  from typing import Callable, List, Optional, Set, Union
12
7
 
13
- from .config.types import ComfyEnvConfig, NodeReq
14
- from .config.parser import load_config, discover_config
8
+ from .config import ComfyEnvConfig, NodeDependency, load_config, discover_config, CONFIG_FILE_NAME
9
+
10
+ USE_COMFY_ENV_VAR = "USE_COMFY_ENV"
11
+
12
+
13
+ def _is_comfy_env_enabled() -> bool:
14
+ return os.environ.get(USE_COMFY_ENV_VAR, "1").lower() not in ("0", "false", "no", "off")
15
15
 
16
16
 
17
17
  def install(
@@ -20,27 +20,12 @@ def install(
20
20
  log_callback: Optional[Callable[[str], None]] = None,
21
21
  dry_run: bool = False,
22
22
  ) -> bool:
23
- """
24
- Install dependencies from comfy-env.toml.
25
-
26
- Args:
27
- config: Optional path to comfy-env.toml. Auto-discovered if not provided.
28
- node_dir: Optional node directory. Auto-discovered from caller if not provided.
29
- log_callback: Optional callback for logging. Defaults to print.
30
- dry_run: If True, show what would be installed without installing.
31
-
32
- Returns:
33
- True if installation succeeded.
34
- """
35
- # Auto-discover caller's directory if not provided
23
+ """Install dependencies from comfy-env.toml."""
36
24
  if node_dir is None:
37
- frame = inspect.stack()[1]
38
- caller_file = frame.filename
39
- node_dir = Path(caller_file).parent.resolve()
25
+ node_dir = Path(inspect.stack()[1].filename).parent.resolve()
40
26
 
41
27
  log = log_callback or print
42
28
 
43
- # Load config
44
29
  if config is not None:
45
30
  config_path = Path(config)
46
31
  if not config_path.is_absolute():
@@ -50,279 +35,208 @@ def install(
50
35
  cfg = discover_config(node_dir)
51
36
 
52
37
  if cfg is None:
53
- raise FileNotFoundError(
54
- f"No comfy-env.toml found in {node_dir}. "
55
- "Create comfy-env.toml to define dependencies."
56
- )
57
-
58
- # Install apt packages first (Linux only)
59
- if cfg.apt_packages:
60
- _install_apt_packages(cfg.apt_packages, log, dry_run)
61
-
62
- # Set persistent env vars (for OpenMP settings, etc.)
63
- if cfg.env_vars:
64
- _set_persistent_env_vars(cfg.env_vars, log, dry_run)
65
-
66
- # Install node dependencies
67
- if cfg.node_reqs:
68
- _install_node_dependencies(cfg.node_reqs, node_dir, log, dry_run)
38
+ raise FileNotFoundError(f"No comfy-env.toml found in {node_dir}")
69
39
 
70
- # Install everything via pixi
71
- _install_via_pixi(cfg, node_dir, log, dry_run)
40
+ if cfg.apt_packages: _install_apt_packages(cfg.apt_packages, log, dry_run)
41
+ if cfg.env_vars: _set_persistent_env_vars(cfg.env_vars, log, dry_run)
42
+ if cfg.node_reqs: _install_node_dependencies(cfg.node_reqs, node_dir, log, dry_run)
72
43
 
73
- # Auto-discover and install isolated subdirectory environments
74
- _install_isolated_subdirs(node_dir, log, dry_run)
44
+ if _is_comfy_env_enabled():
45
+ _install_via_pixi(cfg, node_dir, log, dry_run)
46
+ _install_isolated_subdirs(node_dir, log, dry_run)
47
+ else:
48
+ log("\n[comfy-env] Isolation disabled (USE_COMFY_ENV=0)")
49
+ _install_to_host_python(cfg, node_dir, log, dry_run)
75
50
 
76
51
  log("\nInstallation complete!")
77
52
  return True
78
53
 
79
54
 
80
- def _install_apt_packages(
81
- packages: List[str],
82
- log: Callable[[str], None],
83
- dry_run: bool,
84
- ) -> None:
85
- """Install apt packages (Linux only)."""
86
- import os
55
+ def _install_apt_packages(packages: List[str], log: Callable[[str], None], dry_run: bool) -> None:
56
+ from .packages.apt import apt_install
87
57
  import platform
88
- import shutil
89
- import subprocess
90
-
91
58
  if platform.system() != "Linux":
92
- log(f"[apt] Skipping apt packages (not Linux)")
93
59
  return
60
+ log(f"\n[apt] Installing: {', '.join(packages)}")
61
+ if not dry_run:
62
+ apt_install(packages, log)
94
63
 
95
- log(f"\n[apt] Installing {len(packages)} system package(s):")
96
- for pkg in packages:
97
- log(f" - {pkg}")
98
-
99
- if dry_run:
100
- log(" (dry run - no changes made)")
101
- return
102
64
 
103
- # Determine if we need sudo
104
- is_root = os.geteuid() == 0
105
- has_sudo = shutil.which("sudo") is not None
106
- use_sudo = not is_root and has_sudo
107
- prefix = ["sudo"] if use_sudo else []
65
+ def _set_persistent_env_vars(env_vars: dict, log: Callable[[str], None], dry_run: bool) -> None:
66
+ import platform, subprocess
67
+ if not env_vars: return
108
68
 
109
- if not is_root and not has_sudo:
110
- log(f"[apt] Warning: No root access. Install manually:")
111
- log(f" sudo apt-get update && sudo apt-get install -y {' '.join(packages)}")
112
- return
113
-
114
- # Run apt-get update (suppress output, just show errors)
115
- log("[apt] Updating package lists...")
116
- result = subprocess.run(
117
- prefix + ["apt-get", "update"],
118
- capture_output=True,
119
- text=True,
120
- )
121
- if result.returncode != 0:
122
- log(f"[apt] Warning: apt-get update failed: {result.stderr.strip()}")
123
-
124
- # Install each package individually (some may not exist on all distros)
125
- log("[apt] Installing packages...")
126
- installed = []
127
- skipped = []
128
- for pkg in packages:
129
- result = subprocess.run(
130
- prefix + ["apt-get", "install", "-y", pkg],
131
- capture_output=True,
132
- text=True,
133
- )
134
- if result.returncode == 0:
135
- installed.append(pkg)
136
- log(f" [apt] Installed {pkg}")
137
- else:
138
- skipped.append(pkg)
139
- log(f" [apt] Skipped {pkg} (not available)")
140
-
141
- if installed:
142
- log(f"[apt] Installed {len(installed)} package(s)")
143
- if skipped:
144
- log(f"[apt] Skipped {len(skipped)} unavailable package(s)")
145
-
146
-
147
- def _set_persistent_env_vars(
148
- env_vars: dict,
149
- log: Callable[[str], None],
150
- dry_run: bool,
151
- ) -> None:
152
- """Set env vars permanently (survives restarts)."""
153
- import os
154
- import platform
155
- import subprocess
156
- from pathlib import Path
157
-
158
- if not env_vars:
159
- return
69
+ log(f"\n[env] Setting {len(env_vars)} env var(s)")
70
+ for k, v in env_vars.items(): log(f" {k}={v}")
71
+ if dry_run: return
160
72
 
161
73
  system = platform.system()
162
- log(f"\n[env] Setting {len(env_vars)} persistent environment variable(s)...")
163
-
164
- for key, value in env_vars.items():
165
- log(f" - {key}={value}")
166
-
167
- if dry_run:
168
- log(" (dry run - no changes made)")
169
- return
170
-
171
74
  if system == "Windows":
172
- # Windows: use setx (writes to registry)
173
- for key, value in env_vars.items():
174
- result = subprocess.run(
175
- ["setx", key, value],
176
- capture_output=True, text=True
177
- )
178
- if result.returncode == 0:
179
- log(f" [env] Set {key} (Windows registry)")
180
- else:
181
- log(f" [env] Warning: Failed to set {key}: {result.stderr.strip()}")
182
- log("[env] Restart terminal/ComfyUI for changes to take effect")
183
-
184
- elif system == "Darwin": # macOS
185
- # macOS: launchctl for GUI apps + zshrc for terminal
186
- for key, value in env_vars.items():
187
- subprocess.run(["launchctl", "setenv", key, value], capture_output=True)
188
- log(f" [env] Set {key} (launchctl)")
189
-
190
- # Also add to zshrc for terminal (zsh is default on macOS)
75
+ for k, v in env_vars.items():
76
+ subprocess.run(["setx", k, v], capture_output=True)
77
+ elif system == "Darwin":
78
+ for k, v in env_vars.items():
79
+ subprocess.run(["launchctl", "setenv", k, v], capture_output=True)
191
80
  _add_to_shell_profile(env_vars, log)
192
-
193
- else: # Linux
81
+ else:
194
82
  _add_to_shell_profile(env_vars, log)
195
83
 
196
84
 
197
- def _add_to_shell_profile(
198
- env_vars: dict,
199
- log: Callable[[str], None],
200
- ) -> None:
201
- """Add env vars to shell profile (Linux/macOS)."""
202
- import os
203
- from pathlib import Path
204
-
205
- # Determine shell profile
85
+ def _add_to_shell_profile(env_vars: dict, log: Callable[[str], None]) -> None:
206
86
  shell = os.environ.get("SHELL", "/bin/bash")
207
- if "zsh" in shell:
208
- rc_file = Path.home() / ".zshrc"
209
- else:
210
- rc_file = Path.home() / ".bashrc"
211
-
87
+ rc_file = Path.home() / (".zshrc" if "zsh" in shell else ".bashrc")
212
88
  profile_file = Path.home() / ".comfy-env-profile"
213
89
 
214
- # Write env vars to our dedicated file
215
90
  with open(profile_file, "w") as f:
216
- f.write("# Generated by comfy-env - do not edit manually\n")
217
- for key, value in env_vars.items():
218
- f.write(f'export {key}="{value}"\n')
219
- log(f" [env] Wrote {profile_file}")
91
+ f.write("# Generated by comfy-env\n")
92
+ for k, v in env_vars.items():
93
+ f.write(f'export {k}="{v}"\n')
220
94
 
221
- # Add source line to shell rc (only once)
222
95
  source_line = f'source "{profile_file}"'
223
96
  existing = rc_file.read_text() if rc_file.exists() else ""
224
-
225
- if source_line not in existing and str(profile_file) not in existing:
97
+ if source_line not in existing:
226
98
  with open(rc_file, "a") as f:
227
- f.write(f'\n# comfy-env environment variables\n')
228
- f.write(f'{source_line}\n')
229
- log(f" [env] Added source line to {rc_file}")
230
- else:
231
- log(f" [env] Already configured in {rc_file}")
232
-
233
- log("[env] Restart terminal/ComfyUI for changes to take effect")
234
-
99
+ f.write(f'\n# comfy-env\n{source_line}\n')
100
+ log(f" [env] Wrote {profile_file}")
235
101
 
236
- def _install_node_dependencies(
237
- node_reqs: List[NodeReq],
238
- node_dir: Path,
239
- log: Callable[[str], None],
240
- dry_run: bool,
241
- ) -> None:
242
- """Install node dependencies (other ComfyUI custom nodes)."""
243
- from .nodes import install_node_deps
244
102
 
103
+ def _install_node_dependencies(node_reqs: List[NodeDependency], node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
104
+ from .packages.node_dependencies import install_node_dependencies
245
105
  custom_nodes_dir = node_dir.parent
246
106
  log(f"\nInstalling {len(node_reqs)} node dependencies...")
247
-
248
107
  if dry_run:
249
108
  for req in node_reqs:
250
- node_path = custom_nodes_dir / req.name
251
- status = "exists" if node_path.exists() else "would clone"
252
- log(f" {req.name}: {status}")
109
+ log(f" {req.name}: {'exists' if (custom_nodes_dir / req.name).exists() else 'would clone'}")
253
110
  return
111
+ install_node_dependencies(node_reqs, custom_nodes_dir, log, {node_dir.name})
254
112
 
255
- visited: Set[str] = {node_dir.name}
256
- install_node_deps(node_reqs, custom_nodes_dir, log, visited)
257
113
 
114
+ def _install_via_pixi(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
115
+ from .packages.pixi import ensure_pixi, get_pixi_python, pixi_clean
116
+ from .packages.toml_generator import write_pixi_toml
117
+ from .packages.cuda_wheels import get_wheel_url, CUDA_TORCH_MAP
118
+ from .detection import get_recommended_cuda_version
119
+ from .environment.cache import get_central_env_path, write_marker, write_env_metadata, MARKER_FILE, get_cache_dir
120
+ import shutil, subprocess, sys
258
121
 
259
- def _install_via_pixi(
260
- cfg: ComfyEnvConfig,
261
- node_dir: Path,
262
- log: Callable[[str], None],
263
- dry_run: bool,
264
- ) -> None:
265
- """Install all packages via pixi."""
266
- from .pixi import pixi_install
267
-
268
- # Count what we're installing
269
- cuda_count = len(cfg.cuda_packages)
270
-
271
- # Count from passthrough (pixi-native format)
272
122
  deps = cfg.pixi_passthrough.get("dependencies", {})
273
123
  pypi_deps = cfg.pixi_passthrough.get("pypi-dependencies", {})
274
-
275
- if cuda_count == 0 and not deps and not pypi_deps:
124
+ if not cfg.cuda_packages and not deps and not pypi_deps:
276
125
  log("No packages to install")
277
126
  return
278
127
 
279
128
  log(f"\nInstalling via pixi:")
280
- if cuda_count:
281
- log(f" CUDA packages: {', '.join(cfg.cuda_packages)}")
282
- if deps:
283
- log(f" Conda packages: {len(deps)}")
284
- if pypi_deps:
285
- log(f" PyPI packages: {len(pypi_deps)}")
129
+ if cfg.cuda_packages: log(f" CUDA: {', '.join(cfg.cuda_packages)}")
130
+ if deps: log(f" Conda: {len(deps)}")
131
+ if pypi_deps: log(f" PyPI: {len(pypi_deps)}")
132
+ if dry_run: return
133
+
134
+ pixi_clean(node_dir, log)
135
+ (node_dir / ".pixi").mkdir(parents=True, exist_ok=True)
136
+ (node_dir / ".pixi" / "config.toml").write_text("detached-environments = false\n")
137
+
138
+ pixi_path = ensure_pixi(log=log)
139
+ cuda_version = torch_version = None
140
+ if cfg.has_cuda and sys.platform != "darwin":
141
+ cuda_version = get_recommended_cuda_version()
142
+ if cuda_version:
143
+ torch_version = CUDA_TORCH_MAP.get(".".join(cuda_version.split(".")[:2]), "2.8")
144
+
145
+ write_pixi_toml(cfg, node_dir, log)
146
+ log("Running pixi install...")
147
+ result = subprocess.run([str(pixi_path), "install"], cwd=node_dir, capture_output=True, text=True)
148
+ if result.returncode != 0:
149
+ raise RuntimeError(f"pixi install failed: {result.stderr}")
150
+
151
+ if cfg.cuda_packages and cuda_version:
152
+ log(f"Installing CUDA packages...")
153
+ python_path = get_pixi_python(node_dir)
154
+ if not python_path:
155
+ raise RuntimeError("No Python in pixi env")
156
+
157
+ result = subprocess.run([str(python_path), "-c", "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"],
158
+ capture_output=True, text=True)
159
+ py_version = result.stdout.strip() if result.returncode == 0 else f"{sys.version_info.major}.{sys.version_info.minor}"
160
+
161
+ for package in cfg.cuda_packages:
162
+ wheel_url = get_wheel_url(package, torch_version, cuda_version, py_version)
163
+ if not wheel_url:
164
+ raise RuntimeError(f"No wheel for {package}")
165
+ log(f" {package}")
166
+ result = subprocess.run([str(python_path), "-m", "pip", "install", "--no-deps", "--no-cache-dir", wheel_url],
167
+ capture_output=True, text=True)
168
+ if result.returncode != 0:
169
+ raise RuntimeError(f"Failed: {result.stderr}")
170
+
171
+ old_env = node_dir / ".pixi" / "envs" / "default"
172
+ config_path = node_dir / "comfy-env.toml"
173
+ main_node_dir = node_dir
174
+ for parent in node_dir.parents:
175
+ if parent.parent.name == "custom_nodes":
176
+ main_node_dir = parent
177
+ break
178
+
179
+ central_env = get_central_env_path(main_node_dir, config_path)
180
+ if old_env.exists():
181
+ get_cache_dir()
182
+ if central_env.exists(): shutil.rmtree(central_env)
183
+ shutil.move(str(old_env), str(central_env))
184
+ write_marker(config_path, central_env)
185
+ write_env_metadata(central_env, config_path.parent / MARKER_FILE)
186
+ shutil.rmtree(node_dir / ".pixi", ignore_errors=True)
187
+ log(f"Env: {central_env}")
188
+
189
+
190
+ def _install_to_host_python(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
191
+ import shutil, subprocess, sys
192
+ from .packages.cuda_wheels import get_wheel_url, CUDA_TORCH_MAP
193
+ from .detection import get_recommended_cuda_version
286
194
 
287
- if dry_run:
288
- log("\n(dry run - no changes made)")
195
+ pypi_deps = cfg.pixi_passthrough.get("pypi-dependencies", {})
196
+ if not pypi_deps and not cfg.cuda_packages:
197
+ log("No packages to install")
289
198
  return
290
199
 
291
- pixi_install(cfg, node_dir, log)
292
-
293
-
294
- def _install_isolated_subdirs(
295
- node_dir: Path,
296
- log: Callable[[str], None],
297
- dry_run: bool,
298
- ) -> None:
299
- """Find and install comfy-env.toml in subdirectories."""
300
- from .pixi import pixi_install
301
- from .config.parser import CONFIG_FILE_NAME
302
-
303
- # Find all comfy-env.toml files in subdirectories (not root)
200
+ pip_packages = []
201
+ for pkg, spec in pypi_deps.items():
202
+ if isinstance(spec, str):
203
+ pip_packages.append(pkg if spec == "*" else f"{pkg}{spec}")
204
+ elif isinstance(spec, dict):
205
+ extras = spec.get("extras", [])
206
+ version = spec.get("version", "*")
207
+ name = f"{pkg}[{','.join(extras)}]" if extras else pkg
208
+ pip_packages.append(name if version == "*" else f"{name}{version}")
209
+
210
+ log(f"\nInstalling to {sys.executable}")
211
+ if dry_run: return
212
+
213
+ use_uv = shutil.which("uv")
214
+ if pip_packages:
215
+ cmd = ["uv", "pip", "install", "--python", sys.executable] + pip_packages if use_uv else [sys.executable, "-m", "pip", "install"] + pip_packages
216
+ subprocess.run(cmd, capture_output=True)
217
+
218
+ if cfg.cuda_packages:
219
+ cuda_version = get_recommended_cuda_version()
220
+ if not cuda_version: return
221
+ torch_version = CUDA_TORCH_MAP.get(".".join(cuda_version.split(".")[:2]), "2.8")
222
+ py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
223
+
224
+ for package in cfg.cuda_packages:
225
+ wheel_url = get_wheel_url(package, torch_version, cuda_version, py_version)
226
+ if wheel_url:
227
+ cmd = ["uv", "pip", "install", "--python", sys.executable, "--no-deps", wheel_url] if use_uv else [sys.executable, "-m", "pip", "install", "--no-deps", wheel_url]
228
+ subprocess.run(cmd, capture_output=True)
229
+
230
+
231
+ def _install_isolated_subdirs(node_dir: Path, log: Callable[[str], None], dry_run: bool) -> None:
304
232
  for config_file in node_dir.rglob(CONFIG_FILE_NAME):
305
- if config_file.parent == node_dir:
306
- continue # Skip root (already installed)
307
-
308
- sub_dir = config_file.parent
309
- relative = sub_dir.relative_to(node_dir)
310
-
311
- log(f"\n[isolated] Installing: {relative}")
312
- sub_cfg = load_config(config_file)
233
+ if config_file.parent == node_dir: continue
234
+ log(f"\n[isolated] {config_file.parent.relative_to(node_dir)}")
235
+ if not dry_run:
236
+ _install_via_pixi(load_config(config_file), config_file.parent, log, dry_run)
313
237
 
314
- if dry_run:
315
- log(f" (dry run)")
316
- continue
317
238
 
318
- pixi_install(sub_cfg, sub_dir, log)
319
-
320
-
321
- def verify_installation(
322
- packages: List[str],
323
- log: Callable[[str], None] = print,
324
- ) -> bool:
325
- """Verify that packages are importable."""
239
+ def verify_installation(packages: List[str], log: Callable[[str], None] = print) -> bool:
326
240
  all_ok = True
327
241
  for package in packages:
328
242
  import_name = package.replace("-", "_").split("[")[0]
@@ -1,9 +1,40 @@
1
1
  """
2
- Isolation module for wrapping ComfyUI nodes to run in isolated environments.
2
+ Isolation layer - Process isolation for node execution.
3
+
4
+ Wraps node classes to execute in isolated subprocess environments.
3
5
  """
4
6
 
5
- from .wrap import wrap_isolated_nodes
7
+ from .wrap import (
8
+ wrap_isolated_nodes,
9
+ wrap_nodes,
10
+ )
11
+ from .workers import (
12
+ Worker,
13
+ WorkerError,
14
+ MPWorker,
15
+ SubprocessWorker,
16
+ )
17
+ from .tensor_utils import (
18
+ TensorKeeper,
19
+ keep_tensor,
20
+ keep_tensors_recursive,
21
+ prepare_tensor_for_ipc,
22
+ prepare_for_ipc_recursive,
23
+ )
6
24
 
7
25
  __all__ = [
26
+ # Node wrapping
8
27
  "wrap_isolated_nodes",
28
+ "wrap_nodes",
29
+ # Workers
30
+ "Worker",
31
+ "WorkerError",
32
+ "MPWorker",
33
+ "SubprocessWorker",
34
+ # Tensor utilities
35
+ "TensorKeeper",
36
+ "keep_tensor",
37
+ "keep_tensors_recursive",
38
+ "prepare_tensor_for_ipc",
39
+ "prepare_for_ipc_recursive",
9
40
  ]
@@ -0,0 +1,83 @@
1
+ """Tensor utilities for IPC - prevents GC races and handles CUDA re-share."""
2
+
3
+ import collections
4
+ import logging
5
+ import threading
6
+ import time
7
+ from typing import Any
8
+
9
+ logger = logging.getLogger("comfy_env")
10
+
11
+
12
+ class TensorKeeper:
13
+ """Keep tensor references during IPC to prevent premature GC."""
14
+
15
+ def __init__(self, retention_seconds: float = 30.0):
16
+ self.retention_seconds = retention_seconds
17
+ self._keeper: collections.deque = collections.deque()
18
+ self._lock = threading.Lock()
19
+
20
+ def keep(self, t: Any) -> None:
21
+ try:
22
+ import torch
23
+ if not isinstance(t, torch.Tensor): return
24
+ except ImportError: return
25
+
26
+ now = time.time()
27
+ with self._lock:
28
+ self._keeper.append((now, t))
29
+ while self._keeper and now - self._keeper[0][0] > self.retention_seconds:
30
+ self._keeper.popleft()
31
+
32
+ def keep_recursive(self, obj: Any) -> None:
33
+ try:
34
+ import torch
35
+ if isinstance(obj, torch.Tensor): self.keep(obj)
36
+ elif isinstance(obj, (list, tuple)):
37
+ for item in obj: self.keep_recursive(item)
38
+ elif isinstance(obj, dict):
39
+ for v in obj.values(): self.keep_recursive(v)
40
+ except ImportError: pass
41
+
42
+ def __len__(self) -> int:
43
+ with self._lock: return len(self._keeper)
44
+
45
+
46
+ _tensor_keeper = TensorKeeper()
47
+ keep_tensor = lambda t: _tensor_keeper.keep(t)
48
+ keep_tensors_recursive = lambda obj: _tensor_keeper.keep_recursive(obj)
49
+
50
+
51
+ def prepare_tensor_for_ipc(t: Any) -> Any:
52
+ """Clone tensor if it was received via IPC (can't be re-shared)."""
53
+ try:
54
+ import torch
55
+ if not isinstance(t, torch.Tensor) or not t.is_cuda: return t
56
+
57
+ import torch.multiprocessing.reductions as reductions
58
+ try:
59
+ reductions.reduce_tensor(t)
60
+ return t
61
+ except RuntimeError as e:
62
+ if "received from another process" in str(e):
63
+ size_mb = t.numel() * t.element_size() / (1024 * 1024)
64
+ if size_mb > 100:
65
+ logger.warning(f"Cloning large CUDA tensor ({size_mb:.1f}MB) for IPC")
66
+ return t.clone()
67
+ raise
68
+ except ImportError: return t
69
+
70
+
71
+ def prepare_for_ipc_recursive(obj: Any) -> Any:
72
+ """Recursively prepare tensors for IPC and keep references."""
73
+ try:
74
+ import torch
75
+ if isinstance(obj, torch.Tensor):
76
+ prepared = prepare_tensor_for_ipc(obj)
77
+ keep_tensor(prepared)
78
+ return prepared
79
+ elif isinstance(obj, list): return [prepare_for_ipc_recursive(x) for x in obj]
80
+ elif isinstance(obj, tuple): return tuple(prepare_for_ipc_recursive(x) for x in obj)
81
+ elif isinstance(obj, dict): return {k: prepare_for_ipc_recursive(v) for k, v in obj.items()}
82
+ except ImportError: pass
83
+ return obj
@@ -0,0 +1,16 @@
1
+ """
2
+ Workers - Process isolation implementations.
3
+
4
+ Provides multiprocessing and subprocess-based workers for isolated execution.
5
+ """
6
+
7
+ from .base import Worker, WorkerError
8
+ from .mp import MPWorker
9
+ from .subprocess import SubprocessWorker
10
+
11
+ __all__ = [
12
+ "Worker",
13
+ "WorkerError",
14
+ "MPWorker",
15
+ "SubprocessWorker",
16
+ ]
@@ -29,7 +29,7 @@ from queue import Empty as QueueEmpty
29
29
  from typing import Any, Callable, Optional
30
30
 
31
31
  from .base import Worker, WorkerError
32
- from .tensor_utils import prepare_for_ipc_recursive, keep_tensors_recursive
32
+ from ..tensor_utils import prepare_for_ipc_recursive, keep_tensors_recursive
33
33
 
34
34
  logger = logging.getLogger("comfy_env")
35
35
 
@@ -42,7 +42,7 @@ from pathlib import Path
42
42
  from typing import Any, Callable, Dict, List, Optional, Tuple, Union
43
43
 
44
44
  from .base import Worker, WorkerError
45
- from ..pixi import get_pixi_path
45
+ from ...packages.pixi import get_pixi_path
46
46
 
47
47
  # Debug logging (set COMFY_ENV_DEBUG=1 to enable)
48
48
  _DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")
@@ -440,7 +440,7 @@ def _watchdog():
440
440
  f.flush()
441
441
 
442
442
  # Also print
443
- print(f"\\n=== WATCHDOG TICK {tick} ===", flush=True)
443
+ print(f"\\n=== WATCHDOG TICK {tick} (debug only, don't worry) ===", flush=True)
444
444
  print(dump, flush=True)
445
445
  print("=== END ===\\n", flush=True)
446
446