comfy-env 0.0.54__tar.gz → 0.0.55__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. {comfy_env-0.0.54 → comfy_env-0.0.55}/PKG-INFO +1 -1
  2. {comfy_env-0.0.54 → comfy_env-0.0.55}/pyproject.toml +1 -1
  3. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/isolation.py +6 -0
  4. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/pixi.py +54 -23
  5. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stub_imports.py +61 -108
  6. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/venv.py +223 -49
  7. {comfy_env-0.0.54 → comfy_env-0.0.55}/.github/workflows/publish.yml +0 -0
  8. {comfy_env-0.0.54 → comfy_env-0.0.55}/.gitignore +0 -0
  9. {comfy_env-0.0.54 → comfy_env-0.0.55}/LICENSE +0 -0
  10. {comfy_env-0.0.54 → comfy_env-0.0.55}/README.md +0 -0
  11. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/__init__.py +0 -0
  12. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/cli.py +0 -0
  13. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/decorator.py +0 -0
  14. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/__init__.py +0 -0
  15. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/config.py +0 -0
  16. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/config_file.py +0 -0
  17. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/cuda_gpu_detection.py +0 -0
  18. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/manager.py +0 -0
  19. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/platform/__init__.py +0 -0
  20. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/platform/base.py +0 -0
  21. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/platform/darwin.py +0 -0
  22. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/platform/linux.py +0 -0
  23. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/platform/windows.py +0 -0
  24. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/env/security.py +0 -0
  25. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/errors.py +0 -0
  26. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/install.py +0 -0
  27. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/__init__.py +0 -0
  28. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/bridge.py +0 -0
  29. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/protocol.py +0 -0
  30. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/tensor.py +0 -0
  31. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/torch_bridge.py +0 -0
  32. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/transport.py +0 -0
  33. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/ipc/worker.py +0 -0
  34. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/nodes.py +0 -0
  35. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/registry.py +0 -0
  36. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/resolver.py +0 -0
  37. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stubs/__init__.py +0 -0
  38. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stubs/comfy/__init__.py +0 -0
  39. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stubs/comfy/model_management.py +0 -0
  40. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stubs/comfy/utils.py +0 -0
  41. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stubs/folder_paths.py +0 -0
  42. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/templates/comfy-env-instructions.txt +0 -0
  43. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/templates/comfy-env.toml +0 -0
  44. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/wheel_sources.yml +0 -0
  45. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/__init__.py +0 -0
  46. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/base.py +0 -0
  47. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/pool.py +0 -0
  48. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/tensor_utils.py +0 -0
  49. {comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/torch_mp.py +0 -0
{comfy_env-0.0.54 → comfy_env-0.0.55}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: comfy-env
- Version: 0.0.54
+ Version: 0.0.55
  Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
  Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
  Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
{comfy_env-0.0.54 → comfy_env-0.0.55}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "comfy-env"
- version = "0.0.54"
+ version = "0.0.55"
  description = "Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation"
  readme = "README.md"
  license = {text = "MIT"}
{comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/isolation.py
@@ -158,7 +158,11 @@ def _wrap_node_class(

      @wraps(original_method)
      def proxy(self, **kwargs):
+         print(f"[comfy-env] PROXY CALLED: {cls.__name__}.{func_name}", flush=True)
+         print(f"[comfy-env] kwargs keys: {list(kwargs.keys())}", flush=True)
+
          worker = _get_worker(env_name, python_path, working_dir, sys_path)
+         print(f"[comfy-env] worker alive: {worker.is_alive()}", flush=True)

          # Clone tensors for IPC if needed
          try:
@@ -168,6 +172,7 @@ def _wrap_node_class(
          except ImportError:
              pass # No torch available, skip cloning

+         print(f"[comfy-env] calling worker.call_method...", flush=True)
          result = worker.call_method(
              module_name=module_name,
              class_name=cls.__name__,
@@ -176,6 +181,7 @@ def _wrap_node_class(
              kwargs=kwargs,
              timeout=600.0,
          )
+         print(f"[comfy-env] call_method returned", flush=True)

          # Clone result tensors
          try:
{comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/pixi.py
@@ -253,19 +253,26 @@ def create_pixi_toml(
      lines.append('libblas = { version = "*", build = "*mkl" }')

      for pkg in conda.packages:
-         # Parse package spec (name=version or name>=version or just name)
-         if "=" in pkg and not pkg.startswith("="):
-             # Has version spec
-             if ">=" in pkg:
-                 name, version = pkg.split(">=", 1)
-                 lines.append(f'{name} = ">={version}"')
-             elif "==" in pkg:
-                 name, version = pkg.split("==", 1)
-                 lines.append(f'{name} = "=={version}"')
-             else:
-                 # Single = means exact version in conda
-                 name, version = pkg.split("=", 1)
-                 lines.append(f'{name} = "=={version}"')
+         # Parse package spec (name=version or name>=version or name<version or just name)
+         if ">=" in pkg:
+             name, version = pkg.split(">=", 1)
+             lines.append(f'{name} = ">={version}"')
+         elif "<=" in pkg:
+             name, version = pkg.split("<=", 1)
+             lines.append(f'{name} = "<={version}"')
+         elif "==" in pkg:
+             name, version = pkg.split("==", 1)
+             lines.append(f'{name} = "=={version}"')
+         elif ">" in pkg:
+             name, version = pkg.split(">", 1)
+             lines.append(f'{name} = ">{version}"')
+         elif "<" in pkg:
+             name, version = pkg.split("<", 1)
+             lines.append(f'{name} = "<{version}"')
+         elif "=" in pkg and not pkg.startswith("="):
+             # Single = means exact version in conda
+             name, version = pkg.split("=", 1)
+             lines.append(f'{name} = "=={version}"')
          else:
              # No version, use any
              lines.append(f'{pkg} = "*"')
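The rewritten parser above depends on checking the two-character operators (">=", "<=", "==") before the single-character ones (">", "<", "=") so that a spec like numpy>=1.24 is not mis-split on ">". A minimal standalone sketch of the same mapping, using a hypothetical spec_to_toml helper that is not part of comfy-env:

def spec_to_toml(pkg: str) -> str:
    # Hypothetical helper illustrating the operator ordering used above.
    for op in (">=", "<=", "==", ">", "<"):
        if op in pkg:
            name, version = pkg.split(op, 1)
            return f'{name} = "{op}{version}"'
    if "=" in pkg and not pkg.startswith("="):
        name, version = pkg.split("=", 1)  # a single "=" is an exact pin in conda
        return f'{name} = "=={version}"'
    return f'{pkg} = "*"'

assert spec_to_toml("numpy>=1.24") == 'numpy = ">=1.24"'
assert spec_to_toml("openexr=3.2") == 'openexr = "==3.2"'
assert spec_to_toml("ffmpeg") == 'ffmpeg = "*"'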
@@ -282,16 +289,22 @@ def create_pixi_toml(
      if local_wheels_dir:
          local_wheels = list(Path(local_wheels_dir).glob("comfy_env-*.whl"))
          if local_wheels:
-             # Use relative path from node_dir
-             rel_path = os.path.relpath(local_wheels[0], node_dir)
-             special_deps["comfy-env"] = f'{{ path = "{rel_path}" }}'
+             # Copy wheel to node_dir (next to pixi.toml) for simple relative path
+             wheel_name = local_wheels[0].name
+             wheel_dest = node_dir / wheel_name
+             if not wheel_dest.exists():
+                 shutil.copy(local_wheels[0], wheel_dest)
+             # Reference with simple relative path (forward slashes, no backslash issues)
+             special_deps["comfy-env"] = f'{{ path = "./{wheel_name}" }}'
          else:
              pypi_deps.append("comfy-env")
      else:
          # Check for local editable comfy-env at ~/utils/comfy-env
          local_comfy_env = Path.home() / "utils" / "comfy-env"
          if local_comfy_env.exists() and (local_comfy_env / "pyproject.toml").exists():
-             special_deps["comfy-env"] = f'{{ path = "{local_comfy_env}", editable = true }}'
+             # Use forward slashes for TOML compatibility
+             path_str = local_comfy_env.as_posix()
+             special_deps["comfy-env"] = f'{{ path = "{path_str}", editable = true }}'
          else:
              pypi_deps.append("comfy-env")
@@ -544,20 +557,38 @@ def pixi_install(

      log("pixi install completed successfully!")

-     # Create _env_{name} symlink for compatibility with uv backend
+     # Create _env_{name} link for compatibility with uv backend
      # This ensures code that expects _env_envname/bin/python works with pixi
      symlink_path = node_dir / f"_env_{env_config.name}"
      pixi_env_path = node_dir / ".pixi" / "envs" / "default"

      if pixi_env_path.exists():
-         # Remove existing symlink or directory if present
-         if symlink_path.is_symlink():
-             symlink_path.unlink()
+         # Remove existing symlink/junction or directory if present
+         if symlink_path.is_symlink() or (sys.platform == "win32" and symlink_path.is_dir()):
+             # On Windows, junctions appear as directories but can be removed with rmdir
+             try:
+                 symlink_path.unlink()
+             except (OSError, PermissionError):
+                 # Junction on Windows - remove with rmdir (doesn't delete contents)
+                 subprocess.run(["cmd", "/c", "rmdir", str(symlink_path)], capture_output=True)
          elif symlink_path.exists():
              shutil.rmtree(symlink_path)

-         symlink_path.symlink_to(pixi_env_path)
-         log(f"Created symlink: _env_{env_config.name} -> .pixi/envs/default")
+         # On Windows, use directory junctions (no admin required) instead of symlinks
+         if sys.platform == "win32":
+             # mklink /J creates a directory junction (no admin privileges needed)
+             result = subprocess.run(
+                 ["cmd", "/c", "mklink", "/J", str(symlink_path), str(pixi_env_path)],
+                 capture_output=True,
+                 text=True
+             )
+             if result.returncode == 0:
+                 log(f"Created junction: _env_{env_config.name} -> .pixi/envs/default")
+             else:
+                 log(f"Warning: Failed to create junction: {result.stderr}")
+         else:
+             symlink_path.symlink_to(pixi_env_path)
+             log(f"Created symlink: _env_{env_config.name} -> .pixi/envs/default")

      return True

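On Windows, 0.0.55 switches from symlink_to() to a directory junction created with cmd /c mklink /J, because junctions do not require administrator rights or Developer Mode. A hedged sketch of the same strategy in isolation (the link_env helper name and paths are illustrative, not part of the package):

import subprocess
import sys
from pathlib import Path

def link_env(link: Path, target: Path) -> None:
    # Directory junction on Windows (no admin needed), regular symlink elsewhere.
    if sys.platform == "win32":
        subprocess.run(["cmd", "/c", "mklink", "/J", str(link), str(target)], check=True)
    else:
        link.symlink_to(target)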
{comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/stub_imports.py
@@ -5,9 +5,9 @@ This module provides automatic import stubbing for packages that exist only
  in the isolated pixi environment, not in the host ComfyUI Python.

  How it works:
- 1. Read package names from comfy-env.toml
- 2. Look up their import names from top_level.txt in the pixi environment
- 3. Register import hooks that provide stub modules for those imports
+ 1. Scan pixi environment's site-packages for installed packages
+ 2. Look up import names from top_level.txt in .dist-info directories
+ 3. Inject stub modules directly into sys.modules for missing packages
  4. Stubs allow class definitions to parse without the real packages
  5. Real packages are used when FUNCTION runs in the isolated worker

@@ -22,13 +22,16 @@ Usage:
  import sys
  import types
  from pathlib import Path
- from typing import Dict, List, Optional, Set
+ from typing import List, Set
+
+
+ def _log(msg: str) -> None:
+     """Log with immediate flush to stderr (visible on Windows subprocess)."""
+     print(msg, file=sys.stderr, flush=True)


  class _StubModule(types.ModuleType):
-     """
-     A stub module that accepts any attribute access or call.
-     """
+     """A stub module that accepts any attribute access or call."""

      def __init__(self, name: str):
          super().__init__(name)
@@ -46,9 +49,7 @@ class _StubModule(types.ModuleType):


  class _StubObject:
-     """
-     A stub object that accepts any operation.
-     """
+     """A stub object that accepts any operation."""

      def __init__(self, name: str = "stub"):
          self._stub_name = name
@@ -99,57 +100,12 @@ class _StubObject:
      def __contains__(self, item): return False


- class _StubFinder:
-     """Import hook finder that provides stub modules for specified packages."""
-
-     def __init__(self, stub_packages: Set[str]):
-         self.stub_packages = stub_packages
-
-     def find_module(self, fullname: str, path=None):
-         top_level = fullname.split('.')[0]
-         if top_level in self.stub_packages:
-             return _StubLoader(self.stub_packages)
-         return None
-
-
- class _StubLoader:
-     """Import hook loader that creates stub modules."""
-
-     def __init__(self, stub_packages: Set[str]):
-         self.stub_packages = stub_packages
-
-     def load_module(self, fullname: str):
-         if fullname in sys.modules:
-             return sys.modules[fullname]
-
-         module = _StubModule(fullname)
-         module.__loader__ = self
-
-         if '.' in fullname:
-             parent = fullname.rsplit('.', 1)[0]
-             module.__package__ = parent
-             if parent not in sys.modules:
-                 self.load_module(parent)
-         else:
-             module.__package__ = fullname
-
-         sys.modules[fullname] = module
-         return module
-
-
- def _normalize_package_name(name: str) -> str:
-     """Normalize package name for comparison (PEP 503)."""
-     return name.lower().replace('-', '_').replace('.', '_')
-
-
  def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
      """
-     Get import names by scanning the pixi environment's site-packages.
+     Get import names from pixi environment using top_level.txt metadata.

-     Finds all importable packages by looking for:
-     1. Directories with __init__.py (packages)
-     2. .py files (single-file modules)
-     3. .so/.pyd files (extension modules)
+     This properly maps package names to import names (e.g., libigl -> igl,
+     PyYAML -> yaml) by reading the canonical top_level.txt files.

      Returns:
          Set of import names that should be stubbed.
@@ -159,16 +115,11 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
      pixi_base = node_dir / ".pixi" / "envs" / "default"

      # Find site-packages (different paths on Windows vs Linux)
-     # Linux: .pixi/envs/default/lib/python3.x/site-packages
-     # Windows: .pixi/envs/default/Lib/site-packages
      site_packages = None
-
-     # Try Windows path first (Lib/site-packages)
      win_site = pixi_base / "Lib" / "site-packages"
      if win_site.exists():
          site_packages = win_site
      else:
-         # Try Linux path (lib/python3.x/site-packages)
          pixi_lib = pixi_base / "lib"
          if pixi_lib.exists():
              python_dirs = list(pixi_lib.glob("python3.*"))
@@ -178,25 +129,44 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
      if site_packages is None or not site_packages.exists():
          return import_names

-     # Scan for importable modules
+     _log(f"[comfy-env] Scanning: {site_packages}")
+
+     # PRIMARY: Read top_level.txt from all .dist-info directories
+     for dist_info in site_packages.glob("*.dist-info"):
+         top_level_file = dist_info / "top_level.txt"
+         if top_level_file.exists():
+             try:
+                 for line in top_level_file.read_text(encoding="utf-8").splitlines():
+                     name = line.strip()
+                     if name and not name.startswith('#'):
+                         # Extract just the top-level name
+                         top_name = name.replace('\\', '/').split('/')[0]
+                         if top_name:
+                             import_names.add(top_name)
+             except Exception:
+                 pass
+
+     # FALLBACK: Scan for packages/modules not covered by dist-info
      for item in site_packages.iterdir():
          name = item.name

-         # Skip private/internal items
          if name.startswith('_') or name.startswith('.'):
              continue
-
-         # Skip dist-info and egg-info directories
          if name.endswith('.dist-info') or name.endswith('.egg-info'):
              continue
-
-         # Skip common non-module items
          if name in {'bin', 'share', 'include', 'etc'}:
              continue

          # Package directory (has __init__.py)
+         if item.is_dir() and (item / "__init__.py").exists():
+             import_names.add(name)
+             continue
+
+         # Namespace package (directory without __init__.py but has submodules)
          if item.is_dir():
-             if (item / "__init__.py").exists():
+             has_py = any(item.glob("*.py"))
+             has_subpkg = any((item / d / "__init__.py").exists() for d in item.iterdir() if d.is_dir())
+             if has_py or has_subpkg:
                  import_names.add(name)
              continue

@@ -206,11 +176,9 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
              continue

          # Extension module (.so on Linux, .pyd on Windows)
-         if '.cpython-' in name and (name.endswith('.so') or name.endswith('.pyd')):
-             # Extract module name: foo.cpython-311-x86_64-linux-gnu.so -> foo
+         if name.endswith('.so') or name.endswith('.pyd'):
              module_name = name.split('.')[0]
              import_names.add(module_name)
-             continue

      return import_names

@@ -230,14 +198,14 @@ def _filter_to_missing(import_names: Set[str]) -> Set[str]:
          except ImportError:
              missing.add(name)
          except Exception:
-             # Other errors - don't stub, let real error surface
-             pass
+             # Other errors (DLL load, etc.) - stub these too
+             missing.add(name)

      return missing


- # Track whether we've already set up stubs
- _stub_finder: Optional[_StubFinder] = None
+ # Track what we stubbed for cleanup
+ _stubbed_modules: Set[str] = set()


  def setup_isolated_imports(init_file: str) -> List[str]:
@@ -258,7 +226,7 @@ def setup_isolated_imports(init_file: str) -> List[str]:

          from .nodes import NODE_CLASS_MAPPINGS # Now works!
      """
-     global _stub_finder
+     global _stubbed_modules

      node_dir = Path(init_file).resolve().parent

@@ -266,52 +234,37 @@ def setup_isolated_imports(init_file: str) -> List[str]:
      pixi_imports = _get_import_names_from_pixi(node_dir)

      if not pixi_imports:
-         print("[comfy-env] No pixi environment found, skipping import stubbing")
+         _log("[comfy-env] No pixi environment found")
          return []

      # Filter to only those missing in host
      missing = _filter_to_missing(pixi_imports)

      if not missing:
-         print("[comfy-env] All pixi packages available in host, no stubbing needed")
+         _log("[comfy-env] All packages available in host")
          return []

-     # Remove old finder if exists
-     if _stub_finder is not None:
-         try:
-             sys.meta_path.remove(_stub_finder)
-         except ValueError:
-             pass
-
-     # Register new finder
-     _stub_finder = _StubFinder(missing)
-     sys.meta_path.insert(0, _stub_finder)
+     # Direct injection into sys.modules - simple and reliable
+     for name in missing:
+         if name not in sys.modules:
+             sys.modules[name] = _StubModule(name)
+             _stubbed_modules.add(name)

-     stubbed = sorted(missing)
+     stubbed = sorted(_stubbed_modules)
      if len(stubbed) <= 10:
-         print(f"[comfy-env] Stubbed {len(stubbed)} imports: {', '.join(stubbed)}")
+         _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed)}")
      else:
-         print(f"[comfy-env] Stubbed {len(stubbed)} imports: {', '.join(stubbed[:10])}... and {len(stubbed)-10} more")
+         _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed[:10])}... +{len(stubbed)-10} more")

      return stubbed


  def cleanup_stubs():
-     """Remove the stub import hooks."""
-     global _stub_finder
+     """Remove injected stub modules from sys.modules."""
+     global _stubbed_modules

-     if _stub_finder is not None:
-         try:
-             sys.meta_path.remove(_stub_finder)
-         except ValueError:
-             pass
-
-     # Remove stubbed modules from sys.modules
-     to_remove = [
-         name for name in sys.modules
-         if isinstance(sys.modules[name], _StubModule)
-     ]
-     for name in to_remove:
+     for name in list(_stubbed_modules):
+         if name in sys.modules and isinstance(sys.modules[name], _StubModule):
              del sys.modules[name]

-     _stub_finder = None
+     _stubbed_modules.clear()
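Compared with 0.0.54's sys.meta_path finder/loader pair, 0.0.55 simply pre-registers a permissive stub module in sys.modules for every import name that exists only in the pixi environment. A self-contained sketch of that idea (module and class names here are illustrative, not the comfy_env API):

import sys
import types

class _Stub(types.ModuleType):
    def __getattr__(self, attr):
        # Any attribute access yields another permissive stub.
        return _Stub(f"{self.__name__}.{attr}")

sys.modules.setdefault("pixi_only_pkg", _Stub("pixi_only_pkg"))

import pixi_only_pkg            # resolves to the stub already in sys.modules
print(pixi_only_pkg.SomeClass)  # attribute access succeeds instead of raising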
{comfy_env-0.0.54 → comfy_env-0.0.55}/src/comfy_env/workers/venv.py
@@ -613,7 +613,77 @@ from types import SimpleNamespace

  # Enable faulthandler to dump traceback on SIGSEGV/SIGABRT/etc
  faulthandler.enable(file=sys.stderr, all_threads=True)
- print("[worker] Faulthandler enabled", flush=True)
+
+ # Pre-import bpy FIRST to avoid DLL conflicts with numpy/torch/MKL
+ # bpy's DLLs must be loaded before other packages load conflicting versions
+ try:
+     import bpy
+     print("[worker] Pre-imported bpy successfully", file=sys.stderr, flush=True)
+ except ImportError as e:
+     # bpy not available in this environment - that's fine
+     pass
+ except Exception as e:
+     print(f"[worker] bpy pre-import warning: {e}", file=sys.stderr, flush=True)
+
+ # Watchdog: dump all thread stacks every 60 seconds to catch hangs
+ import threading
+ import tempfile as _tempfile
+ _watchdog_log = os.path.join(_tempfile.gettempdir(), "comfy_worker_watchdog.log")
+ def _watchdog():
+     import time
+     import io
+     tick = 0
+     while True:
+         time.sleep(60)
+         tick += 1
+         # Capture stack dump to string
+         buf = io.StringIO()
+         faulthandler.dump_traceback(file=buf, all_threads=True)
+         dump = buf.getvalue()
+
+         # Write to file
+         with open(_watchdog_log, "a", encoding="utf-8") as f:
+             f.write(f"\\n=== WATCHDOG TICK {tick} ({time.strftime('%H:%M:%S')}) ===\\n")
+             f.write(dump)
+             f.write("=== END ===\\n")
+             f.flush()
+             os.fsync(f.fileno())
+
+         # Also print
+         print(f"\\n=== WATCHDOG TICK {tick} ===", flush=True)
+         print(dump, flush=True)
+         print("=== END ===\\n", flush=True)
+
+ _watchdog_thread = threading.Thread(target=_watchdog, daemon=True)
+ _watchdog_thread.start()
+ print(f"[worker] Watchdog started, logging to: {_watchdog_log}", flush=True)
+
+ # File-based logging for debugging (persists even if stdout/stderr are swallowed)
+ import tempfile
+ _worker_log_file = os.path.join(tempfile.gettempdir(), "comfy_worker_debug.log")
+ def wlog(msg):
+     """Log to file only - stdout causes pipe buffer deadlock after many requests."""
+     try:
+         with open(_worker_log_file, "a", encoding="utf-8") as f:
+             import time
+             f.write(f"{time.strftime('%H:%M:%S')} {msg}\\n")
+             f.flush()
+             os.fsync(f.fileno())
+     except Exception:
+         pass
+     # NOTE: Don't print to stdout here! After 50+ requests the pipe buffer
+     # fills up and causes deadlock (parent blocked on recv, worker blocked on print)
+
+ wlog(f"[worker] === Worker starting, log file: {_worker_log_file} ===")
+
+ # Debug: print PATH at startup
+ _path_sep = ";" if sys.platform == "win32" else ":"
+ _path_parts = os.environ.get("PATH", "").split(_path_sep)
+ print(f"[worker] PATH has {len(_path_parts)} entries:", file=sys.stderr, flush=True)
+ for _i, _p in enumerate(_path_parts[:15]):
+     print(f"[worker] [{_i}] {_p}", file=sys.stderr, flush=True)
+ if len(_path_parts) > 15:
+     print(f"[worker] ... and {len(_path_parts) - 15} more", file=sys.stderr, flush=True)

  # On Windows, add host Python's DLL directories so packages like opencv can find VC++ runtime
  if sys.platform == "win32":
@@ -633,9 +703,9 @@ if sys.platform == "win32":
      if _pixi_library_bin and hasattr(os, "add_dll_directory"):
          try:
              os.add_dll_directory(_pixi_library_bin)
-             print(f"[worker] Added pixi Library/bin to DLL search: {_pixi_library_bin}", flush=True)
+             wlog(f"[worker] Added pixi Library/bin to DLL search: {_pixi_library_bin}")
          except Exception as e:
-             print(f"[worker] Failed to add pixi Library/bin: {e}", flush=True)
+             wlog(f"[worker] Failed to add pixi Library/bin: {e}")

  # =============================================================================
  # Object Reference System - keep complex objects in worker, pass refs to host
@@ -805,24 +875,25 @@ def _deserialize_isolated_objects(obj)


  def main():
-     print("[worker] Starting...", flush=True)
+     wlog("[worker] Starting...")
      # Get socket address from command line
      if len(sys.argv) < 2:
-         print("Usage: worker.py <socket_addr>", file=sys.stderr)
+         wlog("Usage: worker.py <socket_addr>")
          sys.exit(1)
      socket_addr = sys.argv[1]
-     print(f"[worker] Connecting to {socket_addr}...", flush=True)
+     wlog(f"[worker] Connecting to {socket_addr}...")

      # Connect to host process
      sock = _connect(socket_addr)
      transport = SocketTransport(sock)
-     print("[worker] Connected, waiting for config...", flush=True)
+     wlog("[worker] Connected, waiting for config...")

      # Read config as first message
      config = transport.recv()
      if not config:
+         wlog("[worker] No config received, exiting")
          return
-     print("[worker] Got config, setting up paths...", flush=True)
+     wlog("[worker] Got config, setting up paths...")

      # Setup sys.path
      for p in config.get("sys_paths", []):
@@ -830,66 +901,77 @@ def main():
          sys.path.insert(0, p)

      # Import torch after path setup
-     print("[worker] Importing torch...", flush=True)
+     wlog("[worker] Importing torch...")
      import torch
-     print(f"[worker] Torch imported: {torch.__version__}", flush=True)
+     wlog(f"[worker] Torch imported: {torch.__version__}")

      # Signal ready
      transport.send({"status": "ready"})
-     print("[worker] Ready, entering request loop...", flush=True)
+     wlog("[worker] Ready, entering request loop...")

      # Process requests
+     request_num = 0
      while True:
+         request_num += 1
+         wlog(f"[worker] Waiting for request #{request_num}...")
          try:
              request = transport.recv()
              if not request:
+                 wlog("[worker] Empty request received, exiting loop")
                  break
-         except Exception:
+         except Exception as e:
+             wlog(f"[worker] Exception receiving request: {e}")
              break

          if request.get("method") == "shutdown":
+             wlog("[worker] Shutdown requested")
              break

+         if request.get("method") == "ping":
+             # Health check - respond immediately
+             transport.send({"status": "pong"})
+             continue
+
          try:
              request_type = request.get("type", "call_module")
              module_name = request["module"]
              inputs_path = request.get("inputs_path")
              outputs_path = request.get("outputs_path")
-             print(f"[worker] Request: {request_type} {module_name}", flush=True)
+             wlog(f"[worker] Request: {request_type} {module_name}")

              # Load inputs
              if inputs_path:
-                 print(f"[worker] Loading inputs from {inputs_path}...", flush=True)
+                 wlog(f"[worker] Loading inputs from {inputs_path}...")
                  inputs = torch.load(inputs_path, weights_only=False)
-                 print(f"[worker] Deserializing isolated objects...", flush=True)
+                 wlog(f"[worker] Deserializing isolated objects...")
                  inputs = _deserialize_isolated_objects(inputs)
                  # Resolve any object references from previous node calls
-                 print(f"[worker] Resolving object references...", flush=True)
+                 wlog(f"[worker] Resolving object references...")
                  inputs = _deserialize_input(inputs)
-                 print(f"[worker] Inputs ready: {list(inputs.keys())}", flush=True)
+                 wlog(f"[worker] Inputs ready: {list(inputs.keys())}")
              else:
                  inputs = {}

              # Import module
-             print(f"[worker] Importing module {module_name}...", flush=True)
+             wlog(f"[worker] Importing module {module_name}...")
              module = __import__(module_name, fromlist=[""])
-             print(f"[worker] Module imported", flush=True)
+             wlog(f"[worker] Module imported")

              if request_type == "call_method":
                  class_name = request["class_name"]
                  method_name = request["method_name"]
                  self_state = request.get("self_state")
-                 print(f"[worker] Getting class {class_name}...", flush=True)
+                 wlog(f"[worker] Getting class {class_name}...")

                  cls = getattr(module, class_name)
-                 print(f"[worker] Creating instance...", flush=True)
+                 wlog(f"[worker] Creating instance...")
                  instance = object.__new__(cls)
                  if self_state:
                      instance.__dict__.update(self_state)
-                 print(f"[worker] Calling {method_name}...", flush=True)
+                 wlog(f"[worker] Calling {method_name}...")
                  method = getattr(instance, method_name)
                  result = method(**inputs)
-                 print(f"[worker] Method returned", flush=True)
+                 wlog(f"[worker] Method returned")
              else:
                  func_name = request["func"]
                  func = getattr(module, func_name)
@@ -1000,13 +1082,53 @@ class PersistentVenvWorker(Worker):
              current = current.parent
          return None

+     def _check_socket_health(self) -> bool:
+         """Check if socket connection is healthy using a quick ping."""
+         if not self._transport:
+             return False
+         try:
+             # Send a ping request with short timeout
+             self._transport.send({"method": "ping"})
+             response = self._transport.recv(timeout=2.0)
+             return response is not None and response.get("status") == "pong"
+         except Exception as e:
+             print(f"[{self.name}] Socket health check failed: {e}", file=sys.stderr, flush=True)
+             return False
+
+     def _kill_worker(self) -> None:
+         """Kill the worker process and clean up resources."""
+         if self._process:
+             try:
+                 self._process.kill()
+                 self._process.wait(timeout=5)
+             except:
+                 pass
+             self._process = None
+         if self._transport:
+             try:
+                 self._transport.close()
+             except:
+                 pass
+             self._transport = None
+         if self._server_socket:
+             try:
+                 self._server_socket.close()
+             except:
+                 pass
+             self._server_socket = None
+
      def _ensure_started(self):
          """Start persistent worker subprocess if not running."""
          if self._shutdown:
              raise RuntimeError(f"{self.name}: Worker has been shut down")

          if self._process is not None and self._process.poll() is None:
-             return # Already running
+             # Process is running, but check if socket is healthy
+             if self._transport and self._check_socket_health():
+                 return # All good
+             # Socket is dead/unhealthy - restart worker
+             print(f"[{self.name}] Socket unhealthy, restarting worker...", file=sys.stderr, flush=True)
+             self._kill_worker()

          # Clean up any previous socket
          if self._transport:
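The new health check spans two hunks: the host side here and the worker's ping handler in main()'s request loop above. A hedged restatement of both message shapes in one place, assuming a transport object with send()/recv() like the SocketTransport used in this file:

PING = {"method": "ping"}   # host -> worker
PONG = {"status": "pong"}   # worker -> host

def is_healthy(transport, timeout: float = 2.0) -> bool:
    # Mirrors _check_socket_health: any error or missing "pong" triggers a restart.
    try:
        transport.send(PING)
        reply = transport.recv(timeout=timeout)
        return reply is not None and reply.get("status") == "pong"
    except Exception:
        return False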
@@ -1041,16 +1163,28 @@ class PersistentVenvWorker(Worker):
          # Pixi has python.exe directly in env dir, not in Scripts/
          env_dir = self.python.parent
          library_bin = env_dir / "Library" / "bin"
+
+         # COMPLETE DLL ISOLATION: Build minimal PATH from scratch
+         # Only include Windows system directories + pixi environment
+         # This prevents DLL conflicts from mingw, conda, etc.
+         windir = os.environ.get("WINDIR", r"C:\Windows")
+         minimal_path_parts = [
+             str(env_dir), # Pixi env (python.exe location)
+             str(env_dir / "Scripts"), # Pixi Scripts
+             str(env_dir / "Lib" / "site-packages" / "bpy"), # bpy DLLs
+             f"{windir}\\System32", # Core Windows DLLs
+             f"{windir}", # Windows directory
+             f"{windir}\\System32\\Wbem", # WMI tools
+         ]
          if library_bin.is_dir():
-             existing_path = env.get("PATH", "")
-             # Add env dir and Library/bin to PATH
-             env["PATH"] = f"{env_dir};{library_bin};{existing_path}"
-             # Also pass as env var so worker can use os.add_dll_directory()
-             env["COMFYUI_PIXI_LIBRARY_BIN"] = str(library_bin)
-             # Allow duplicate OpenMP libraries (MKL's libiomp5md.dll + PyTorch's libomp.dll)
-             env["KMP_DUPLICATE_LIB_OK"] = "TRUE"
-             # Use UTF-8 encoding for stdout/stderr to handle Unicode symbols
-             env["PYTHONIOENCODING"] = "utf-8"
+             minimal_path_parts.insert(1, str(library_bin)) # MKL DLLs
+
+         env["PATH"] = ";".join(minimal_path_parts)
+         env["COMFYUI_PIXI_LIBRARY_BIN"] = str(library_bin) if library_bin.is_dir() else ""
+         # Allow duplicate OpenMP libraries (MKL's libiomp5md.dll + PyTorch's libomp.dll)
+         env["KMP_DUPLICATE_LIB_OK"] = "TRUE"
+         # Use UTF-8 encoding for stdout/stderr to handle Unicode symbols
+         env["PYTHONIOENCODING"] = "utf-8"

          # Find ComfyUI base and set env var for folder_paths stub
          comfyui_base = self._find_comfyui_base()
@@ -1062,32 +1196,62 @@ class PersistentVenvWorker(Worker):
          all_sys_path = [str(stubs_dir), str(self.working_dir)] + self.sys_path

          # Launch subprocess with the venv Python, passing socket address
+         # For pixi environments, use "pixi run python" to get proper environment activation
+         # (CONDA_PREFIX, Library paths, etc.) which fixes DLL loading issues with bpy
+         is_pixi = '.pixi' in str(self.python)
+         print(f"[PersistentVenvWorker] is_pixi={is_pixi}, python={self.python}", flush=True)
+         if is_pixi:
+             # Find pixi project root (parent of .pixi directory)
+             pixi_project = self.python
+             while pixi_project.name != '.pixi' and pixi_project.parent != pixi_project:
+                 pixi_project = pixi_project.parent
+             pixi_project = pixi_project.parent # Go up from .pixi to project root
+             pixi_toml = pixi_project / "pixi.toml"
+             print(f"[PersistentVenvWorker] pixi_toml={pixi_toml}, exists={pixi_toml.exists()}", flush=True)
+
+             if pixi_toml.exists():
+                 cmd = ["pixi", "run", "--manifest-path", str(pixi_toml),
+                        "python", str(self._worker_script), self._socket_addr]
+                 # Clean PATH to remove ct-env entries that have conflicting DLLs
+                 # Pixi will add its own environment paths
+                 path_sep = ";" if sys.platform == "win32" else ":"
+                 current_path = env.get("PATH", "")
+                 # Filter out ct-envs and conda/mamba paths that could conflict
+                 clean_path_parts = [
+                     p for p in current_path.split(path_sep)
+                     if not any(x in p.lower() for x in (".ct-envs", "conda", "mamba", "miniforge", "miniconda", "anaconda"))
+                 ]
+                 env["PATH"] = path_sep.join(clean_path_parts)
+                 launch_env = env
+             else:
+                 cmd = [str(self.python), str(self._worker_script), self._socket_addr]
+                 launch_env = env
+         else:
+             cmd = [str(self.python), str(self._worker_script), self._socket_addr]
+             launch_env = env
+
+         print(f"[PersistentVenvWorker] launching cmd={cmd[:3]}...", flush=True)
+         if launch_env:
+             path_sep = ";" if sys.platform == "win32" else ":"
+             path_parts = launch_env.get("PATH", "").split(path_sep)
+             print(f"[PersistentVenvWorker] PATH has {len(path_parts)} entries:", flush=True)
+             for i, p in enumerate(path_parts[:10]): # Show first 10
+                 print(f"[PersistentVenvWorker] [{i}] {p}", flush=True)
+             if len(path_parts) > 10:
+                 print(f"[PersistentVenvWorker] ... and {len(path_parts) - 10} more", flush=True)
          self._process = subprocess.Popen(
-             [str(self.python), str(self._worker_script), self._socket_addr],
+             cmd,
              stdin=subprocess.DEVNULL,
-             stdout=subprocess.PIPE,
+             stdout=subprocess.DEVNULL, # DEVNULL to prevent pipe buffer deadlock
              stderr=subprocess.PIPE, # Capture stderr separately for crash diagnostics
              cwd=str(self.working_dir),
-             env=env,
+             env=launch_env,
          )

          # Clear stderr buffer for new process
          with self._stderr_lock:
              self._stderr_buffer.clear()

-         # Start stdout forwarding thread
-         def forward_stdout():
-             try:
-                 for line in self._process.stdout:
-                     if isinstance(line, bytes):
-                         line = line.decode('utf-8', errors='replace')
-                     sys.stderr.write(f" {line}")
-                     sys.stderr.flush()
-             except:
-                 pass
-         self._stdout_thread = threading.Thread(target=forward_stdout, daemon=True)
-         self._stdout_thread.start()
-
          # Start stderr capture thread (buffer for crash diagnostics)
          def capture_stderr():
              try:
@@ -1224,8 +1388,13 @@ class PersistentVenvWorker(Worker):
          Returns:
              Return value of the method.
          """
+         import sys
+         print(f"[PersistentVenvWorker] call_method: {module_name}.{class_name}.{method_name}", file=sys.stderr, flush=True)
+
          with self._lock:
+             print(f"[PersistentVenvWorker] acquired lock, ensuring started...", file=sys.stderr, flush=True)
              self._ensure_started()
+             print(f"[PersistentVenvWorker] worker started/confirmed", file=sys.stderr, flush=True)

              timeout = timeout or 600.0
              call_id = str(uuid.uuid4())[:8]
@@ -1237,8 +1406,11 @@ class PersistentVenvWorker(Worker):
              try:
                  # Serialize kwargs
                  if kwargs:
+                     print(f"[PersistentVenvWorker] serializing kwargs...", file=sys.stderr, flush=True)
                      serialized_kwargs = _serialize_for_ipc(kwargs)
+                     print(f"[PersistentVenvWorker] saving to {inputs_path}...", file=sys.stderr, flush=True)
                      torch.save(serialized_kwargs, str(inputs_path))
+                     print(f"[PersistentVenvWorker] saved inputs", file=sys.stderr, flush=True)

                  # Send request with class info
                  request = {
@@ -1250,7 +1422,9 @@ class PersistentVenvWorker(Worker):
                      "inputs_path": str(inputs_path) if kwargs else None,
                      "outputs_path": str(outputs_path),
                  }
+                 print(f"[PersistentVenvWorker] sending request via socket...", file=sys.stderr, flush=True)
                  response = self._send_request(request, timeout)
+                 print(f"[PersistentVenvWorker] got response: {response.get('status')}", file=sys.stderr, flush=True)

                  if response.get("status") == "error":
                      raise WorkerError(