comfy-env 0.1.8__tar.gz → 0.1.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {comfy_env-0.1.8 → comfy_env-0.1.10}/PKG-INFO +2 -1
  2. {comfy_env-0.1.8 → comfy_env-0.1.10}/pyproject.toml +2 -1
  3. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/isolation/wrap.py +40 -3
  4. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/nodes.py +28 -0
  5. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/core.py +128 -14
  6. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/prestartup.py +13 -2
  7. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/mp.py +58 -0
  8. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/subprocess.py +25 -7
  9. {comfy_env-0.1.8 → comfy_env-0.1.10}/.github/workflows/ci.yml +0 -0
  10. {comfy_env-0.1.8 → comfy_env-0.1.10}/.github/workflows/publish.yml +0 -0
  11. {comfy_env-0.1.8 → comfy_env-0.1.10}/.gitignore +0 -0
  12. {comfy_env-0.1.8 → comfy_env-0.1.10}/LICENSE +0 -0
  13. {comfy_env-0.1.8 → comfy_env-0.1.10}/README.md +0 -0
  14. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/__init__.py +0 -0
  15. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/cli.py +0 -0
  16. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/config/__init__.py +0 -0
  17. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/config/parser.py +0 -0
  18. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/config/types.py +0 -0
  19. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/errors.py +0 -0
  20. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/install.py +0 -0
  21. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/isolation/__init__.py +0 -0
  22. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/__init__.py +0 -0
  23. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/cuda_detection.py +0 -0
  24. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/platform/__init__.py +0 -0
  25. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/platform/base.py +0 -0
  26. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/platform/darwin.py +0 -0
  27. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/platform/linux.py +0 -0
  28. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/platform/windows.py +0 -0
  29. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/resolver.py +0 -0
  30. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/templates/comfy-env-instructions.txt +0 -0
  31. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/templates/comfy-env.toml +0 -0
  32. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/__init__.py +0 -0
  33. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/base.py +0 -0
  34. {comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/tensor_utils.py +0 -0
{comfy_env-0.1.8 → comfy_env-0.1.10}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: comfy-env
-Version: 0.1.8
+Version: 0.1.10
 Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
 Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
 Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.10
+Requires-Dist: pip>=21.0
 Requires-Dist: tomli-w>=1.0.0
 Requires-Dist: tomli>=2.0.0; python_version < '3.11'
 Requires-Dist: uv>=0.4.0

{comfy_env-0.1.8 → comfy_env-0.1.10}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "comfy-env"
-version = "0.1.8"
+version = "0.1.10"
 description = "Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation"
 readme = "README.md"
 license = {text = "MIT"}
@@ -22,6 +22,7 @@ dependencies = [
     "tomli>=2.0.0; python_version < '3.11'",  # TOML parsing (built-in tomllib for 3.11+)
     "tomli-w>=1.0.0",  # TOML writing (no stdlib equivalent)
     "uv>=0.4.0",  # Fast Python package installer and venv creator
+    "pip>=21.0",  # Fallback package installer
 ]
 
 [project.optional-dependencies]

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/isolation/wrap.py
@@ -34,6 +34,12 @@ from typing import Any, Dict, Optional
 # Debug logging (set COMFY_ENV_DEBUG=1 to enable)
 _DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")
 
+
+def get_env_name(dir_name: str) -> str:
+    """Convert directory name to env name: ComfyUI-UniRig → _env_unirig"""
+    name = dir_name.lower().replace("-", "_").lstrip("comfyui_")
+    return f"_env_{name}"
+
 # Global worker cache (one per isolated environment)
 _workers: Dict[str, Any] = {}
 _workers_lock = threading.Lock()
@@ -144,13 +150,27 @@ def _find_env_paths(node_dir: Path) -> tuple[Optional[Path], Optional[Path]]:
     """
     import glob
 
-    # Check pixi environment first
+    # Check _env_<name> directory first (new pattern)
+    env_name = get_env_name(node_dir.name)
+    env_dir = node_dir / env_name
+    if env_dir.exists():
+        if sys.platform == "win32":
+            site_packages = env_dir / "Lib" / "site-packages"
+            lib_dir = env_dir / "Library" / "bin"
+        else:
+            pattern = str(env_dir / "lib" / "python*" / "site-packages")
+            matches = glob.glob(pattern)
+            site_packages = Path(matches[0]) if matches else None
+            lib_dir = env_dir / "lib"
+        if site_packages and site_packages.exists():
+            return site_packages, lib_dir if lib_dir.exists() else None
+
+    # Fallback: Check old .pixi/envs/default (for backward compat)
     pixi_env = node_dir / ".pixi" / "envs" / "default"
     if pixi_env.exists():
-        # Find site-packages (pythonX.Y varies)
         if sys.platform == "win32":
             site_packages = pixi_env / "Lib" / "site-packages"
-            lib_dir = pixi_env / "Library" / "bin"  # Windows DLLs
+            lib_dir = pixi_env / "Library" / "bin"
         else:
             pattern = str(pixi_env / "lib" / "python*" / "site-packages")
             matches = glob.glob(pattern)
@@ -176,6 +196,12 @@ def _find_env_paths(node_dir: Path) -> tuple[Optional[Path], Optional[Path]]:
 
 def _find_env_dir(node_dir: Path) -> Optional[Path]:
     """Find the environment directory (for cache key)."""
+    # Check _env_<name> first
+    env_name = get_env_name(node_dir.name)
+    env_dir = node_dir / env_name
+    if env_dir.exists():
+        return env_dir
+    # Fallback to old paths
     pixi_env = node_dir / ".pixi" / "envs" / "default"
     if pixi_env.exists():
         return pixi_env
@@ -319,6 +345,13 @@ wrap_isolated_nodes(
     if os.environ.get("COMFYUI_ISOLATION_WORKER") == "1":
         return node_class_mappings
 
+    # Get ComfyUI base path from folder_paths (canonical source)
+    try:
+        import folder_paths
+        comfyui_base = folder_paths.base_path
+    except ImportError:
+        comfyui_base = None
+
     nodes_dir = Path(nodes_dir).resolve()
 
     # Check for comfy-env.toml
@@ -341,6 +374,10 @@
     except Exception:
         pass  # Ignore errors reading config
 
+    # Set COMFYUI_BASE for worker to find ComfyUI modules
+    if comfyui_base:
+        env_vars["COMFYUI_BASE"] = str(comfyui_base)
+
     # Find environment directory and paths
     env_dir = _find_env_dir(nodes_dir)
     site_packages, lib_dir = _find_env_paths(nodes_dir)
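
For illustration only (not part of the diff): the get_env_name() helper added to wrap.py (and mirrored in core.py and prestartup.py) derives the environment directory name from the node directory name. A point worth noting when reading it: Python's str.lstrip() strips a set of characters rather than a literal prefix, so the sketch below also includes a hypothetical prefix-safe variant using str.removeprefix() (Python 3.9+). Only the first function is taken from the diff; the second is an editor-added illustration.

def get_env_name(dir_name: str) -> str:
    """Helper as added in this release; lstrip treats its argument as a character set."""
    name = dir_name.lower().replace("-", "_").lstrip("comfyui_")
    return f"_env_{name}"

def get_env_name_prefix_safe(dir_name: str) -> str:
    """Hypothetical prefix-safe variant using str.removeprefix (Python 3.9+)."""
    name = dir_name.lower().replace("-", "_").removeprefix("comfyui_")
    return f"_env_{name}"

print(get_env_name("ComfyUI-UniRig"))              # _env_nirig (the leading 'u' of 'unirig' is also stripped)
print(get_env_name_prefix_safe("ComfyUI-UniRig"))  # _env_unirig
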

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/nodes.py
@@ -71,6 +71,31 @@ def clone_node(
     return node_path
 
 
+def install_requirements(
+    node_dir: Path,
+    log: Callable[[str], None],
+) -> None:
+    """
+    Install requirements.txt in a node directory if it exists.
+
+    Args:
+        node_dir: Path to the node directory
+        log: Logging callback
+    """
+    requirements_file = node_dir / "requirements.txt"
+
+    if requirements_file.exists():
+        log(f" Installing requirements for {node_dir.name}...")
+        result = subprocess.run(
+            ["uv", "pip", "install", "-r", str(requirements_file), "--python", sys.executable],
+            cwd=node_dir,
+            capture_output=True,
+            text=True,
+        )
+        if result.returncode != 0:
+            log(f" Warning: requirements.txt install failed for {node_dir.name}: {result.stderr.strip()[:200]}")
+
+
 def run_install_script(
     node_dir: Path,
     log: Callable[[str], None],
@@ -132,6 +157,9 @@ def install_node_deps(
         # Clone the repository
         clone_node(req.repo, req.name, custom_nodes_dir, log)
 
+        # Install requirements.txt if present
+        install_requirements(node_path, log)
+
         # Run install.py if present
         run_install_script(node_path, log)
 

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/pixi/core.py
@@ -40,10 +40,87 @@ CUDA_TORCH_MAP = {
     "12.1": "2.4",
 }
 
+def find_wheel_url(
+    package: str,
+    torch_version: str,
+    cuda_version: str,
+    python_version: str,
+) -> Optional[str]:
+    """
+    Query cuda-wheels index and return the direct URL for the matching wheel.
+
+    This bypasses pip's version validation by providing a direct URL,
+    which is necessary for wheels where the filename has a local version
+    but the internal METADATA doesn't (e.g., flash-attn from mjun0812).
+
+    Args:
+        package: Package name (e.g., "flash-attn")
+        torch_version: PyTorch version (e.g., "2.8")
+        cuda_version: CUDA version (e.g., "12.8")
+        python_version: Python version (e.g., "3.10")
+
+    Returns:
+        Direct URL to the wheel file, or None if no match found.
+    """
+    cuda_short = cuda_version.replace(".", "")[:3]  # "12.8" -> "128"
+    torch_short = torch_version.replace(".", "")[:2]  # "2.8" -> "28"
+    py_tag = f"cp{python_version.replace('.', '')}"  # "3.10" -> "cp310"
+
+    # Platform tag for current system
+    if sys.platform == "linux":
+        platform_tag = "linux_x86_64"
+    elif sys.platform == "win32":
+        platform_tag = "win_amd64"
+    else:
+        platform_tag = None  # macOS doesn't typically have CUDA wheels
+
+    # Local version patterns to match:
+    # cuda-wheels style: +cu128torch28
+    # PyG style: +pt28cu128
+    local_patterns = [
+        f"+cu{cuda_short}torch{torch_short}",  # cuda-wheels style
+        f"+pt{torch_short}cu{cuda_short}",  # PyG style
+    ]
+
+    pkg_variants = [package, package.replace("-", "_"), package.replace("_", "-")]
+
+    for pkg_dir in pkg_variants:
+        index_url = f"{CUDA_WHEELS_INDEX}{pkg_dir}/"
+        try:
+            with urllib.request.urlopen(index_url, timeout=10) as resp:
+                html = resp.read().decode("utf-8")
+        except Exception:
+            continue
+
+        # Parse href and display name from HTML: <a href="URL">DISPLAY_NAME</a>
+        link_pattern = re.compile(r'href="([^"]+\.whl)"[^>]*>([^<]+)</a>', re.IGNORECASE)
+
+        for match in link_pattern.finditer(html):
+            wheel_url = match.group(1)
+            display_name = match.group(2)
+
+            # Match on display name (has normalized torch28 format)
+            matches_cuda_torch = any(p in display_name for p in local_patterns)
+            matches_python = py_tag in display_name
+            matches_platform = platform_tag is None or platform_tag in display_name
+
+            if matches_cuda_torch and matches_python and matches_platform:
+                # Return absolute URL
+                if wheel_url.startswith("http"):
+                    return wheel_url
+                # Relative URL - construct absolute
+                return f"{CUDA_WHEELS_INDEX}{pkg_dir}/{wheel_url}"
+
+    return None
+
+
 def find_matching_wheel(package: str, torch_version: str, cuda_version: str) -> Optional[str]:
     """
     Query cuda-wheels index to find a wheel matching the CUDA/torch version.
     Returns the full version spec (e.g., "flash-attn===2.8.3+cu128torch2.8") or None.
+
+    Note: This is used as a fallback for packages with correct wheel metadata.
+    For packages with mismatched metadata (like flash-attn), use find_wheel_url() instead.
     """
     cuda_short = cuda_version.replace(".", "")[:3]  # "12.8" -> "128"
     torch_short = torch_version.replace(".", "")[:2]  # "2.8" -> "28"
@@ -67,11 +144,11 @@ def find_matching_wheel(package: str, torch_version: str, cuda_version: str) ->
     )
 
     # Local version patterns to match:
-    # flash-attn style: +cu128torch2.8
+    # cuda-wheels style: +cu128torch28
     # PyG style: +pt28cu128
     local_patterns = [
-        f"+cu{cuda_short}torch{torch_version}",  # flash-attn style
-        f"+pt{torch_short}cu{cuda_short}",  # PyG style
+        f"+cu{cuda_short}torch{torch_short}",  # cuda-wheels style
+        f"+pt{torch_short}cu{cuda_short}",  # PyG style
     ]
 
     best_match = None
@@ -204,6 +281,12 @@ def ensure_pixi(
     return pixi_path
 
 
+def get_env_name(dir_name: str) -> str:
+    """Convert directory name to env name: ComfyUI-UniRig → _env_unirig"""
+    name = dir_name.lower().replace("-", "_").lstrip("comfyui_")
+    return f"_env_{name}"
+
+
 def clean_pixi_artifacts(node_dir: Path, log: Callable[[str], None] = print) -> None:
     """Remove previous pixi installation artifacts."""
     for path in [node_dir / "pixi.toml", node_dir / "pixi.lock"]:
@@ -212,11 +295,21 @@ def clean_pixi_artifacts(node_dir: Path, log: Callable[[str], None] = print) ->
     pixi_dir = node_dir / ".pixi"
     if pixi_dir.exists():
         shutil.rmtree(pixi_dir)
+    # Also clean old _env_* directories
+    env_name = get_env_name(node_dir.name)
+    env_dir = node_dir / env_name
+    if env_dir.exists():
+        shutil.rmtree(env_dir)
 
 
 def get_pixi_python(node_dir: Path) -> Optional[Path]:
     """Get path to Python in the pixi environment."""
-    env_dir = node_dir / ".pixi" / "envs" / "default"
+    # Check new _env_<name> location first
+    env_name = get_env_name(node_dir.name)
+    env_dir = node_dir / env_name
+    if not env_dir.exists():
+        # Fallback to old .pixi path
+        env_dir = node_dir / ".pixi" / "envs" / "default"
     if sys.platform == "win32":
         python_path = env_dir / "python.exe"
     else:
@@ -394,29 +487,35 @@ def pixi_install(
         log(f"pixi install failed:\n{result.stderr}")
         raise RuntimeError(f"pixi install failed: {result.stderr}")
 
-    # Install CUDA packages with --find-links (searches all known sources)
+    # Install CUDA packages via direct URL or find-links fallback
    if cfg.cuda_packages and cuda_version:
         log(f"Installing CUDA packages: {cfg.cuda_packages}")
         python_path = get_pixi_python(node_dir)
         if not python_path:
             raise RuntimeError("Could not find Python in pixi environment")
 
+        # Get Python version from the pixi environment
+        py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+
         for package in cfg.cuda_packages:
+            # Find direct wheel URL (bypasses metadata validation)
+            wheel_url = find_wheel_url(package, torch_version, cuda_version, py_version)
+
+            if not wheel_url:
+                raise RuntimeError(
+                    f"No wheel found for {package} with CUDA {cuda_version}, "
+                    f"torch {torch_version}, Python {py_version}. "
+                    f"Check cuda-wheels index."
+                )
+
+            log(f" Installing {package} from {wheel_url}")
             pip_cmd = [
                 str(python_path), "-m", "pip", "install",
-                "--no-index",  # Only use --find-links, don't query PyPI
                 "--no-deps",
                 "--no-cache-dir",
+                wheel_url,
             ]
 
-            # Add all find-links sources
-            for url in get_all_find_links(package, torch_version, cuda_version):
-                pip_cmd.extend(["--find-links", url])
-
-            # Get package spec with local version for CUDA/torch compatibility
-            pkg_spec = get_package_spec(package, torch_version, cuda_version)
-            log(f" Installing {pkg_spec}")
-            pip_cmd.append(pkg_spec)
             result = subprocess.run(pip_cmd, capture_output=True, text=True)
             if result.returncode != 0:
                 log(f"CUDA package install failed for {package}:\n{result.stderr}")
@@ -424,5 +523,20 @@
 
         log("CUDA packages installed")
 
+    # Move environment from .pixi/envs/default to _env_<name>
+    old_env = node_dir / ".pixi" / "envs" / "default"
+    env_name = get_env_name(node_dir.name)
+    new_env = node_dir / env_name
+
+    if old_env.exists():
+        if new_env.exists():
+            shutil.rmtree(new_env)  # Clean old env
+        shutil.move(str(old_env), str(new_env))
+        # Clean up .pixi directory (keep pixi.toml and pixi.lock)
+        pixi_dir = node_dir / ".pixi"
+        if pixi_dir.exists():
+            shutil.rmtree(pixi_dir)
+        log(f"Moved environment to {new_env}")
+
     log("Installation complete!")
     return True
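
For illustration only (not part of the diff): a minimal sketch of how the new direct-URL flow in core.py might be exercised by a hypothetical caller outside pixi_install(). find_wheel_url() is the function added above; the version strings here are example values, and the pip flags mirror the ones pixi_install() now passes.

import subprocess
import sys

from comfy_env.pixi.core import find_wheel_url

# Example values; inside pixi_install() these come from CUDA detection and the pixi env.
url = find_wheel_url("flash-attn", torch_version="2.8", cuda_version="12.8", python_version="3.10")

if url is None:
    raise RuntimeError("No matching wheel on the cuda-wheels index")

# Install by direct URL, bypassing pip's version/metadata resolution.
subprocess.run(
    [sys.executable, "-m", "pip", "install", "--no-deps", "--no-cache-dir", url],
    check=True,
)
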

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/prestartup.py
@@ -11,6 +11,12 @@ from pathlib import Path
 from typing import Optional, Dict
 
 
+def get_env_name(dir_name: str) -> str:
+    """Convert directory name to env name: ComfyUI-UniRig → _env_unirig"""
+    name = dir_name.lower().replace("-", "_").lstrip("comfyui_")
+    return f"_env_{name}"
+
+
 def _load_env_vars(config_path: str) -> Dict[str, str]:
     """
     Load [env_vars] section from comfy-env.toml.
@@ -121,10 +127,15 @@ def setup_env(node_dir: Optional[str] = None) -> None:
     for key, value in env_vars.items():
         os.environ[key] = value
 
-    pixi_env = os.path.join(node_dir, ".pixi", "envs", "default")
+    # Check _env_<name> first, then fallback to old .pixi path
+    env_name = get_env_name(os.path.basename(node_dir))
+    pixi_env = os.path.join(node_dir, env_name)
 
     if not os.path.exists(pixi_env):
-        return  # No pixi environment
+        # Fallback to old .pixi path
+        pixi_env = os.path.join(node_dir, ".pixi", "envs", "default")
+        if not os.path.exists(pixi_env):
+            return  # No environment found
 
     if sys.platform == "win32":
         # Windows: add to PATH for DLL loading

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/mp.py
@@ -40,6 +40,53 @@ _SHUTDOWN = object()
 _CALL_METHOD = "call_method"
 
 
+# ---------------------------------------------------------------------------
+# Tensor file transfer - avoids CUDA IPC issues with cudaMallocAsync
+# ---------------------------------------------------------------------------
+
+def _save_tensors_to_files(obj, file_registry=None):
+    """Recursively save torch tensors to temp files for IPC."""
+    if file_registry is None:
+        file_registry = []
+
+    try:
+        import torch
+        if isinstance(obj, torch.Tensor):
+            import tempfile
+            f = tempfile.NamedTemporaryFile(suffix='.pt', delete=False)
+            torch.save(obj.cpu(), f.name)  # Always save as CPU tensor
+            f.close()
+            file_registry.append(f.name)
+            return {"__tensor_file__": f.name, "dtype": str(obj.dtype), "device": str(obj.device)}
+    except ImportError:
+        pass
+
+    if isinstance(obj, dict):
+        return {k: _save_tensors_to_files(v, file_registry) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [_save_tensors_to_files(v, file_registry) for v in obj]
+    elif isinstance(obj, tuple):
+        return tuple(_save_tensors_to_files(v, file_registry) for v in obj)
+    return obj
+
+
+def _load_tensors_from_files(obj):
+    """Recursively load torch tensors from temp files."""
+    if isinstance(obj, dict):
+        if "__tensor_file__" in obj:
+            import os
+            import torch
+            tensor = torch.load(obj["__tensor_file__"], weights_only=True)
+            os.unlink(obj["__tensor_file__"])  # Cleanup temp file
+            return tensor
+        return {k: _load_tensors_from_files(v) for k, v in obj.items()}
+    elif isinstance(obj, list):
+        return [_load_tensors_from_files(v) for v in obj]
+    elif isinstance(obj, tuple):
+        return tuple(_load_tensors_from_files(v) for v in obj)
+    return obj
+
+
 def _dump_worker_env(worker_name: str = "unknown", print_to_terminal: bool = False):
     """Dump worker environment to .comfy-env/logs/ (always) and optionally print."""
     import json
@@ -205,14 +252,20 @@ def _worker_loop(queue_in, queue_out, sys_path_additions=None, lib_path=None, en
             # Handle method call protocol
             if isinstance(item, tuple) and len(item) == 6 and item[0] == _CALL_METHOD:
                 _, module_name, class_name, method_name, self_state, kwargs = item
+                # Load tensors from files (saved by host to avoid cudaMallocAsync IPC issues)
+                kwargs = _load_tensors_from_files(kwargs)
                 result = _execute_method_call(
                     module_name, class_name, method_name, self_state, kwargs
                 )
+                # Save tensors to files to avoid CUDA IPC issues with cudaMallocAsync
+                result = _save_tensors_to_files(result)
                 queue_out.put(("ok", result))
             else:
                 # Direct function call (legacy)
                 func, args, kwargs = item
                 result = func(*args, **kwargs)
+                # Save tensors to files to avoid CUDA IPC issues with cudaMallocAsync
+                result = _save_tensors_to_files(result)
                 queue_out.put(("ok", result))
 
         except Exception as e:
@@ -646,6 +699,9 @@ class MPWorker(Worker):
         """
         self._ensure_started()
 
+        # Save tensors to files to avoid CUDA IPC issues with cudaMallocAsync
+        kwargs = _save_tensors_to_files(kwargs)
+
         # Send method call request using protocol
         self._queue_in.put((
             _CALL_METHOD,
@@ -672,6 +728,8 @@
 
         # Handle response
         if status == "ok":
+            # Load tensors from temp files
+            result = _load_tensors_from_files(result)
             return result
         elif status == "error":
             msg, tb = result
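
For illustration only (not part of the diff): a minimal round-trip through the tensor file transfer helpers added to workers/mp.py, assuming torch is installed on both sides of the boundary. Tensors are written to temporary .pt files before crossing processes and read back (with the temp file unlinked) on the other side, which sidesteps CUDA IPC sharing under cudaMallocAsync.

import torch

from comfy_env.workers.mp import _load_tensors_from_files, _save_tensors_to_files

payload = {"image": torch.rand(1, 3, 8, 8), "steps": 20}

# Host side: each tensor is replaced by a {"__tensor_file__": ...} placeholder.
wire = _save_tensors_to_files(payload)

# Worker side: placeholders are resolved back into (CPU) tensors and the temp files removed.
restored = _load_tensors_from_files(wire)

assert torch.equal(restored["image"], payload["image"])
assert restored["steps"] == 20
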

{comfy_env-0.1.8 → comfy_env-0.1.10}/src/comfy_env/workers/subprocess.py
@@ -221,10 +221,12 @@ def _to_shm(obj, registry, visited=None):
         visited[obj_id] = result
         return result
 
-    # torch.Tensor → convert to numpy → shared memory
+    # torch.Tensor → convert to numpy → shared memory (with marker to restore type)
     if t == 'Tensor':
         arr = obj.detach().cpu().numpy()
-        return _to_shm(arr, registry, visited)
+        result = _to_shm(arr, registry, visited)
+        result["__was_tensor__"] = True
+        return result
 
     # trimesh.Trimesh → vertices + faces arrays → shared memory
     if t == 'Trimesh':
@@ -279,13 +281,17 @@ def _from_shm(obj, unlink=True):
             return [_from_shm(v, unlink) for v in obj]
         return obj
 
-    # numpy array
+    # numpy array (or tensor that was converted to numpy)
     if "__shm_np__" in obj:
         block = shm.SharedMemory(name=obj["__shm_np__"])
         arr = np.ndarray(tuple(obj["shape"]), dtype=np.dtype(obj["dtype"]), buffer=block.buf).copy()
         block.close()
         if unlink:
             block.unlink()
+        # Convert back to tensor if it was originally a tensor
+        if obj.get("__was_tensor__"):
+            import torch
+            return torch.from_numpy(arr)
         return arr
 
     # trimesh
@@ -544,7 +550,9 @@ def _to_shm(obj, registry, visited=None):
 
     if t == 'Tensor':
         arr = obj.detach().cpu().numpy()
-        return _to_shm(arr, registry, visited)
+        result = _to_shm(arr, registry, visited)
+        result["__was_tensor__"] = True
+        return result
 
     if t == 'Trimesh':
         verts = np.ascontiguousarray(obj.vertices, dtype=np.float64)
@@ -587,6 +595,10 @@ def _from_shm(obj):
         block = shm.SharedMemory(name=obj["__shm_np__"])
         arr = np.ndarray(tuple(obj["shape"]), dtype=np.dtype(obj["dtype"]), buffer=block.buf).copy()
         block.close()
+        # Convert back to tensor if it was originally a tensor
+        if obj.get("__was_tensor__"):
+            import torch
+            return torch.from_numpy(arr)
         return arr
     if "__shm_trimesh__" in obj:
         import trimesh
@@ -1044,15 +1056,21 @@ class SubprocessWorker(Worker):
 
     def _find_comfyui_base(self) -> Optional[Path]:
         """Find ComfyUI base directory."""
-        # Check common child directories (for test environments)
-        # Also check parent's children (isolated venv is sibling to .comfy-test-env)
+        # Use folder_paths.base_path (canonical source) if available
+        try:
+            import folder_paths
+            return Path(folder_paths.base_path)
+        except ImportError:
+            pass
+
+        # Fallback: Check common child directories (for test environments)
         for base in [self.working_dir, self.working_dir.parent]:
             for child in [".comfy-test-env/ComfyUI", "ComfyUI"]:
                 candidate = base / child
                 if (candidate / "main.py").exists() and (candidate / "comfy").exists():
                     return candidate
 
-        # Walk up from working_dir (standard ComfyUI custom_nodes layout)
+        # Fallback: Walk up from working_dir (standard ComfyUI custom_nodes layout)
         current = self.working_dir.resolve()
         for _ in range(10):
             if (current / "main.py").exists() and (current / "comfy").exists():