comfy-env 0.0.54__tar.gz → 0.0.56__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- {comfy_env-0.0.54 → comfy_env-0.0.56}/PKG-INFO +1 -1
- {comfy_env-0.0.54 → comfy_env-0.0.56}/pyproject.toml +1 -1
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/isolation.py +13 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/pixi.py +54 -23
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stub_imports.py +61 -108
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/venv.py +247 -49
- {comfy_env-0.0.54 → comfy_env-0.0.56}/.github/workflows/publish.yml +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/.gitignore +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/LICENSE +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/README.md +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/cli.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/decorator.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/config.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/config_file.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/cuda_gpu_detection.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/manager.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/platform/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/platform/base.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/platform/darwin.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/platform/linux.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/platform/windows.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/env/security.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/errors.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/install.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/bridge.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/protocol.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/tensor.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/torch_bridge.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/transport.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/ipc/worker.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/nodes.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/registry.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/resolver.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stubs/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stubs/comfy/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stubs/comfy/model_management.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stubs/comfy/utils.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/stubs/folder_paths.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/templates/comfy-env-instructions.txt +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/templates/comfy-env.toml +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/wheel_sources.yml +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/__init__.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/base.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/pool.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/tensor_utils.py +0 -0
- {comfy_env-0.0.54 → comfy_env-0.0.56}/src/comfy_env/workers/torch_mp.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: comfy-env
-Version: 0.0.54
+Version: 0.0.56
 Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
 Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
 Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
src/comfy_env/isolation.py

@@ -30,6 +30,9 @@ from functools import wraps
 from pathlib import Path
 from typing import Any, Dict, Optional

+# Debug logging (set COMFY_ENV_DEBUG=1 to enable)
+_DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")
+
 # Global worker cache (one per isolated environment)
 _workers: Dict[str, Any] = {}
 _workers_lock = threading.Lock()
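Both isolation.py and workers/venv.py now gate their new trace output on the same environment variable. A minimal way to turn it on from Python (assuming only that the variable is read at import time, as the check above shows):

    import os

    # Must be set before the comfy_env modules are imported, because _DEBUG is
    # evaluated once at module import time.
    os.environ["COMFY_ENV_DEBUG"] = "1"

Setting COMFY_ENV_DEBUG=1 in the shell that launches ComfyUI has the same effect.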
@@ -158,7 +161,13 @@ def _wrap_node_class(

     @wraps(original_method)
     def proxy(self, **kwargs):
+        if _DEBUG:
+            print(f"[comfy-env] PROXY CALLED: {cls.__name__}.{func_name}", flush=True)
+            print(f"[comfy-env] kwargs keys: {list(kwargs.keys())}", flush=True)
+
         worker = _get_worker(env_name, python_path, working_dir, sys_path)
+        if _DEBUG:
+            print(f"[comfy-env] worker alive: {worker.is_alive()}", flush=True)

         # Clone tensors for IPC if needed
         try:
@@ -168,6 +177,8 @@ def _wrap_node_class(
         except ImportError:
             pass  # No torch available, skip cloning

+        if _DEBUG:
+            print(f"[comfy-env] calling worker.call_method...", flush=True)
         result = worker.call_method(
             module_name=module_name,
             class_name=cls.__name__,
@@ -176,6 +187,8 @@ def _wrap_node_class(
             kwargs=kwargs,
             timeout=600.0,
         )
+        if _DEBUG:
+            print(f"[comfy-env] call_method returned", flush=True)

         # Clone result tensors
         try:
src/comfy_env/pixi.py

@@ -253,19 +253,26 @@ def create_pixi_toml(
     lines.append('libblas = { version = "*", build = "*mkl" }')

     for pkg in conda.packages:
-        # Parse package spec (name=version or name>=version or just name)
-        if "
-
-
-
-
-
-
-
-
-
-
+        # Parse package spec (name=version or name>=version or name<version or just name)
+        if ">=" in pkg:
+            name, version = pkg.split(">=", 1)
+            lines.append(f'{name} = ">={version}"')
+        elif "<=" in pkg:
+            name, version = pkg.split("<=", 1)
+            lines.append(f'{name} = "<={version}"')
+        elif "==" in pkg:
+            name, version = pkg.split("==", 1)
+            lines.append(f'{name} = "=={version}"')
+        elif ">" in pkg:
+            name, version = pkg.split(">", 1)
+            lines.append(f'{name} = ">{version}"')
+        elif "<" in pkg:
+            name, version = pkg.split("<", 1)
+            lines.append(f'{name} = "<{version}"')
+        elif "=" in pkg and not pkg.startswith("="):
+            # Single = means exact version in conda
+            name, version = pkg.split("=", 1)
+            lines.append(f'{name} = "=={version}"')
         else:
             # No version, use any
             lines.append(f'{pkg} = "*"')
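The new branch chain translates a conda-style spec into a pixi.toml constraint, checking the two-character operators before ">" and "<" so that ">=" is not mis-split. A standalone sketch of the same mapping (hypothetical helper name, not part of the package):

    def spec_to_pixi(pkg: str) -> str:
        """Map a conda spec like 'numpy>=1.26' to a pixi.toml entry (illustration only)."""
        # Two-character operators must be tested before their one-character prefixes.
        for op in (">=", "<=", "==", ">", "<"):
            if op in pkg:
                name, version = pkg.split(op, 1)
                return f'{name} = "{op}{version}"'
        if "=" in pkg and not pkg.startswith("="):
            name, version = pkg.split("=", 1)  # a single '=' is an exact pin in conda
            return f'{name} = "=={version}"'
        return f'{pkg} = "*"'  # no constraint given

    assert spec_to_pixi("numpy>=1.26") == 'numpy = ">=1.26"'
    assert spec_to_pixi("openexr=3.2") == 'openexr = "==3.2"'
    assert spec_to_pixi("libigl") == 'libigl = "*"'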
@@ -282,16 +289,22 @@ def create_pixi_toml(
     if local_wheels_dir:
         local_wheels = list(Path(local_wheels_dir).glob("comfy_env-*.whl"))
         if local_wheels:
-            #
-
-
+            # Copy wheel to node_dir (next to pixi.toml) for simple relative path
+            wheel_name = local_wheels[0].name
+            wheel_dest = node_dir / wheel_name
+            if not wheel_dest.exists():
+                shutil.copy(local_wheels[0], wheel_dest)
+            # Reference with simple relative path (forward slashes, no backslash issues)
+            special_deps["comfy-env"] = f'{{ path = "./{wheel_name}" }}'
         else:
             pypi_deps.append("comfy-env")
     else:
         # Check for local editable comfy-env at ~/utils/comfy-env
         local_comfy_env = Path.home() / "utils" / "comfy-env"
         if local_comfy_env.exists() and (local_comfy_env / "pyproject.toml").exists():
-
+            # Use forward slashes for TOML compatibility
+            path_str = local_comfy_env.as_posix()
+            special_deps["comfy-env"] = f'{{ path = "{path_str}", editable = true }}'
         else:
             pypi_deps.append("comfy-env")

@@ -544,20 +557,38 @@ def pixi_install(

     log("pixi install completed successfully!")

-    # Create _env_{name}
+    # Create _env_{name} link for compatibility with uv backend
     # This ensures code that expects _env_envname/bin/python works with pixi
     symlink_path = node_dir / f"_env_{env_config.name}"
     pixi_env_path = node_dir / ".pixi" / "envs" / "default"

     if pixi_env_path.exists():
-        # Remove existing symlink or directory if present
-        if symlink_path.is_symlink():
-
+        # Remove existing symlink/junction or directory if present
+        if symlink_path.is_symlink() or (sys.platform == "win32" and symlink_path.is_dir()):
+            # On Windows, junctions appear as directories but can be removed with rmdir
+            try:
+                symlink_path.unlink()
+            except (OSError, PermissionError):
+                # Junction on Windows - remove with rmdir (doesn't delete contents)
+                subprocess.run(["cmd", "/c", "rmdir", str(symlink_path)], capture_output=True)
         elif symlink_path.exists():
             shutil.rmtree(symlink_path)

-
-
+        # On Windows, use directory junctions (no admin required) instead of symlinks
+        if sys.platform == "win32":
+            # mklink /J creates a directory junction (no admin privileges needed)
+            result = subprocess.run(
+                ["cmd", "/c", "mklink", "/J", str(symlink_path), str(pixi_env_path)],
+                capture_output=True,
+                text=True
+            )
+            if result.returncode == 0:
+                log(f"Created junction: _env_{env_config.name} -> .pixi/envs/default")
+            else:
+                log(f"Warning: Failed to create junction: {result.stderr}")
+        else:
+            symlink_path.symlink_to(pixi_env_path)
+            log(f"Created symlink: _env_{env_config.name} -> .pixi/envs/default")

     return True

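On Windows the _env_{name} alias is now created with "mklink /J" (a directory junction, which unlike a symlink needs no administrator rights or Developer Mode), while other platforms keep using a plain symlink. A condensed sketch of the same pattern, with a hypothetical helper name:

    import subprocess
    import sys
    from pathlib import Path

    def link_dir(link: Path, target: Path) -> bool:
        """Create link -> target: junction on Windows, symlink elsewhere (sketch)."""
        if sys.platform == "win32":
            # cmd.exe built-in; /J makes a directory junction, no elevation needed
            result = subprocess.run(
                ["cmd", "/c", "mklink", "/J", str(link), str(target)],
                capture_output=True, text=True,
            )
            return result.returncode == 0
        link.symlink_to(target, target_is_directory=True)
        return True

Removing such a junction later with rmdir, as the hunk above does, deletes only the link and not the target's contents.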
src/comfy_env/stub_imports.py

@@ -5,9 +5,9 @@ This module provides automatic import stubbing for packages that exist only
 in the isolated pixi environment, not in the host ComfyUI Python.

 How it works:
-1.
-2. Look up
-3.
+1. Scan pixi environment's site-packages for installed packages
+2. Look up import names from top_level.txt in .dist-info directories
+3. Inject stub modules directly into sys.modules for missing packages
 4. Stubs allow class definitions to parse without the real packages
 5. Real packages are used when FUNCTION runs in the isolated worker

@@ -22,13 +22,16 @@ Usage:
 import sys
 import types
 from pathlib import Path
-from typing import
+from typing import List, Set
+
+
+def _log(msg: str) -> None:
+    """Log with immediate flush to stderr (visible on Windows subprocess)."""
+    print(msg, file=sys.stderr, flush=True)


 class _StubModule(types.ModuleType):
-    """
-    A stub module that accepts any attribute access or call.
-    """
+    """A stub module that accepts any attribute access or call."""

     def __init__(self, name: str):
         super().__init__(name)
@@ -46,9 +49,7 @@ class _StubModule(types.ModuleType):


 class _StubObject:
-    """
-    A stub object that accepts any operation.
-    """
+    """A stub object that accepts any operation."""

     def __init__(self, name: str = "stub"):
         self._stub_name = name
@@ -99,57 +100,12 @@ class _StubObject:
     def __contains__(self, item): return False


-class _StubFinder:
-    """Import hook finder that provides stub modules for specified packages."""
-
-    def __init__(self, stub_packages: Set[str]):
-        self.stub_packages = stub_packages
-
-    def find_module(self, fullname: str, path=None):
-        top_level = fullname.split('.')[0]
-        if top_level in self.stub_packages:
-            return _StubLoader(self.stub_packages)
-        return None
-
-
-class _StubLoader:
-    """Import hook loader that creates stub modules."""
-
-    def __init__(self, stub_packages: Set[str]):
-        self.stub_packages = stub_packages
-
-    def load_module(self, fullname: str):
-        if fullname in sys.modules:
-            return sys.modules[fullname]
-
-        module = _StubModule(fullname)
-        module.__loader__ = self
-
-        if '.' in fullname:
-            parent = fullname.rsplit('.', 1)[0]
-            module.__package__ = parent
-            if parent not in sys.modules:
-                self.load_module(parent)
-        else:
-            module.__package__ = fullname
-
-        sys.modules[fullname] = module
-        return module
-
-
-def _normalize_package_name(name: str) -> str:
-    """Normalize package name for comparison (PEP 503)."""
-    return name.lower().replace('-', '_').replace('.', '_')
-
-
 def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
     """
-    Get import names
+    Get import names from pixi environment using top_level.txt metadata.

-
-
-    2. .py files (single-file modules)
-    3. .so/.pyd files (extension modules)
+    This properly maps package names to import names (e.g., libigl -> igl,
+    PyYAML -> yaml) by reading the canonical top_level.txt files.

     Returns:
         Set of import names that should be stubbed.
@@ -159,16 +115,11 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
     pixi_base = node_dir / ".pixi" / "envs" / "default"

     # Find site-packages (different paths on Windows vs Linux)
-    # Linux: .pixi/envs/default/lib/python3.x/site-packages
-    # Windows: .pixi/envs/default/Lib/site-packages
     site_packages = None
-
-    # Try Windows path first (Lib/site-packages)
     win_site = pixi_base / "Lib" / "site-packages"
     if win_site.exists():
         site_packages = win_site
     else:
-        # Try Linux path (lib/python3.x/site-packages)
         pixi_lib = pixi_base / "lib"
         if pixi_lib.exists():
             python_dirs = list(pixi_lib.glob("python3.*"))
@@ -178,25 +129,44 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
     if site_packages is None or not site_packages.exists():
         return import_names

-
+    _log(f"[comfy-env] Scanning: {site_packages}")
+
+    # PRIMARY: Read top_level.txt from all .dist-info directories
+    for dist_info in site_packages.glob("*.dist-info"):
+        top_level_file = dist_info / "top_level.txt"
+        if top_level_file.exists():
+            try:
+                for line in top_level_file.read_text(encoding="utf-8").splitlines():
+                    name = line.strip()
+                    if name and not name.startswith('#'):
+                        # Extract just the top-level name
+                        top_name = name.replace('\\', '/').split('/')[0]
+                        if top_name:
+                            import_names.add(top_name)
+            except Exception:
+                pass
+
+    # FALLBACK: Scan for packages/modules not covered by dist-info
     for item in site_packages.iterdir():
         name = item.name

-        # Skip private/internal items
         if name.startswith('_') or name.startswith('.'):
             continue
-
-        # Skip dist-info and egg-info directories
         if name.endswith('.dist-info') or name.endswith('.egg-info'):
             continue
-
-        # Skip common non-module items
         if name in {'bin', 'share', 'include', 'etc'}:
             continue

         # Package directory (has __init__.py)
+        if item.is_dir() and (item / "__init__.py").exists():
+            import_names.add(name)
+            continue
+
+        # Namespace package (directory without __init__.py but has submodules)
         if item.is_dir():
-
+            has_py = any(item.glob("*.py"))
+            has_subpkg = any((item / d / "__init__.py").exists() for d in item.iterdir() if d.is_dir())
+            if has_py or has_subpkg:
                 import_names.add(name)
                 continue

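The reason for preferring top_level.txt is that a distribution's name often differs from its import name (libigl installs igl, PyYAML installs yaml). For comparison, the host interpreter can report the same mapping for its own environment via the standard library; this is only an illustration of the mapping, not what the module above does (it reads the metadata files straight out of the pixi environment's site-packages):

    from importlib.metadata import packages_distributions  # Python 3.10+

    # Maps import names to the distributions that provide them in the *host* env,
    # e.g. {"yaml": ["PyYAML"], ...}
    mapping = packages_distributions()
    for import_name, dists in sorted(mapping.items())[:10]:
        print(f"{import_name:20s} <- {', '.join(dists)}")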
@@ -206,11 +176,9 @@ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
             continue

         # Extension module (.so on Linux, .pyd on Windows)
-        if
-            # Extract module name: foo.cpython-311-x86_64-linux-gnu.so -> foo
+        if name.endswith('.so') or name.endswith('.pyd'):
             module_name = name.split('.')[0]
             import_names.add(module_name)
-            continue

     return import_names

@@ -230,14 +198,14 @@ def _filter_to_missing(import_names: Set[str]) -> Set[str]:
         except ImportError:
             missing.add(name)
         except Exception:
-            # Other errors
-
+            # Other errors (DLL load, etc.) - stub these too
+            missing.add(name)

     return missing


-# Track
-
+# Track what we stubbed for cleanup
+_stubbed_modules: Set[str] = set()


 def setup_isolated_imports(init_file: str) -> List[str]:
@@ -258,7 +226,7 @@ def setup_isolated_imports(init_file: str) -> List[str]:

         from .nodes import NODE_CLASS_MAPPINGS  # Now works!
     """
-    global
+    global _stubbed_modules

     node_dir = Path(init_file).resolve().parent

@@ -266,52 +234,37 @@ def setup_isolated_imports(init_file: str) -> List[str]:
     pixi_imports = _get_import_names_from_pixi(node_dir)

     if not pixi_imports:
-
+        _log("[comfy-env] No pixi environment found")
         return []

     # Filter to only those missing in host
     missing = _filter_to_missing(pixi_imports)

     if not missing:
-
+        _log("[comfy-env] All packages available in host")
         return []

-    #
-
-
-    sys.
-
-        pass
-
-    # Register new finder
-    _stub_finder = _StubFinder(missing)
-    sys.meta_path.insert(0, _stub_finder)
+    # Direct injection into sys.modules - simple and reliable
+    for name in missing:
+        if name not in sys.modules:
+            sys.modules[name] = _StubModule(name)
+            _stubbed_modules.add(name)

-    stubbed = sorted(
+    stubbed = sorted(_stubbed_modules)
     if len(stubbed) <= 10:
-
+        _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed)}")
     else:
-
+        _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed[:10])}... +{len(stubbed)-10} more")

     return stubbed


 def cleanup_stubs():
-    """Remove
-    global
+    """Remove injected stub modules from sys.modules."""
+    global _stubbed_modules

-
-
-        sys.meta_path.remove(_stub_finder)
-    except ValueError:
-        pass
-
-    # Remove stubbed modules from sys.modules
-    to_remove = [
-        name for name in sys.modules
-        if isinstance(sys.modules[name], _StubModule)
-    ]
-    for name in to_remove:
+    for name in list(_stubbed_modules):
+        if name in sys.modules and isinstance(sys.modules[name], _StubModule):
             del sys.modules[name]

-
+    _stubbed_modules.clear()
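With direct injection no meta-path finder or loader is needed: placing a module object in sys.modules under a name is enough for a later import of that name to return it. A self-contained toy version of the idea (deliberately not using the package's _StubModule; note that importing submodules such as pkg.sub would still need extra handling):

    import sys
    import types

    class _Anything(types.ModuleType):
        """Toy stub: any missing attribute resolves to a no-op callable."""
        def __getattr__(self, attr):
            return lambda *args, **kwargs: None

    sys.modules["hypothetical_missing_pkg"] = _Anything("hypothetical_missing_pkg")

    import hypothetical_missing_pkg          # resolved from sys.modules, no import hook
    hypothetical_missing_pkg.SomeClass(x=1)   # accepted, does nothing

    # Cleanup mirrors cleanup_stubs(): drop the entry again.
    del sys.modules["hypothetical_missing_pkg"]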
src/comfy_env/workers/venv.py

@@ -41,7 +41,10 @@ from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Tuple, Union

 from .base import Worker, WorkerError
+from ..pixi import get_pixi_path

+# Debug logging (set COMFY_ENV_DEBUG=1 to enable)
+_DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")

 # =============================================================================
 # Socket IPC utilities - cross-platform with TCP fallback
@@ -613,7 +616,84 @@ from types import SimpleNamespace

 # Enable faulthandler to dump traceback on SIGSEGV/SIGABRT/etc
 faulthandler.enable(file=sys.stderr, all_threads=True)
-
+
+# Debug logging (set COMFY_ENV_DEBUG=1 to enable)
+_DEBUG = os.environ.get("COMFY_ENV_DEBUG", "").lower() in ("1", "true", "yes")
+
+# Pre-import bpy FIRST to avoid DLL conflicts with numpy/torch/MKL
+# bpy's DLLs must be loaded before other packages load conflicting versions
+try:
+    import bpy
+    if _DEBUG:
+        print("[worker] Pre-imported bpy successfully", file=sys.stderr, flush=True)
+except ImportError as e:
+    # bpy not available in this environment - that's fine
+    pass
+except Exception as e:
+    if _DEBUG:
+        print(f"[worker] bpy pre-import warning: {e}", file=sys.stderr, flush=True)
+
+# Watchdog: dump all thread stacks every 60 seconds to catch hangs
+import threading
+import tempfile as _tempfile
+_watchdog_log = os.path.join(_tempfile.gettempdir(), "comfy_worker_watchdog.log")
+def _watchdog():
+    import time
+    import io
+    tick = 0
+    while True:
+        time.sleep(60)
+        tick += 1
+        # Capture stack dump to string
+        buf = io.StringIO()
+        faulthandler.dump_traceback(file=buf, all_threads=True)
+        dump = buf.getvalue()
+
+        # Write to file
+        with open(_watchdog_log, "a", encoding="utf-8") as f:
+            f.write(f"\\n=== WATCHDOG TICK {tick} ({time.strftime('%H:%M:%S')}) ===\\n")
+            f.write(dump)
+            f.write("=== END ===\\n")
+            f.flush()
+            os.fsync(f.fileno())
+
+        # Also print
+        print(f"\\n=== WATCHDOG TICK {tick} ===", flush=True)
+        print(dump, flush=True)
+        print("=== END ===\\n", flush=True)
+
+_watchdog_thread = threading.Thread(target=_watchdog, daemon=True)
+_watchdog_thread.start()
+if _DEBUG:
+    print(f"[worker] Watchdog started, logging to: {_watchdog_log}", flush=True)
+
+# File-based logging for debugging (persists even if stdout/stderr are swallowed)
+import tempfile
+_worker_log_file = os.path.join(tempfile.gettempdir(), "comfy_worker_debug.log")
+def wlog(msg):
+    """Log to file only - stdout causes pipe buffer deadlock after many requests."""
+    try:
+        with open(_worker_log_file, "a", encoding="utf-8") as f:
+            import time
+            f.write(f"{time.strftime('%H:%M:%S')} {msg}\\n")
+            f.flush()
+            os.fsync(f.fileno())
+    except Exception:
+        pass
+    # NOTE: Don't print to stdout here! After 50+ requests the pipe buffer
+    # fills up and causes deadlock (parent blocked on recv, worker blocked on print)
+
+wlog(f"[worker] === Worker starting, log file: {_worker_log_file} ===")
+
+# Debug: print PATH at startup (only if debug enabled)
+if _DEBUG:
+    _path_sep = ";" if sys.platform == "win32" else ":"
+    _path_parts = os.environ.get("PATH", "").split(_path_sep)
+    print(f"[worker] PATH has {len(_path_parts)} entries:", file=sys.stderr, flush=True)
+    for _i, _p in enumerate(_path_parts[:15]):
+        print(f"[worker] [{_i}] {_p}", file=sys.stderr, flush=True)
+    if len(_path_parts) > 15:
+        print(f"[worker] ... and {len(_path_parts) - 15} more", file=sys.stderr, flush=True)

 # On Windows, add host Python's DLL directories so packages like opencv can find VC++ runtime
 if sys.platform == "win32":
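The watchdog thread above builds its own periodic stack dump from faulthandler.dump_traceback. For reference, the standard library can produce a similar periodic dump on its own with faulthandler.dump_traceback_later; this is a simpler but less structured alternative (no per-tick headers or fsync), not what the worker actually uses:

    import faulthandler
    import os
    import tempfile

    log_path = os.path.join(tempfile.gettempdir(), "worker_stacks.log")  # example path
    log_file = open(log_path, "a")

    # Dump all thread stacks every 60 seconds until cancelled.
    faulthandler.dump_traceback_later(60.0, repeat=True, file=log_file)

    # ... long-running work ...

    faulthandler.cancel_dump_traceback_later()
    log_file.close()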
@@ -633,9 +713,9 @@ if sys.platform == "win32":
     if _pixi_library_bin and hasattr(os, "add_dll_directory"):
         try:
             os.add_dll_directory(_pixi_library_bin)
-
+            wlog(f"[worker] Added pixi Library/bin to DLL search: {_pixi_library_bin}")
         except Exception as e:
-
+            wlog(f"[worker] Failed to add pixi Library/bin: {e}")

 # =============================================================================
 # Object Reference System - keep complex objects in worker, pass refs to host
@@ -805,24 +885,25 @@ def _deserialize_isolated_objects(obj):


 def main():
-
+    wlog("[worker] Starting...")
     # Get socket address from command line
     if len(sys.argv) < 2:
-
+        wlog("Usage: worker.py <socket_addr>")
         sys.exit(1)
     socket_addr = sys.argv[1]
-
+    wlog(f"[worker] Connecting to {socket_addr}...")

     # Connect to host process
     sock = _connect(socket_addr)
     transport = SocketTransport(sock)
-
+    wlog("[worker] Connected, waiting for config...")

     # Read config as first message
     config = transport.recv()
     if not config:
+        wlog("[worker] No config received, exiting")
         return
-
+    wlog("[worker] Got config, setting up paths...")

     # Setup sys.path
     for p in config.get("sys_paths", []):
@@ -830,66 +911,77 @@ def main():
         sys.path.insert(0, p)

     # Import torch after path setup
-
+    wlog("[worker] Importing torch...")
     import torch
-
+    wlog(f"[worker] Torch imported: {torch.__version__}")

     # Signal ready
     transport.send({"status": "ready"})
-
+    wlog("[worker] Ready, entering request loop...")

     # Process requests
+    request_num = 0
     while True:
+        request_num += 1
+        wlog(f"[worker] Waiting for request #{request_num}...")
         try:
             request = transport.recv()
             if not request:
+                wlog("[worker] Empty request received, exiting loop")
                 break
-        except Exception:
+        except Exception as e:
+            wlog(f"[worker] Exception receiving request: {e}")
             break

         if request.get("method") == "shutdown":
+            wlog("[worker] Shutdown requested")
             break

+        if request.get("method") == "ping":
+            # Health check - respond immediately
+            transport.send({"status": "pong"})
+            continue
+
         try:
             request_type = request.get("type", "call_module")
             module_name = request["module"]
             inputs_path = request.get("inputs_path")
             outputs_path = request.get("outputs_path")
-
+            wlog(f"[worker] Request: {request_type} {module_name}")

             # Load inputs
             if inputs_path:
-
+                wlog(f"[worker] Loading inputs from {inputs_path}...")
                 inputs = torch.load(inputs_path, weights_only=False)
-
+                wlog(f"[worker] Deserializing isolated objects...")
                 inputs = _deserialize_isolated_objects(inputs)
                 # Resolve any object references from previous node calls
-
+                wlog(f"[worker] Resolving object references...")
                 inputs = _deserialize_input(inputs)
-
+                wlog(f"[worker] Inputs ready: {list(inputs.keys())}")
             else:
                 inputs = {}

             # Import module
-
+            wlog(f"[worker] Importing module {module_name}...")
             module = __import__(module_name, fromlist=[""])
-
+            wlog(f"[worker] Module imported")

             if request_type == "call_method":
                 class_name = request["class_name"]
                 method_name = request["method_name"]
                 self_state = request.get("self_state")
-
+                wlog(f"[worker] Getting class {class_name}...")

                 cls = getattr(module, class_name)
-
+                wlog(f"[worker] Creating instance...")
                 instance = object.__new__(cls)
                 if self_state:
                     instance.__dict__.update(self_state)
-
+                wlog(f"[worker] Calling {method_name}...")
                 method = getattr(instance, method_name)
                 result = method(**inputs)
-
+                wlog(f"[worker] Method returned")
             else:
                 func_name = request["func"]
                 func = getattr(module, func_name)
@@ -1000,13 +1092,53 @@ class PersistentVenvWorker(Worker):
             current = current.parent
         return None

+    def _check_socket_health(self) -> bool:
+        """Check if socket connection is healthy using a quick ping."""
+        if not self._transport:
+            return False
+        try:
+            # Send a ping request with short timeout
+            self._transport.send({"method": "ping"})
+            response = self._transport.recv(timeout=2.0)
+            return response is not None and response.get("status") == "pong"
+        except Exception as e:
+            print(f"[{self.name}] Socket health check failed: {e}", file=sys.stderr, flush=True)
+            return False
+
+    def _kill_worker(self) -> None:
+        """Kill the worker process and clean up resources."""
+        if self._process:
+            try:
+                self._process.kill()
+                self._process.wait(timeout=5)
+            except:
+                pass
+            self._process = None
+        if self._transport:
+            try:
+                self._transport.close()
+            except:
+                pass
+            self._transport = None
+        if self._server_socket:
+            try:
+                self._server_socket.close()
+            except:
+                pass
+            self._server_socket = None
+
     def _ensure_started(self):
         """Start persistent worker subprocess if not running."""
         if self._shutdown:
             raise RuntimeError(f"{self.name}: Worker has been shut down")

         if self._process is not None and self._process.poll() is None:
-
+            # Process is running, but check if socket is healthy
+            if self._transport and self._check_socket_health():
+                return  # All good
+            # Socket is dead/unhealthy - restart worker
+            print(f"[{self.name}] Socket unhealthy, restarting worker...", file=sys.stderr, flush=True)
+            self._kill_worker()

         # Clean up any previous socket
         if self._transport:
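Combined with the ping branch added to the worker's request loop, this gives the host a cheap liveness probe before reusing a cached worker: send {"method": "ping"}, expect {"status": "pong"} within a couple of seconds, otherwise kill and respawn. The message shapes come from the diff itself; the transport below is a stand-in with an assumed send()/recv(timeout=...) API, not the package's SocketTransport:

    def is_worker_alive(transport, timeout: float = 2.0) -> bool:
        """Ping/pong liveness probe (sketch)."""
        try:
            transport.send({"method": "ping"})
            response = transport.recv(timeout=timeout)
            return bool(response) and response.get("status") == "pong"
        except Exception:
            return False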
@@ -1041,16 +1173,28 @@ class PersistentVenvWorker(Worker):
             # Pixi has python.exe directly in env dir, not in Scripts/
             env_dir = self.python.parent
             library_bin = env_dir / "Library" / "bin"
+
+            # COMPLETE DLL ISOLATION: Build minimal PATH from scratch
+            # Only include Windows system directories + pixi environment
+            # This prevents DLL conflicts from mingw, conda, etc.
+            windir = os.environ.get("WINDIR", r"C:\Windows")
+            minimal_path_parts = [
+                str(env_dir),  # Pixi env (python.exe location)
+                str(env_dir / "Scripts"),  # Pixi Scripts
+                str(env_dir / "Lib" / "site-packages" / "bpy"),  # bpy DLLs
+                f"{windir}\\System32",  # Core Windows DLLs
+                f"{windir}",  # Windows directory
+                f"{windir}\\System32\\Wbem",  # WMI tools
+            ]
             if library_bin.is_dir():
-
-
-
-
-
-
-
-
-                env["PYTHONIOENCODING"] = "utf-8"
+                minimal_path_parts.insert(1, str(library_bin))  # MKL DLLs
+
+            env["PATH"] = ";".join(minimal_path_parts)
+            env["COMFYUI_PIXI_LIBRARY_BIN"] = str(library_bin) if library_bin.is_dir() else ""
+            # Allow duplicate OpenMP libraries (MKL's libiomp5md.dll + PyTorch's libomp.dll)
+            env["KMP_DUPLICATE_LIB_OK"] = "TRUE"
+            # Use UTF-8 encoding for stdout/stderr to handle Unicode symbols
+            env["PYTHONIOENCODING"] = "utf-8"

         # Find ComfyUI base and set env var for folder_paths stub
         comfyui_base = self._find_comfyui_base()
@@ -1062,32 +1206,68 @@ class PersistentVenvWorker(Worker):
         all_sys_path = [str(stubs_dir), str(self.working_dir)] + self.sys_path

         # Launch subprocess with the venv Python, passing socket address
+        # For pixi environments, use "pixi run python" to get proper environment activation
+        # (CONDA_PREFIX, Library paths, etc.) which fixes DLL loading issues with bpy
+        is_pixi = '.pixi' in str(self.python)
+        if _DEBUG:
+            print(f"[PersistentVenvWorker] is_pixi={is_pixi}, python={self.python}", flush=True)
+        if is_pixi:
+            # Find pixi project root (parent of .pixi directory)
+            pixi_project = self.python
+            while pixi_project.name != '.pixi' and pixi_project.parent != pixi_project:
+                pixi_project = pixi_project.parent
+            pixi_project = pixi_project.parent  # Go up from .pixi to project root
+            pixi_toml = pixi_project / "pixi.toml"
+            if _DEBUG:
+                print(f"[PersistentVenvWorker] pixi_toml={pixi_toml}, exists={pixi_toml.exists()}", flush=True)
+
+            if pixi_toml.exists():
+                pixi_exe = get_pixi_path()
+                if pixi_exe is None:
+                    raise WorkerError("pixi not found - required for isolated environment execution")
+                cmd = [str(pixi_exe), "run", "--manifest-path", str(pixi_toml),
+                       "python", str(self._worker_script), self._socket_addr]
+                # Clean PATH to remove ct-env entries that have conflicting DLLs
+                # Pixi will add its own environment paths
+                path_sep = ";" if sys.platform == "win32" else ":"
+                current_path = env.get("PATH", "")
+                # Filter out ct-envs and conda/mamba paths that could conflict
+                clean_path_parts = [
+                    p for p in current_path.split(path_sep)
+                    if not any(x in p.lower() for x in (".ct-envs", "conda", "mamba", "miniforge", "miniconda", "anaconda"))
+                ]
+                env["PATH"] = path_sep.join(clean_path_parts)
+                launch_env = env
+            else:
+                cmd = [str(self.python), str(self._worker_script), self._socket_addr]
+                launch_env = env
+        else:
+            cmd = [str(self.python), str(self._worker_script), self._socket_addr]
+            launch_env = env
+
+        if _DEBUG:
+            print(f"[PersistentVenvWorker] launching cmd={cmd[:3]}...", flush=True)
+            if launch_env:
+                path_sep = ";" if sys.platform == "win32" else ":"
+                path_parts = launch_env.get("PATH", "").split(path_sep)
+                print(f"[PersistentVenvWorker] PATH has {len(path_parts)} entries:", flush=True)
+                for i, p in enumerate(path_parts[:10]):  # Show first 10
+                    print(f"[PersistentVenvWorker] [{i}] {p}", flush=True)
+                if len(path_parts) > 10:
+                    print(f"[PersistentVenvWorker] ... and {len(path_parts) - 10} more", flush=True)
         self._process = subprocess.Popen(
-
+            cmd,
             stdin=subprocess.DEVNULL,
-            stdout=subprocess.
+            stdout=subprocess.DEVNULL,  # DEVNULL to prevent pipe buffer deadlock
             stderr=subprocess.PIPE,  # Capture stderr separately for crash diagnostics
             cwd=str(self.working_dir),
-            env=
+            env=launch_env,
         )

         # Clear stderr buffer for new process
         with self._stderr_lock:
             self._stderr_buffer.clear()

-        # Start stdout forwarding thread
-        def forward_stdout():
-            try:
-                for line in self._process.stdout:
-                    if isinstance(line, bytes):
-                        line = line.decode('utf-8', errors='replace')
-                    sys.stderr.write(f" {line}")
-                    sys.stderr.flush()
-            except:
-                pass
-        self._stdout_thread = threading.Thread(target=forward_stdout, daemon=True)
-        self._stdout_thread.start()
-
         # Start stderr capture thread (buffer for crash diagnostics)
         def capture_stderr():
             try:
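Launching the worker through "pixi run --manifest-path <pixi.toml> python ..." (instead of invoking the environment's python.exe directly) lets pixi activate the environment first, which provides the CONDA_PREFIX and Library/bin DLL paths that packages like bpy rely on. A stripped-down sketch of that launch, assuming pixi is simply on PATH (the real code resolves it with get_pixi_path() and also sanitizes PATH as shown above):

    import subprocess
    from pathlib import Path

    def launch_in_pixi_env(manifest: Path, script: Path, *args: str) -> subprocess.Popen:
        """Run `script` under the pixi-activated interpreter (sketch)."""
        cmd = ["pixi", "run", "--manifest-path", str(manifest),
               "python", str(script), *args]
        return subprocess.Popen(
            cmd,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.DEVNULL,   # avoid pipe-buffer deadlock, as in the diff
            stderr=subprocess.PIPE,      # keep stderr for crash diagnostics
        )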
@@ -1224,8 +1404,16 @@ class PersistentVenvWorker(Worker):
         Returns:
             Return value of the method.
         """
+        import sys
+        if _DEBUG:
+            print(f"[PersistentVenvWorker] call_method: {module_name}.{class_name}.{method_name}", file=sys.stderr, flush=True)
+
         with self._lock:
+            if _DEBUG:
+                print(f"[PersistentVenvWorker] acquired lock, ensuring started...", file=sys.stderr, flush=True)
             self._ensure_started()
+            if _DEBUG:
+                print(f"[PersistentVenvWorker] worker started/confirmed", file=sys.stderr, flush=True)

             timeout = timeout or 600.0
             call_id = str(uuid.uuid4())[:8]
@@ -1237,8 +1425,14 @@ class PersistentVenvWorker(Worker):
             try:
                 # Serialize kwargs
                 if kwargs:
+                    if _DEBUG:
+                        print(f"[PersistentVenvWorker] serializing kwargs...", file=sys.stderr, flush=True)
                     serialized_kwargs = _serialize_for_ipc(kwargs)
+                    if _DEBUG:
+                        print(f"[PersistentVenvWorker] saving to {inputs_path}...", file=sys.stderr, flush=True)
                     torch.save(serialized_kwargs, str(inputs_path))
+                    if _DEBUG:
+                        print(f"[PersistentVenvWorker] saved inputs", file=sys.stderr, flush=True)

                 # Send request with class info
                 request = {
@@ -1250,7 +1444,11 @@ class PersistentVenvWorker(Worker):
                     "inputs_path": str(inputs_path) if kwargs else None,
                     "outputs_path": str(outputs_path),
                 }
+                if _DEBUG:
+                    print(f"[PersistentVenvWorker] sending request via socket...", file=sys.stderr, flush=True)
                 response = self._send_request(request, timeout)
+                if _DEBUG:
+                    print(f"[PersistentVenvWorker] got response: {response.get('status')}", file=sys.stderr, flush=True)

                 if response.get("status") == "error":
                     raise WorkerError(
All remaining files listed above (+0 -0) are unchanged between 0.0.54 and 0.0.56.