comfy-env 0.0.49__tar.gz → 0.0.51__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {comfy_env-0.0.49 → comfy_env-0.0.51}/PKG-INFO +62 -4
- {comfy_env-0.0.49 → comfy_env-0.0.51}/README.md +61 -3
- {comfy_env-0.0.49 → comfy_env-0.0.51}/pyproject.toml +1 -1
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/__init__.py +7 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/decorator.py +252 -1
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/config.py +2 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/config_file.py +4 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/install.py +38 -104
- comfy_env-0.0.51/src/comfy_env/isolation.py +297 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/pixi.py +131 -24
- comfy_env-0.0.51/src/comfy_env/stub_imports.py +310 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/templates/comfy-env-instructions.txt +31 -10
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/templates/comfy-env.toml +36 -61
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/venv.py +179 -12
- {comfy_env-0.0.49 → comfy_env-0.0.51}/.github/workflows/publish.yml +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/.gitignore +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/LICENSE +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/cli.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/cuda_gpu_detection.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/manager.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/platform/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/platform/base.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/platform/darwin.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/platform/linux.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/platform/windows.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/security.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/errors.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/bridge.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/protocol.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/tensor.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/torch_bridge.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/transport.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/ipc/worker.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/nodes.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/registry.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/resolver.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/stubs/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/stubs/comfy/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/stubs/comfy/model_management.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/stubs/comfy/utils.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/stubs/folder_paths.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/wheel_sources.yml +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/__init__.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/base.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/pool.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/tensor_utils.py +0 -0
- {comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/workers/torch_mp.py +0 -0
{comfy_env-0.0.49 → comfy_env-0.0.51}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: comfy-env
-Version: 0.0.49
+Version: 0.0.51
 Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
 Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
 Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
@@ -84,9 +84,47 @@ from comfy_env import install
 install()
 ```

-### Process Isolation (Type 1 - Separate
+### Process Isolation (Type 1 - Separate Environment)

-For nodes that need completely separate dependencies
+For nodes that need completely separate dependencies (different Python version, conda packages, conflicting libraries).
+
+#### Recommended: Pack-Wide Isolation
+
+For node packs where ALL nodes run in the same isolated environment:
+
+**Step 1: Configure comfy-env.toml**
+
+```toml
+[mypack]
+python = "3.11"
+isolated = true # All nodes run in this env
+
+[mypack.conda]
+packages = ["cgal"] # Conda packages (uses pixi)
+
+[mypack.packages]
+requirements = ["trimesh[easy]>=4.0", "bpy>=4.2"]
+```
+
+**Step 2: Enable in __init__.py**
+
+```python
+from comfy_env import setup_isolated_imports, enable_isolation
+
+# Setup import stubs BEFORE importing nodes
+setup_isolated_imports(__file__)
+
+from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+
+# Enable isolation for all nodes
+enable_isolation(NODE_CLASS_MAPPINGS)
+```
+
+**That's it!** All nodes run in an isolated Python 3.11 environment with their own dependencies.
+
+#### Alternative: Per-Node Isolation
+
+For cases where different nodes need different environments:

 ```python
 from comfy_env import isolated
@@ -266,7 +304,27 @@ vars_dict = env.as_dict()
 # {'cuda_version': '12.8', 'cuda_short': '128', 'torch_mm': '28', ...}
 ```

-###
+### enable_isolation()
+
+```python
+from comfy_env import enable_isolation
+
+enable_isolation(NODE_CLASS_MAPPINGS)
+```
+
+Wraps all node classes so their FUNCTION methods run in the isolated environment specified in comfy-env.toml. Requires `isolated = true` in the environment config.
+
+### setup_isolated_imports()
+
+```python
+from comfy_env import setup_isolated_imports
+
+setup_isolated_imports(__file__)
+```
+
+Sets up import stubs for packages that exist only in the isolated pixi environment. Call this BEFORE importing your nodes module. Packages available in both host and isolated environment are not stubbed.
+
+### Workers (for custom isolation)

 ```python
 from comfy_env import TorchMPWorker
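
The new src/comfy_env/stub_imports.py (310 added lines) does not appear in this section of the diff, so the actual stubbing mechanism is not visible here. A common way to provide the behaviour described above — placeholder imports for packages that exist only in the isolated pixi environment — is a `sys.meta_path` finder that serves empty stub modules for names the normal import machinery cannot resolve. The sketch below only illustrates that general technique; the class names and the example package set are hypothetical, not comfy_env's implementation.

```python
import importlib.abc
import importlib.machinery
import importlib.util
import sys
import types
from typing import Optional, Sequence, Set


class _StubLoader(importlib.abc.Loader):
    """Create an empty placeholder module for a stubbed package name."""

    def create_module(self, spec: importlib.machinery.ModuleSpec) -> types.ModuleType:
        module = types.ModuleType(spec.name)
        module.__stub__ = True  # marker so callers can detect the placeholder
        return module

    def exec_module(self, module: types.ModuleType) -> None:
        pass  # nothing to execute; the real work happens in the isolated env


class _StubFinder(importlib.abc.MetaPathFinder):
    """Serve stub modules for packages that only exist in the isolated env."""

    def __init__(self, stubbed: Set[str]) -> None:
        self.stubbed = stubbed

    def find_spec(self, fullname: str, path: Optional[Sequence[str]], target=None):
        if fullname.split(".")[0] in self.stubbed:
            return importlib.util.spec_from_loader(fullname, _StubLoader())
        return None  # defer to the normal import machinery


# Hypothetical usage: the names would come from comfy-env.toml.
sys.meta_path.append(_StubFinder({"cumesh", "cgal"}))
import cumesh  # resolves to a placeholder instead of raising ImportError
```

Because the finder is appended after the default finders, packages that are importable in the host environment resolve normally and are never stubbed, matching the behaviour described in the README text above.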

{comfy_env-0.0.49 → comfy_env-0.0.51}/README.md

@@ -56,9 +56,47 @@ from comfy_env import install
 install()
 ```

-### Process Isolation (Type 1 - Separate
+### Process Isolation (Type 1 - Separate Environment)

-For nodes that need completely separate dependencies
+For nodes that need completely separate dependencies (different Python version, conda packages, conflicting libraries).
+
+#### Recommended: Pack-Wide Isolation
+
+For node packs where ALL nodes run in the same isolated environment:
+
+**Step 1: Configure comfy-env.toml**
+
+```toml
+[mypack]
+python = "3.11"
+isolated = true # All nodes run in this env
+
+[mypack.conda]
+packages = ["cgal"] # Conda packages (uses pixi)
+
+[mypack.packages]
+requirements = ["trimesh[easy]>=4.0", "bpy>=4.2"]
+```
+
+**Step 2: Enable in __init__.py**
+
+```python
+from comfy_env import setup_isolated_imports, enable_isolation
+
+# Setup import stubs BEFORE importing nodes
+setup_isolated_imports(__file__)
+
+from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+
+# Enable isolation for all nodes
+enable_isolation(NODE_CLASS_MAPPINGS)
+```
+
+**That's it!** All nodes run in an isolated Python 3.11 environment with their own dependencies.
+
+#### Alternative: Per-Node Isolation
+
+For cases where different nodes need different environments:

 ```python
 from comfy_env import isolated
@@ -238,7 +276,27 @@ vars_dict = env.as_dict()
 # {'cuda_version': '12.8', 'cuda_short': '128', 'torch_mm': '28', ...}
 ```

-###
+### enable_isolation()
+
+```python
+from comfy_env import enable_isolation
+
+enable_isolation(NODE_CLASS_MAPPINGS)
+```
+
+Wraps all node classes so their FUNCTION methods run in the isolated environment specified in comfy-env.toml. Requires `isolated = true` in the environment config.
+
+### setup_isolated_imports()
+
+```python
+from comfy_env import setup_isolated_imports
+
+setup_isolated_imports(__file__)
+```
+
+Sets up import stubs for packages that exist only in the isolated pixi environment. Call this BEFORE importing your nodes module. Packages available in both host and isolated environment are not stubbed.
+
+### Workers (for custom isolation)

 ```python
 from comfy_env import TorchMPWorker
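
src/comfy_env/isolation.py (297 added lines) is likewise not included in this section, so the exact implementation of `enable_isolation` is not visible here. As a rough sketch of the wrapping pattern the README describes — patching each node class's `FUNCTION` method so the call is forwarded to the isolated environment — it could look roughly like the following; `dispatch_to_env` and `enable_isolation_sketch` are illustrative stand-ins, not comfy_env's API.

```python
from functools import wraps
from typing import Any, Callable, Dict, Type


def dispatch_to_env(method_name: str, *args: Any, **kwargs: Any) -> Any:
    """Stand-in for the real IPC call into the isolated environment."""
    raise NotImplementedError("placeholder for the isolated-env worker call")


def enable_isolation_sketch(node_class_mappings: Dict[str, Type]) -> None:
    """Wrap each node class's FUNCTION entry point so it runs out of process."""
    for node_cls in node_class_mappings.values():
        method_name = getattr(node_cls, "FUNCTION", None)  # ComfyUI convention
        if not method_name:
            continue
        original = getattr(node_cls, method_name)

        def make_proxy(fn: Callable, name: str) -> Callable:
            @wraps(fn)
            def proxy(self, *args: Any, **kwargs: Any) -> Any:
                # Forward to the isolated environment instead of running the
                # heavy import/compute in the host ComfyUI process.
                return dispatch_to_env(name, *args, **kwargs)
            return proxy

        setattr(node_cls, method_name, make_proxy(original, method_name))
```

The real implementation additionally has to serialize arguments and results across the process boundary, which is what the package's IPC and worker modules handle.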

{comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/__init__.py

@@ -34,6 +34,8 @@ from .env.security import (
 from .ipc.bridge import WorkerBridge
 from .ipc.worker import BaseWorker, register
 from .decorator import isolated, shutdown_all_processes
+from .isolation import enable_isolation
+from .stub_imports import setup_isolated_imports, cleanup_stubs

 # New in-place installation API
 from .install import install, verify_installation
@@ -144,6 +146,11 @@ __all__ = [
     # Legacy Decorator API
     "isolated",
     "shutdown_all_processes",
+    # New: Enable isolation for entire node pack
+    "enable_isolation",
+    # Import stubbing for isolated packages
+    "setup_isolated_imports",
+    "cleanup_stubs",
 ]

 # Add torch-based IPC if available

{comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/decorator.py

@@ -41,7 +41,7 @@ import time
 from dataclasses import dataclass
 from functools import wraps
 from pathlib import Path
-from typing import Any, Callable, Dict, List, Optional, Union
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union

 logger = logging.getLogger("comfy_env")

@@ -447,3 +447,254 @@ def isolated(
         return cls

     return decorator
+
+
+# ---------------------------------------------------------------------------
+# The @auto_isolate Decorator (Function-level)
+# ---------------------------------------------------------------------------
+
+def _parse_import_error(e: ImportError) -> Optional[str]:
+    """Extract the module name from an ImportError."""
+    # Python's ImportError has a 'name' attribute with the module name
+    if hasattr(e, 'name') and e.name:
+        return e.name
+
+    # Fallback: parse from message "No module named 'xxx'"
+    msg = str(e)
+    if "No module named" in msg:
+        # Extract 'xxx' from "No module named 'xxx'" or "No module named 'xxx.yyy'"
+        import re
+        match = re.search(r"No module named ['\"]([^'\"\.]+)", msg)
+        if match:
+            return match.group(1)
+
+    return None
+
+
+def _find_env_for_module(
+    module_name: str,
+    source_file: Path,
+) -> Optional[Tuple[str, Path, Path]]:
+    """
+    Find which isolated environment contains the given module.
+
+    Searches comfy-env.toml configs starting from the source file's directory,
+    looking for the module in cuda packages, requirements, etc.
+
+    Args:
+        module_name: The module that failed to import (e.g., "cumesh")
+        source_file: Path to the source file containing the function
+
+    Returns:
+        Tuple of (env_name, python_path, node_dir) or None if not found
+    """
+    from .env.config_file import discover_config, CONFIG_FILE_NAMES
+
+    # Normalize module name (cumesh, pytorch3d, etc.)
+    module_lower = module_name.lower().replace("-", "_").replace(".", "_")
+
+    # Search for config file starting from source file's directory
+    node_dir = source_file.parent
+    while node_dir != node_dir.parent:
+        for config_name in CONFIG_FILE_NAMES:
+            config_path = node_dir / config_name
+            if config_path.exists():
+                # Found a config, check if it has our module
+                config = discover_config(node_dir)
+                if config is None:
+                    continue
+
+                # Check all environments in the config
+                for env_name, env_config in config.envs.items():
+                    # Check cuda/no_deps_requirements
+                    if env_config.no_deps_requirements:
+                        for req in env_config.no_deps_requirements:
+                            req_name = req.split("==")[0].split(">=")[0].split("<")[0].strip()
+                            req_lower = req_name.lower().replace("-", "_")
+                            if req_lower == module_lower:
+                                # Found it! Get the python path
+                                env_path = node_dir / f"_env_{env_name}"
+                                if not env_path.exists():
+                                    # Try pixi path
+                                    env_path = node_dir / ".pixi" / "envs" / "default"
+
+                                if env_path.exists():
+                                    python_path = env_path / "bin" / "python"
+                                    if not python_path.exists():
+                                        python_path = env_path / "Scripts" / "python.exe"
+                                    if python_path.exists():
+                                        return (env_name, python_path, node_dir)
+
+                    # Check regular requirements too
+                    if env_config.requirements:
+                        for req in env_config.requirements:
+                            req_name = req.split("==")[0].split(">=")[0].split("<")[0].split("[")[0].strip()
+                            req_lower = req_name.lower().replace("-", "_")
+                            if req_lower == module_lower:
+                                env_path = node_dir / f"_env_{env_name}"
+                                if not env_path.exists():
+                                    env_path = node_dir / ".pixi" / "envs" / "default"
+
+                                if env_path.exists():
+                                    python_path = env_path / "bin" / "python"
+                                    if not python_path.exists():
+                                        python_path = env_path / "Scripts" / "python.exe"
+                                    if python_path.exists():
+                                        return (env_name, python_path, node_dir)
+
+                # Config found but module not in it, stop searching
+                break
+
+        node_dir = node_dir.parent
+
+    return None
+
+
+# Cache for auto_isolate workers
+_auto_isolate_workers: Dict[str, Any] = {}
+_auto_isolate_lock = threading.Lock()
+
+
+def _get_auto_isolate_worker(env_name: str, python_path: Path, node_dir: Path):
+    """Get or create a worker for auto_isolate."""
+    cache_key = f"{env_name}:{python_path}"
+
+    with _auto_isolate_lock:
+        if cache_key in _auto_isolate_workers:
+            worker = _auto_isolate_workers[cache_key]
+            if worker.is_alive():
+                return worker
+
+        # Create new PersistentVenvWorker
+        from .workers.venv import PersistentVenvWorker
+
+        worker = PersistentVenvWorker(
+            python=str(python_path),
+            working_dir=node_dir,
+            sys_path=[str(node_dir)],
+            name=f"auto-{env_name}",
+        )
+
+        _auto_isolate_workers[cache_key] = worker
+        return worker
+
+
+def auto_isolate(func: Callable) -> Callable:
+    """
+    Decorator that automatically runs a function in an isolated environment
+    when an ImportError occurs for a package that exists in the isolated env.
+
+    This provides seamless isolation - just write normal code with imports,
+    and if the import fails in the host environment but the package is
+    configured in comfy-env.toml, the function automatically retries in
+    the isolated environment.
+
+    Example:
+        from comfy_env import auto_isolate
+
+        @auto_isolate
+        def process_with_cumesh(mesh, target_faces):
+            import cumesh  # If this fails, function retries in isolated env
+            import torch
+
+            v = torch.tensor(mesh.vertices).cuda()
+            f = torch.tensor(mesh.faces).cuda()
+
+            cm = cumesh.CuMesh()
+            cm.init(v, f)
+            cm.simplify(target_faces)
+
+            result_v, result_f = cm.read()
+            return result_v.cpu().numpy(), result_f.cpu().numpy()
+
+    How it works:
+        1. Function runs normally in the host environment
+        2. If ImportError occurs, decorator catches it
+        3. Extracts the module name from the error (e.g., "cumesh")
+        4. Searches comfy-env.toml for which env has that module
+        5. Re-runs the entire function in that isolated environment
+        6. Returns the result as if nothing happened
+
+    Benefits:
+        - Zero overhead when imports succeed (fast path)
+        - Auto-detects which environment to use from the failed import
+        - Function is the isolation boundary (clean, debuggable)
+        - Works with any import pattern (top of function, conditional, etc.)
+
+    Note:
+        Arguments and return values are serialized via torch.save/load,
+        so they should be tensors, numpy arrays, or pickle-able objects.
+    """
+    # Get source file for environment detection
+    source_file = Path(inspect.getfile(func))
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            # Fast path: try running in host environment
+            return func(*args, **kwargs)
+
+        except ImportError as e:
+            # Extract module name from error
+            module_name = _parse_import_error(e)
+            if module_name is None:
+                # Can't determine module, re-raise
+                raise
+
+            # Find which env has this module
+            env_info = _find_env_for_module(module_name, source_file)
+            if env_info is None:
+                # Module not in any known isolated env, re-raise
+                raise
+
+            env_name, python_path, node_dir = env_info
+
+            _log(env_name, f"Import '{module_name}' failed in host, retrying in isolated env...")
+            _log(env_name, f" Python: {python_path}")
+
+            # Get or create worker
+            worker = _get_auto_isolate_worker(env_name, python_path, node_dir)
+
+            # Prepare arguments - convert numpy arrays to lists for IPC
+            import numpy as np
+
+            def convert_for_ipc(obj):
+                if isinstance(obj, np.ndarray):
+                    return obj.tolist()
+                elif hasattr(obj, 'vertices') and hasattr(obj, 'faces'):
+                    # Trimesh-like object - convert to dict
+                    return {
+                        '__trimesh__': True,
+                        'vertices': obj.vertices.tolist() if hasattr(obj.vertices, 'tolist') else list(obj.vertices),
+                        'faces': obj.faces.tolist() if hasattr(obj.faces, 'tolist') else list(obj.faces),
+                    }
+                elif isinstance(obj, (list, tuple)):
+                    converted = [convert_for_ipc(x) for x in obj]
+                    return type(obj)(converted) if isinstance(obj, tuple) else converted
+                elif isinstance(obj, dict):
+                    return {k: convert_for_ipc(v) for k, v in obj.items()}
+                return obj
+
+            converted_args = [convert_for_ipc(arg) for arg in args]
+            converted_kwargs = {k: convert_for_ipc(v) for k, v in kwargs.items()}
+
+            # Call via worker
+            start_time = time.time()
+
+            result = worker.call_module(
+                module=source_file.stem,
+                func=func.__name__,
+                *converted_args,
+                **converted_kwargs,
+            )
+
+            elapsed = time.time() - start_time
+            _log(env_name, f"← {func.__name__} completed in isolated env [{elapsed:.2f}s]")
+
+            return result
+
+    # Mark the function as auto-isolate enabled
+    wrapper._auto_isolate = True
+    wrapper._source_file = source_file
+
+    return wrapper
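
One detail of the new `_parse_import_error` fallback above is easy to miss: the regex captures only the top-level package name, so a failed `import cumesh.ops` still resolves to the `cumesh` entry in comfy-env.toml. A small self-contained check of that pattern (the helper name here is illustrative, not part of the package):

```python
import re
from typing import Optional


def top_level_module(message: str) -> Optional[str]:
    """Mirror of the fallback regex used by _parse_import_error."""
    match = re.search(r"No module named ['\"]([^'\"\.]+)", message)
    return match.group(1) if match else None


assert top_level_module("No module named 'cumesh'") == "cumesh"
assert top_level_module("No module named 'cumesh.ops'") == "cumesh"
assert top_level_module("cannot import name 'foo' from 'bar'") is None
```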

{comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/config.py

@@ -154,6 +154,8 @@ class IsolatedEnv:
     worker_script: Optional[str] = None # e.g., "worker.py" -> worker.py
     # Conda configuration (uses pixi backend when present)
     conda: Optional["CondaConfig"] = None
+    # Runtime isolation - run node FUNCTION methods in isolated subprocess
+    isolated: bool = False

     def __post_init__(self):
         """Validate and normalize configuration."""

{comfy_env-0.0.49 → comfy_env-0.0.51}/src/comfy_env/env/config_file.py

@@ -572,6 +572,9 @@ def _parse_single_env(name: str, env_data: Dict[str, Any], base_dir: Path) -> Is
     elif isinstance(darwin_section, list):
         darwin_reqs = darwin_section

+    # Parse isolated flag for runtime process isolation
+    isolated = env_data.get("isolated", False)
+
     return IsolatedEnv(
         name=name,
         python=python,
@@ -583,6 +586,7 @@ def _parse_single_env(name: str, env_data: Dict[str, Any], base_dir: Path) -> Is
         linux_requirements=linux_reqs,
         darwin_requirements=darwin_reqs,
         conda=conda_config,
+        isolated=isolated,
     )

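
For reference, the `isolated` flag read above is a plain boolean at the environment's top level and defaults to `False` when the key is absent, which is exactly what `env_data.get("isolated", False)` does. A minimal round-trip with the standard-library TOML parser, using the README's illustrative `[mypack]` table name:

```python
import tomllib  # Python 3.11+ standard library

# "mypack" is an illustrative environment name, as in the README example;
# the real parsing goes through comfy_env's _parse_single_env.
doc = tomllib.loads(
    """
[mypack]
python = "3.11"
isolated = true
"""
)

env_data = doc["mypack"]
print(env_data.get("isolated", False))  # True; omitting the key yields False
```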