comfy-env 0.0.64__py3-none-any.whl → 0.0.66__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +70 -122
- comfy_env/cli.py +78 -7
- comfy_env/config/__init__.py +19 -0
- comfy_env/config/parser.py +151 -0
- comfy_env/config/types.py +64 -0
- comfy_env/install.py +83 -361
- comfy_env/isolation/__init__.py +9 -0
- comfy_env/isolation/wrap.py +351 -0
- comfy_env/nodes.py +2 -2
- comfy_env/pixi/__init__.py +48 -0
- comfy_env/pixi/core.py +356 -0
- comfy_env/{resolver.py → pixi/resolver.py} +1 -14
- comfy_env/prestartup.py +60 -0
- comfy_env/templates/comfy-env-instructions.txt +30 -87
- comfy_env/templates/comfy-env.toml +68 -136
- comfy_env/workers/__init__.py +21 -32
- comfy_env/workers/base.py +1 -1
- comfy_env/workers/{torch_mp.py → mp.py} +47 -14
- comfy_env/workers/{venv.py → subprocess.py} +405 -441
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/METADATA +2 -1
- comfy_env-0.0.66.dist-info/RECORD +34 -0
- comfy_env/decorator.py +0 -700
- comfy_env/env/__init__.py +0 -47
- comfy_env/env/config.py +0 -201
- comfy_env/env/config_file.py +0 -740
- comfy_env/env/manager.py +0 -636
- comfy_env/env/security.py +0 -267
- comfy_env/ipc/__init__.py +0 -55
- comfy_env/ipc/bridge.py +0 -476
- comfy_env/ipc/protocol.py +0 -265
- comfy_env/ipc/tensor.py +0 -371
- comfy_env/ipc/torch_bridge.py +0 -401
- comfy_env/ipc/transport.py +0 -318
- comfy_env/ipc/worker.py +0 -221
- comfy_env/isolation.py +0 -310
- comfy_env/pixi.py +0 -760
- comfy_env/stub_imports.py +0 -270
- comfy_env/stubs/__init__.py +0 -1
- comfy_env/stubs/comfy/__init__.py +0 -6
- comfy_env/stubs/comfy/model_management.py +0 -58
- comfy_env/stubs/comfy/utils.py +0 -29
- comfy_env/stubs/folder_paths.py +0 -71
- comfy_env/workers/pool.py +0 -241
- comfy_env-0.0.64.dist-info/RECORD +0 -48
- /comfy_env/{env/cuda_gpu_detection.py → pixi/cuda_detection.py} +0 -0
- /comfy_env/{env → pixi}/platform/__init__.py +0 -0
- /comfy_env/{env → pixi}/platform/base.py +0 -0
- /comfy_env/{env → pixi}/platform/darwin.py +0 -0
- /comfy_env/{env → pixi}/platform/linux.py +0 -0
- /comfy_env/{env → pixi}/platform/windows.py +0 -0
- /comfy_env/{registry.py → pixi/registry.py} +0 -0
- /comfy_env/{wheel_sources.yml → pixi/wheel_sources.yml} +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/licenses/LICENSE +0 -0
comfy_env/stub_imports.py
DELETED
@@ -1,270 +0,0 @@
-"""
-Import stub system for isolated node packs.
-
-This module provides automatic import stubbing for packages that exist only
-in the isolated pixi environment, not in the host ComfyUI Python.
-
-How it works:
-1. Scan pixi environment's site-packages for installed packages
-2. Look up import names from top_level.txt in .dist-info directories
-3. Inject stub modules directly into sys.modules for missing packages
-4. Stubs allow class definitions to parse without the real packages
-5. Real packages are used when FUNCTION runs in the isolated worker
-
-Usage:
-    # In node pack's __init__.py, BEFORE importing nodes:
-    from comfy_env import setup_isolated_imports
-    setup_isolated_imports(__file__)
-
-    from .nodes import NODE_CLASS_MAPPINGS  # Now works!
-"""
-
-import sys
-import types
-from pathlib import Path
-from typing import List, Set
-
-
-def _log(msg: str) -> None:
-    """Log with immediate flush to stderr (visible on Windows subprocess)."""
-    print(msg, file=sys.stderr, flush=True)
-
-
-class _StubModule(types.ModuleType):
-    """A stub module that accepts any attribute access or call."""
-
-    def __init__(self, name: str):
-        super().__init__(name)
-        self.__path__ = []  # Make it a package
-        self.__file__ = f"<stub:{name}>"
-        self._stub_name = name
-
-    def __getattr__(self, name: str):
-        if name.startswith('_'):
-            raise AttributeError(name)
-        return _StubObject(f"{self._stub_name}.{name}")
-
-    def __repr__(self):
-        return f"<StubModule '{self._stub_name}'>"
-
-
-class _StubObject:
-    """A stub object that accepts any operation."""
-
-    def __init__(self, name: str = "stub"):
-        self._stub_name = name
-
-    def __getattr__(self, name: str):
-        if name.startswith('_'):
-            raise AttributeError(name)
-        return _StubObject(f"{self._stub_name}.{name}")
-
-    def __call__(self, *args, **kwargs):
-        return _StubObject(f"{self._stub_name}()")
-
-    def __iter__(self):
-        return iter([])
-
-    def __len__(self):
-        return 0
-
-    def __bool__(self):
-        return False
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *args):
-        pass
-
-    def __repr__(self):
-        return f"<Stub '{self._stub_name}'>"
-
-    def __add__(self, other): return self
-    def __radd__(self, other): return self
-    def __sub__(self, other): return self
-    def __rsub__(self, other): return self
-    def __mul__(self, other): return self
-    def __rmul__(self, other): return self
-    def __truediv__(self, other): return self
-    def __rtruediv__(self, other): return self
-    def __eq__(self, other): return False
-    def __ne__(self, other): return True
-    def __lt__(self, other): return False
-    def __le__(self, other): return False
-    def __gt__(self, other): return False
-    def __ge__(self, other): return False
-    def __hash__(self): return hash(self._stub_name)
-    def __getitem__(self, key): return _StubObject(f"{self._stub_name}[{key}]")
-    def __setitem__(self, key, value): pass
-    def __contains__(self, item): return False
-
-
-def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
-    """
-    Get import names from pixi environment using top_level.txt metadata.
-
-    This properly maps package names to import names (e.g., libigl -> igl,
-    PyYAML -> yaml) by reading the canonical top_level.txt files.
-
-    Returns:
-        Set of import names that should be stubbed.
-    """
-    import_names = set()
-
-    pixi_base = node_dir / ".pixi" / "envs" / "default"
-
-    # Find site-packages (different paths on Windows vs Linux)
-    site_packages = None
-    win_site = pixi_base / "Lib" / "site-packages"
-    if win_site.exists():
-        site_packages = win_site
-    else:
-        pixi_lib = pixi_base / "lib"
-        if pixi_lib.exists():
-            python_dirs = list(pixi_lib.glob("python3.*"))
-            if python_dirs:
-                site_packages = python_dirs[0] / "site-packages"
-
-    if site_packages is None or not site_packages.exists():
-        return import_names
-
-    _log(f"[comfy-env] Scanning: {site_packages}")
-
-    # PRIMARY: Read top_level.txt from all .dist-info directories
-    for dist_info in site_packages.glob("*.dist-info"):
-        top_level_file = dist_info / "top_level.txt"
-        if top_level_file.exists():
-            try:
-                for line in top_level_file.read_text(encoding="utf-8").splitlines():
-                    name = line.strip()
-                    if name and not name.startswith('#'):
-                        # Extract just the top-level name
-                        top_name = name.replace('\\', '/').split('/')[0]
-                        if top_name:
-                            import_names.add(top_name)
-            except Exception:
-                pass
-
-    # FALLBACK: Scan for packages/modules not covered by dist-info
-    for item in site_packages.iterdir():
-        name = item.name
-
-        if name.startswith('_') or name.startswith('.'):
-            continue
-        if name.endswith('.dist-info') or name.endswith('.egg-info'):
-            continue
-        if name in {'bin', 'share', 'include', 'etc'}:
-            continue
-
-        # Package directory (has __init__.py)
-        if item.is_dir() and (item / "__init__.py").exists():
-            import_names.add(name)
-            continue
-
-        # Namespace package (directory without __init__.py but has submodules)
-        if item.is_dir():
-            has_py = any(item.glob("*.py"))
-            has_subpkg = any((item / d / "__init__.py").exists() for d in item.iterdir() if d.is_dir())
-            if has_py or has_subpkg:
-                import_names.add(name)
-            continue
-
-        # Single-file module (.py)
-        if name.endswith('.py'):
-            import_names.add(name[:-3])
-            continue
-
-        # Extension module (.so on Linux, .pyd on Windows)
-        if name.endswith('.so') or name.endswith('.pyd'):
-            module_name = name.split('.')[0]
-            import_names.add(module_name)
-
-    return import_names
-
-
-def _filter_to_missing(import_names: Set[str]) -> Set[str]:
-    """Filter to only imports not available in host Python."""
-    missing = set()
-
-    for name in import_names:
-        # Skip if already in sys.modules
-        if name in sys.modules:
-            continue
-
-        # Try to import
-        try:
-            __import__(name)
-        except ImportError:
-            missing.add(name)
-        except Exception:
-            # Other errors (DLL load, etc.) - stub these too
-            missing.add(name)
-
-    return missing
-
-
-# Track what we stubbed for cleanup
-_stubbed_modules: Set[str] = set()
-
-
-def setup_isolated_imports(init_file: str) -> List[str]:
-    """
-    Set up import stubs for packages in the pixi environment but not in host Python.
-
-    Call this BEFORE importing your nodes module.
-
-    Args:
-        init_file: The __file__ of the calling module (usually __file__ from __init__.py)
-
-    Returns:
-        List of import names that were stubbed.
-
-    Example:
-        from comfy_env import setup_isolated_imports
-        setup_isolated_imports(__file__)
-
-        from .nodes import NODE_CLASS_MAPPINGS  # Now works!
-    """
-    global _stubbed_modules
-
-    node_dir = Path(init_file).resolve().parent
-
-    # Get all import names from pixi environment
-    pixi_imports = _get_import_names_from_pixi(node_dir)
-
-    if not pixi_imports:
-        _log("[comfy-env] No pixi environment found")
-        return []
-
-    # Filter to only those missing in host
-    missing = _filter_to_missing(pixi_imports)
-
-    if not missing:
-        _log("[comfy-env] All packages available in host")
-        return []
-
-    # Direct injection into sys.modules - simple and reliable
-    for name in missing:
-        if name not in sys.modules:
-            sys.modules[name] = _StubModule(name)
-            _stubbed_modules.add(name)
-
-    stubbed = sorted(_stubbed_modules)
-    if len(stubbed) <= 10:
-        _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed)}")
-    else:
-        _log(f"[comfy-env] Injected {len(stubbed)} stubs: {', '.join(stubbed[:10])}... +{len(stubbed)-10} more")
-
-    return stubbed
-
-
-def cleanup_stubs():
-    """Remove injected stub modules from sys.modules."""
-    global _stubbed_modules
-
-    for name in list(_stubbed_modules):
-        if name in sys.modules and isinstance(sys.modules[name], _StubModule):
-            del sys.modules[name]
-
-    _stubbed_modules.clear()
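
Note: for reference, the calling pattern this module supported is sketched below, taken from its own docstring. It reflects the removed 0.0.64 API; whether 0.0.66 still exports setup_isolated_imports (or an equivalent) is not visible in this diff.

# Sketch of the removed 0.0.64 usage, per the module docstring above.
# Goes in a node pack's __init__.py, before importing anything that needs
# packages which exist only in the pack's .pixi environment.
from comfy_env import setup_isolated_imports

stubbed = setup_isolated_imports(__file__)  # returns the list of injected stub names

from .nodes import NODE_CLASS_MAPPINGS      # class definitions now parse against stubs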
comfy_env/stubs/__init__.py
DELETED
@@ -1 +0,0 @@
-# ComfyUI stubs for isolated workers
comfy_env/stubs/comfy/model_management.py
DELETED
@@ -1,58 +0,0 @@
-"""
-Stub for comfy.model_management in isolated worker processes.
-
-Provides device detection and memory management functions without
-requiring the full ComfyUI installation.
-"""
-
-import torch
-
-
-def get_torch_device():
-    """Return the best available torch device."""
-    if torch.cuda.is_available():
-        return torch.device("cuda")
-    elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
-        return torch.device("mps")
-    return torch.device("cpu")
-
-
-def get_free_memory(device=None, torch_free_too=False):
-    """Return free VRAM in bytes."""
-    if device is None:
-        device = get_torch_device()
-    if device.type == "cuda":
-        free, total = torch.cuda.mem_get_info(device)
-        return free
-    return 0
-
-
-def get_total_memory(device=None, torch_total_too=False):
-    """Return total VRAM in bytes."""
-    if device is None:
-        device = get_torch_device()
-    if device.type == "cuda":
-        free, total = torch.cuda.mem_get_info(device)
-        return total
-    return 0
-
-
-def soft_empty_cache(force=False):
-    """Clear CUDA cache."""
-    if torch.cuda.is_available():
-        torch.cuda.empty_cache()
-
-
-def unload_all_models():
-    """No-op in isolated worker - models managed by the node itself."""
-    pass
-
-
-def interrupt_current_processing(value=True):
-    """No-op in isolated worker."""
-    pass
-
-
-def processing_interrupted():
-    """Always returns False in isolated worker."""
-    return False
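
Note: a minimal sketch of how worker-side node code consumed this stub, assuming the stubs directory is placed on sys.path in the isolated process so that comfy.model_management resolves to the file above rather than to full ComfyUI.

# Sketch only: same call signatures as ComfyUI's comfy.model_management.
import comfy.model_management as mm

device = mm.get_torch_device()           # cuda, mps, or cpu
free_vram = mm.get_free_memory(device)   # bytes; 0 on non-CUDA devices
mm.soft_empty_cache()                    # empties the CUDA cache when available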
comfy_env/stubs/comfy/utils.py
DELETED
@@ -1,29 +0,0 @@
-"""
-Stub for comfy.utils in isolated worker processes.
-
-Provides utility classes like ProgressBar without requiring
-the full ComfyUI installation.
-"""
-
-
-class ProgressBar:
-    """
-    No-op progress bar for isolated workers.
-
-    In isolated subprocess, we can't update the main ComfyUI progress bar,
-    so this just tracks progress internally. Nodes can still use the same API.
-    """
-
-    def __init__(self, total):
-        self.total = total
-        self.current = 0
-
-    def update(self, value):
-        """Increment progress by value."""
-        self.current += value
-
-    def update_absolute(self, value, total=None, preview=None):
-        """Set progress to absolute value."""
-        self.current = value
-        if total is not None:
-            self.total = total
comfy_env/stubs/folder_paths.py
DELETED
@@ -1,71 +0,0 @@
-"""
-Minimal folder_paths stub for isolated worker processes.
-
-Provides the same interface as ComfyUI's folder_paths module
-without importing any ComfyUI dependencies.
-"""
-
-import os
-from pathlib import Path
-
-_comfyui_base = None
-
-def _find_comfyui_base():
-    """Find ComfyUI base from COMFYUI_BASE env var, child dirs, or by walking up."""
-    global _comfyui_base
-    if _comfyui_base:
-        return _comfyui_base
-
-    # Check env var first
-    if os.environ.get("COMFYUI_BASE"):
-        _comfyui_base = Path(os.environ["COMFYUI_BASE"])
-        return _comfyui_base
-
-    # Check common child directories (for test environments)
-    # Also check parent's children (isolated venv is sibling to .comfy-test-env)
-    cwd = Path.cwd().resolve()
-    for base in [cwd, cwd.parent]:
-        for child in [".comfy-test-env/ComfyUI", "ComfyUI"]:
-            candidate = base / child
-            if (candidate / "main.py").exists() and (candidate / "comfy").exists():
-                _comfyui_base = candidate
-                return _comfyui_base
-
-    # Walk up from cwd looking for ComfyUI
-    current = cwd
-    for _ in range(10):
-        if (current / "main.py").exists() and (current / "comfy").exists():
-            _comfyui_base = current
-            return _comfyui_base
-        current = current.parent
-
-    return None
-
-# Models directory
-@property
-def models_dir():
-    base = _find_comfyui_base()
-    return str(base / "models") if base else None
-
-# Make models_dir work as both attribute and property
-class _ModuleProxy:
-    @property
-    def models_dir(self):
-        base = _find_comfyui_base()
-        return str(base / "models") if base else None
-
-    def get_output_directory(self):
-        base = _find_comfyui_base()
-        return str(base / "output") if base else None
-
-    def get_input_directory(self):
-        base = _find_comfyui_base()
-        return str(base / "input") if base else None
-
-    def get_temp_directory(self):
-        base = _find_comfyui_base()
-        return str(base / "temp") if base else None
-
-# Replace module with proxy instance
-import sys
-sys.modules[__name__] = _ModuleProxy()
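
Note: the sys.modules[__name__] replacement at the end of the file is what makes models_dir behave like a plain module attribute (a module-level @property alone has no effect). A sketch of the caller-side view, with illustrative paths:

# Sketch only: worker code imports folder_paths exactly as it would inside
# ComfyUI; the results depend on where a ComfyUI checkout is actually found.
import folder_paths

print(folder_paths.models_dir)               # e.g. .../ComfyUI/models, or None
print(folder_paths.get_output_directory())   # e.g. .../ComfyUI/output, or None
print(folder_paths.get_input_directory())    # e.g. .../ComfyUI/input, or None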
comfy_env/workers/pool.py
DELETED
@@ -1,241 +0,0 @@
-"""
-WorkerPool - Global registry and management of named workers.
-
-Provides a simple API for getting workers by name:
-
-    from comfy_env.workers import get_worker
-
-    worker = get_worker("sam3d")
-    result = worker.call_module("my_module", "my_func", image=tensor)
-
-Workers are registered at startup and reused across calls:
-
-    from comfy_env.workers import register_worker, TorchMPWorker
-
-    register_worker("default", TorchMPWorker())
-    register_worker("sam3d", PersistentVenvWorker(
-        python="/path/to/venv/bin/python",
-        working_dir="/path/to/nodes",
-    ))
-"""
-
-import atexit
-import threading
-from typing import Dict, Optional, Union
-from pathlib import Path
-
-from .base import Worker
-
-
-class WorkerPool:
-    """
-    Singleton pool of named workers.
-
-    Manages worker lifecycle, provides access by name, handles cleanup.
-    """
-
-    _instance: Optional["WorkerPool"] = None
-    _lock = threading.Lock()
-
-    def __new__(cls):
-        if cls._instance is None:
-            with cls._lock:
-                if cls._instance is None:
-                    cls._instance = super().__new__(cls)
-                    cls._instance._initialized = False
-        return cls._instance
-
-    def __init__(self):
-        if self._initialized:
-            return
-        self._initialized = True
-        self._workers: Dict[str, Worker] = {}
-        self._factories: Dict[str, callable] = {}
-        self._worker_lock = threading.Lock()
-
-    def register(
-        self,
-        name: str,
-        worker: Optional[Worker] = None,
-        factory: Optional[callable] = None,
-    ) -> None:
-        """
-        Register a worker or worker factory.
-
-        Args:
-            name: Name to register under.
-            worker: Pre-created worker instance.
-            factory: Callable that creates worker on first use (lazy).
-
-        Only one of worker or factory should be provided.
-        """
-        if worker is not None and factory is not None:
-            raise ValueError("Provide either worker or factory, not both")
-        if worker is None and factory is None:
-            raise ValueError("Must provide worker or factory")
-
-        with self._worker_lock:
-            # Shutdown existing worker if replacing
-            if name in self._workers:
-                try:
-                    self._workers[name].shutdown()
-                except:
-                    pass
-
-            if worker is not None:
-                self._workers[name] = worker
-                self._factories.pop(name, None)
-            else:
-                self._factories[name] = factory
-                self._workers.pop(name, None)
-
-    def get(self, name: str) -> Worker:
-        """
-        Get a worker by name.
-
-        Args:
-            name: Registered worker name.
-
-        Returns:
-            The worker instance.
-
-        Raises:
-            KeyError: If no worker registered with that name.
-        """
-        with self._worker_lock:
-            # Check for existing worker
-            if name in self._workers:
-                worker = self._workers[name]
-                if worker.is_alive():
-                    return worker
-                # Worker died, try to recreate from factory
-                if name not in self._factories:
-                    raise RuntimeError(f"Worker '{name}' died and no factory to recreate")
-
-            # Create from factory
-            if name in self._factories:
-                worker = self._factories[name]()
-                self._workers[name] = worker
-                return worker
-
-            raise KeyError(f"No worker registered with name: {name}")
-
-    def shutdown(self, name: Optional[str] = None) -> None:
-        """
-        Shutdown workers.
-
-        Args:
-            name: If provided, shutdown only this worker.
-                If None, shutdown all workers.
-        """
-        with self._worker_lock:
-            if name is not None:
-                if name in self._workers:
-                    try:
-                        self._workers[name].shutdown()
-                    except:
-                        pass
-                    del self._workers[name]
-            else:
-                for worker in self._workers.values():
-                    try:
-                        worker.shutdown()
-                    except:
-                        pass
-                self._workers.clear()
-
-    def list_workers(self) -> Dict[str, str]:
-        """
-        List all registered workers.
-
-        Returns:
-            Dict of name -> status string.
-        """
-        with self._worker_lock:
-            result = {}
-            for name, worker in self._workers.items():
-                status = "alive" if worker.is_alive() else "dead"
-                result[name] = f"{type(worker).__name__} ({status})"
-            for name in self._factories:
-                if name not in result:
-                    result[name] = f"factory (not started)"
-            return result
-
-
-# Global pool instance
-_pool = WorkerPool()
-
-
-def get_worker(name: str) -> Worker:
-    """
-    Get a worker by name from the global pool.
-
-    Args:
-        name: Registered worker name.
-
-    Returns:
-        Worker instance.
-
-    Example:
-        worker = get_worker("sam3d")
-        result = worker.call_module("my_module", "my_func", image=tensor)
-    """
-    return _pool.get(name)
-
-
-def register_worker(
-    name: str,
-    worker: Optional[Worker] = None,
-    factory: Optional[callable] = None,
-) -> None:
-    """
-    Register a worker in the global pool.
-
-    Args:
-        name: Name to register under.
-        worker: Pre-created worker instance.
-        factory: Callable that creates worker on demand.
-
-    Example:
-        # Register pre-created worker
-        register_worker("default", TorchMPWorker())
-
-        # Register factory for lazy creation
-        register_worker("sam3d", factory=lambda: PersistentVenvWorker(
-            python="/path/to/venv/bin/python",
-        ))
-    """
-    _pool.register(name, worker=worker, factory=factory)
-
-
-def shutdown_workers(name: Optional[str] = None) -> None:
-    """
-    Shutdown workers in the global pool.
-
-    Args:
-        name: If provided, shutdown only this worker.
-            If None, shutdown all workers.
-    """
-    _pool.shutdown(name)
-
-
-def list_workers() -> Dict[str, str]:
-    """
-    List all registered workers.
-
-    Returns:
-        Dict of name -> status description.
-    """
-    return _pool.list_workers()
-
-
-# Register default worker (TorchMPWorker) on import
-def _register_default():
-    from .torch_mp import TorchMPWorker
-    register_worker("default", factory=lambda: TorchMPWorker(name="default"))
-
-
-_register_default()
-
-# Cleanup on exit
-atexit.register(lambda: shutdown_workers())
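
Note: beyond the docstring examples, get() above also implements lazy creation and crash recovery when a factory is registered. A sketch of that behavior using the removed 0.0.64 import paths (0.0.66 renames workers/torch_mp.py to workers/mp.py, so the worker class may live elsewhere there):

# Sketch only: factory-based registration against the removed 0.0.64 pool.
from comfy_env.workers import TorchMPWorker, register_worker, get_worker, shutdown_workers

register_worker("default", factory=lambda: TorchMPWorker(name="default"))

worker = get_worker("default")    # factory runs on first use
# ... if the worker process later dies ...
worker = get_worker("default")    # is_alive() is False, so the factory rebuilds it

shutdown_workers()                # explicit cleanup; an atexit hook also does this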