comfy-env 0.0.64__py3-none-any.whl → 0.0.66__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +70 -122
- comfy_env/cli.py +78 -7
- comfy_env/config/__init__.py +19 -0
- comfy_env/config/parser.py +151 -0
- comfy_env/config/types.py +64 -0
- comfy_env/install.py +83 -361
- comfy_env/isolation/__init__.py +9 -0
- comfy_env/isolation/wrap.py +351 -0
- comfy_env/nodes.py +2 -2
- comfy_env/pixi/__init__.py +48 -0
- comfy_env/pixi/core.py +356 -0
- comfy_env/{resolver.py → pixi/resolver.py} +1 -14
- comfy_env/prestartup.py +60 -0
- comfy_env/templates/comfy-env-instructions.txt +30 -87
- comfy_env/templates/comfy-env.toml +68 -136
- comfy_env/workers/__init__.py +21 -32
- comfy_env/workers/base.py +1 -1
- comfy_env/workers/{torch_mp.py → mp.py} +47 -14
- comfy_env/workers/{venv.py → subprocess.py} +405 -441
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/METADATA +2 -1
- comfy_env-0.0.66.dist-info/RECORD +34 -0
- comfy_env/decorator.py +0 -700
- comfy_env/env/__init__.py +0 -47
- comfy_env/env/config.py +0 -201
- comfy_env/env/config_file.py +0 -740
- comfy_env/env/manager.py +0 -636
- comfy_env/env/security.py +0 -267
- comfy_env/ipc/__init__.py +0 -55
- comfy_env/ipc/bridge.py +0 -476
- comfy_env/ipc/protocol.py +0 -265
- comfy_env/ipc/tensor.py +0 -371
- comfy_env/ipc/torch_bridge.py +0 -401
- comfy_env/ipc/transport.py +0 -318
- comfy_env/ipc/worker.py +0 -221
- comfy_env/isolation.py +0 -310
- comfy_env/pixi.py +0 -760
- comfy_env/stub_imports.py +0 -270
- comfy_env/stubs/__init__.py +0 -1
- comfy_env/stubs/comfy/__init__.py +0 -6
- comfy_env/stubs/comfy/model_management.py +0 -58
- comfy_env/stubs/comfy/utils.py +0 -29
- comfy_env/stubs/folder_paths.py +0 -71
- comfy_env/workers/pool.py +0 -241
- comfy_env-0.0.64.dist-info/RECORD +0 -48
- /comfy_env/{env/cuda_gpu_detection.py → pixi/cuda_detection.py} +0 -0
- /comfy_env/{env → pixi}/platform/__init__.py +0 -0
- /comfy_env/{env → pixi}/platform/base.py +0 -0
- /comfy_env/{env → pixi}/platform/darwin.py +0 -0
- /comfy_env/{env → pixi}/platform/linux.py +0 -0
- /comfy_env/{env → pixi}/platform/windows.py +0 -0
- /comfy_env/{registry.py → pixi/registry.py} +0 -0
- /comfy_env/{wheel_sources.yml → pixi/wheel_sources.yml} +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/licenses/LICENSE +0 -0
comfy_env/pixi/core.py
ADDED
@@ -0,0 +1,356 @@
+"""
+Pixi integration for comfy-env.
+
+Pixi is a fast package manager that supports both conda and pip packages.
+All dependencies go through pixi for unified management.
+
+See: https://pixi.sh/
+"""
+
+import copy
+import platform
+import shutil
+import stat
+import subprocess
+import sys
+from pathlib import Path
+from typing import Any, Callable, Dict, List, Optional
+
+from ..config.types import ComfyEnvConfig
+
+
+# Pixi download URLs by platform
+PIXI_URLS = {
+    ("Linux", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-unknown-linux-musl",
+    ("Linux", "aarch64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-unknown-linux-musl",
+    ("Darwin", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-apple-darwin",
+    ("Darwin", "arm64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-apple-darwin",
+    ("Windows", "AMD64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-pc-windows-msvc.exe",
+}
+
+# CUDA wheels index
+CUDA_WHEELS_INDEX = "https://pozzettiandrea.github.io/cuda-wheels/"
+
+# CUDA version -> PyTorch version mapping
+CUDA_TORCH_MAP = {
+    "12.8": "2.8",
+    "12.4": "2.4",
+    "12.1": "2.4",
+}
+
+
+def get_current_platform() -> str:
+    """Get the current platform string for pixi."""
+    if sys.platform == "linux":
+        return "linux-64"
+    elif sys.platform == "darwin":
+        return "osx-arm64" if platform.machine() == "arm64" else "osx-64"
+    elif sys.platform == "win32":
+        return "win-64"
+    return "linux-64"
+
+
+def get_pixi_path() -> Optional[Path]:
+    """Find the pixi executable."""
+    pixi_cmd = shutil.which("pixi")
+    if pixi_cmd:
+        return Path(pixi_cmd)
+
+    home = Path.home()
+    candidates = [
+        home / ".pixi" / "bin" / "pixi",
+        home / ".local" / "bin" / "pixi",
+    ]
+
+    if sys.platform == "win32":
+        candidates = [p.with_suffix(".exe") for p in candidates]
+
+    for candidate in candidates:
+        if candidate.exists():
+            return candidate
+
+    return None
+
+
+def ensure_pixi(
+    install_dir: Optional[Path] = None,
+    log: Callable[[str], None] = print,
+) -> Path:
+    """Ensure pixi is installed, downloading if necessary."""
+    existing = get_pixi_path()
+    if existing:
+        return existing
+
+    log("Pixi not found, downloading...")
+
+    if install_dir is None:
+        install_dir = Path.home() / ".local" / "bin"
+    install_dir.mkdir(parents=True, exist_ok=True)
+
+    system = platform.system()
+    machine = platform.machine()
+
+    if machine in ("x86_64", "AMD64"):
+        machine = "x86_64" if system != "Windows" else "AMD64"
+    elif machine in ("arm64", "aarch64"):
+        machine = "arm64" if system == "Darwin" else "aarch64"
+
+    url_key = (system, machine)
+    if url_key not in PIXI_URLS:
+        raise RuntimeError(f"No pixi download for {system}/{machine}")
+
+    url = PIXI_URLS[url_key]
+    pixi_path = install_dir / ("pixi.exe" if system == "Windows" else "pixi")
+
+    try:
+        import urllib.request
+        urllib.request.urlretrieve(url, pixi_path)
+    except Exception as e:
+        result = subprocess.run(
+            ["curl", "-fsSL", "-o", str(pixi_path), url],
+            capture_output=True, text=True,
+        )
+        if result.returncode != 0:
+            raise RuntimeError(f"Failed to download pixi: {result.stderr}") from e
+
+    if system != "Windows":
+        pixi_path.chmod(pixi_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+
+    log(f"Installed pixi to: {pixi_path}")
+    return pixi_path
+
+
+def clean_pixi_artifacts(node_dir: Path, log: Callable[[str], None] = print) -> None:
+    """Remove previous pixi installation artifacts."""
+    for path in [node_dir / "pixi.toml", node_dir / "pixi.lock"]:
+        if path.exists():
+            path.unlink()
+    pixi_dir = node_dir / ".pixi"
+    if pixi_dir.exists():
+        shutil.rmtree(pixi_dir)
+
+
+def get_pixi_python(node_dir: Path) -> Optional[Path]:
+    """Get path to Python in the pixi environment."""
+    env_dir = node_dir / ".pixi" / "envs" / "default"
+    if sys.platform == "win32":
+        python_path = env_dir / "python.exe"
+    else:
+        python_path = env_dir / "bin" / "python"
+    return python_path if python_path.exists() else None
+
+
+def pixi_run(
+    command: List[str],
+    node_dir: Path,
+    log: Callable[[str], None] = print,
+) -> subprocess.CompletedProcess:
+    """Run a command in the pixi environment."""
+    pixi_path = get_pixi_path()
+    if not pixi_path:
+        raise RuntimeError("Pixi not found")
+    return subprocess.run(
+        [str(pixi_path), "run"] + command,
+        cwd=node_dir,
+        capture_output=True,
+        text=True,
+    )
+
+
+def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
+    """Deep merge two dicts, override wins for conflicts."""
+    result = copy.deepcopy(base)
+    for key, value in override.items():
+        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+            result[key] = _deep_merge(result[key], value)
+        else:
+            result[key] = copy.deepcopy(value)
+    return result
+
+
+def pixi_install(
+    cfg: ComfyEnvConfig,
+    node_dir: Path,
+    log: Callable[[str], None] = print,
+    create_env_link: bool = False,
+) -> bool:
+    """
+    Install all packages via pixi.
+
+    comfy-env.toml is a superset of pixi.toml. This function:
+    1. Starts with passthrough sections from comfy-env.toml
+    2. Adds workspace metadata (name, version, channels, platforms)
+    3. Adds system-requirements if needed (CUDA detection)
+    4. Adds CUDA find-links and PyTorch if [cuda] packages present
+    5. Writes combined data as pixi.toml
+
+    Args:
+        cfg: ComfyEnvConfig with packages to install.
+        node_dir: Directory to install in.
+        log: Logging callback.
+        create_env_link: If True, create _env_<name> symlink for isolation.
+
+    Returns:
+        True if installation succeeded.
+    """
+    try:
+        import tomli_w
+    except ImportError:
+        raise ImportError(
+            "tomli-w required for writing TOML. Install with: pip install tomli-w"
+        )
+
+    from .cuda_detection import get_recommended_cuda_version
+
+    # Start with passthrough data from comfy-env.toml
+    pixi_data = copy.deepcopy(cfg.pixi_passthrough)
+
+    # Detect CUDA version if CUDA packages requested
+    cuda_version = None
+    torch_version = None
+    if cfg.has_cuda and sys.platform != "darwin":
+        cuda_version = get_recommended_cuda_version()
+        if cuda_version:
+            cuda_mm = ".".join(cuda_version.split(".")[:2])
+            torch_version = CUDA_TORCH_MAP.get(cuda_mm, "2.8")
+            log(f"Detected CUDA {cuda_version} → PyTorch {torch_version}")
+        else:
+            log("Warning: CUDA packages requested but no GPU detected")
+
+    # Clean previous artifacts
+    clean_pixi_artifacts(node_dir, log)
+
+    # Ensure pixi is installed
+    pixi_path = ensure_pixi(log=log)
+
+    # Build workspace section
+    workspace = pixi_data.get("workspace", {})
+    workspace.setdefault("name", node_dir.name)
+    workspace.setdefault("version", "0.1.0")
+    workspace.setdefault("channels", ["conda-forge"])
+    workspace.setdefault("platforms", [get_current_platform()])
+    pixi_data["workspace"] = workspace
+
+    # Build system-requirements section
+    system_reqs = pixi_data.get("system-requirements", {})
+    if sys.platform == "linux":
+        system_reqs.setdefault("libc", {"family": "glibc", "version": "2.35"})
+    if cuda_version:
+        cuda_major = cuda_version.split(".")[0]
+        system_reqs["cuda"] = cuda_major
+    if system_reqs:
+        pixi_data["system-requirements"] = system_reqs
+
+    # Build dependencies section (conda packages + python + pip)
+    dependencies = pixi_data.get("dependencies", {})
+    if cfg.python:
+        py_version = cfg.python
+        log(f"Using specified Python {py_version}")
+    else:
+        py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+    dependencies.setdefault("python", f"{py_version}.*")
+    dependencies.setdefault("pip", "*")  # Always include pip
+    pixi_data["dependencies"] = dependencies
+
+    # Add pypi-options for CUDA wheels
+    if cfg.has_cuda and cuda_version:
+        pypi_options = pixi_data.get("pypi-options", {})
+        # Merge find-links (pixi expects [{url: "..."}, ...] format)
+        find_links = pypi_options.get("find-links", [])
+        existing_urls = {
+            entry.get("url") if isinstance(entry, dict) else entry
+            for entry in find_links
+        }
+        if CUDA_WHEELS_INDEX not in existing_urls:
+            find_links.append({"url": CUDA_WHEELS_INDEX})
+        # Normalize any plain strings to {url: ...} format
+        find_links = [
+            {"url": entry} if isinstance(entry, str) else entry
+            for entry in find_links
+        ]
+        pypi_options["find-links"] = find_links
+        # Merge extra-index-urls
+        cuda_short = cuda_version.replace(".", "")[:3]
+        pytorch_index = f"https://download.pytorch.org/whl/cu{cuda_short}"
+        extra_urls = pypi_options.get("extra-index-urls", [])
+        if pytorch_index not in extra_urls:
+            extra_urls.append(pytorch_index)
+        pypi_options["extra-index-urls"] = extra_urls
+        pixi_data["pypi-options"] = pypi_options
+
+    # Build pypi-dependencies section (CUDA packages excluded - installed separately)
+    pypi_deps = pixi_data.get("pypi-dependencies", {})
+
+    # Add torch if we have CUDA packages
+    if cfg.has_cuda and torch_version:
+        torch_major = torch_version.split(".")[0]
+        torch_minor = int(torch_version.split(".")[1])
+        pypi_deps.setdefault("torch", f">={torch_version},<{torch_major}.{torch_minor + 1}")
+
+    # NOTE: CUDA packages are NOT added here - they're installed with --no-deps after pixi
+
+    if pypi_deps:
+        pixi_data["pypi-dependencies"] = pypi_deps
+
+    # Write pixi.toml
+    pixi_toml = node_dir / "pixi.toml"
+    with open(pixi_toml, "wb") as f:
+        tomli_w.dump(pixi_data, f)
+    log(f"Generated {pixi_toml}")
+
+    # Run pixi install
+    log("Running pixi install...")
+    result = subprocess.run(
+        [str(pixi_path), "install"],
+        cwd=node_dir,
+        capture_output=True,
+        text=True,
+    )
+
+    if result.returncode != 0:
+        log(f"pixi install failed:\n{result.stderr}")
+        raise RuntimeError(f"pixi install failed: {result.stderr}")
+
+    # Install CUDA packages with --no-deps (avoids PyPI version conflicts)
+    if cfg.cuda_packages and cuda_version:
+        log(f"Installing CUDA packages with --no-deps: {cfg.cuda_packages}")
+        python_path = get_pixi_python(node_dir)
+        if not python_path:
+            raise RuntimeError("Could not find Python in pixi environment")
+
+        pip_cmd = [
+            str(python_path), "-m", "pip", "install",
+            "--no-deps",
+            "--find-links", CUDA_WHEELS_INDEX,
+        ] + cfg.cuda_packages
+
+        result = subprocess.run(pip_cmd, capture_output=True, text=True)
+        if result.returncode != 0:
+            log(f"CUDA package install failed:\n{result.stderr}")
+            raise RuntimeError(f"CUDA package install failed: {result.stderr}")
+        log("CUDA packages installed")
+
+    # Create symlink/junction to _env_<name> for discovery (only for isolated subdirs)
+    if create_env_link:
+        env_dir = node_dir / ".pixi" / "envs" / "default"
+        env_link = node_dir / f"_env_{node_dir.name}"
+        if env_dir.exists():
+            # Remove existing link/dir if present
+            if env_link.is_symlink() or env_link.exists():
+                if env_link.is_symlink():
+                    env_link.unlink()
+                else:
+                    shutil.rmtree(env_link)
+            # Create symlink (Linux/Mac) or junction (Windows)
+            if sys.platform == "win32":
+                # Use junction on Windows
+                subprocess.run(
+                    ["cmd", "/c", "mklink", "/J", str(env_link), str(env_dir)],
+                    capture_output=True,
+                )
+            else:
+                env_link.symlink_to(env_dir)
+            log(f"Linked: {env_link.name} -> .pixi/envs/default")
+
+    log("Installation complete!")
+    return True
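For orientation, the dict below sketches what pixi_install() would hand to tomli_w.dump() as pixi.toml for a hypothetical node "my-node" that requests one [cuda] package on a CUDA 12.8 / Python 3.11 Linux machine. The concrete name, versions, and platform are illustrative assumptions, not values taken from this diff.

# Hypothetical pixi.toml contents, shown as the Python dict pixi_install() builds.
example_pixi_data = {
    "workspace": {
        "name": "my-node",
        "version": "0.1.0",
        "channels": ["conda-forge"],
        "platforms": ["linux-64"],
    },
    "system-requirements": {
        "libc": {"family": "glibc", "version": "2.35"},
        "cuda": "12",
    },
    "dependencies": {
        "python": "3.11.*",
        "pip": "*",
    },
    "pypi-options": {
        "find-links": [{"url": "https://pozzettiandrea.github.io/cuda-wheels/"}],
        "extra-index-urls": ["https://download.pytorch.org/whl/cu128"],
    },
    "pypi-dependencies": {
        "torch": ">=2.8,<2.9",
    },
}
# The [cuda] packages themselves (e.g. nvdiffrast) are deliberately absent here:
# pixi_install() installs them afterwards with `pip install --no-deps`.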
comfy_env/{resolver.py → pixi/resolver.py}
@@ -1,22 +1,9 @@
-"""
-Runtime environment detection for wheel resolution.
-
-This module provides RuntimeEnv for detecting the current system environment
-(CUDA version, PyTorch version, Python version, platform).
-
-Example:
-    from comfy_env.resolver import RuntimeEnv
-
-    env = RuntimeEnv.detect()
-    print(f"CUDA: {env.cuda_version}, PyTorch: {env.torch_version}")
-"""
-
 import platform
 import sys
 from dataclasses import dataclass
 from typing import Dict, Optional, Tuple

-from .
+from .cuda_detection import detect_cuda_version, detect_gpu_info


 @dataclass
|
comfy_env/prestartup.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Prestartup helpers for ComfyUI custom nodes.
|
|
3
|
+
|
|
4
|
+
Call setup_env() in your prestartup_script.py before any native imports.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import glob
|
|
8
|
+
import os
|
|
9
|
+
import sys
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def setup_env(node_dir: Optional[str] = None) -> None:
|
|
15
|
+
"""
|
|
16
|
+
Set up environment for pixi conda libraries.
|
|
17
|
+
|
|
18
|
+
Call this in prestartup_script.py before any native library imports.
|
|
19
|
+
Sets LD_LIBRARY_PATH (Linux/Mac) or PATH (Windows) for conda libs,
|
|
20
|
+
and adds pixi site-packages to sys.path.
|
|
21
|
+
|
|
22
|
+
Args:
|
|
23
|
+
node_dir: Path to the custom node directory. Auto-detected if not provided.
|
|
24
|
+
|
|
25
|
+
Example:
|
|
26
|
+
# In prestartup_script.py:
|
|
27
|
+
from comfy_env import setup_env
|
|
28
|
+
setup_env()
|
|
29
|
+
"""
|
|
30
|
+
# Auto-detect node_dir from caller
|
|
31
|
+
if node_dir is None:
|
|
32
|
+
import inspect
|
|
33
|
+
frame = inspect.stack()[1]
|
|
34
|
+
node_dir = str(Path(frame.filename).parent)
|
|
35
|
+
|
|
36
|
+
pixi_env = os.path.join(node_dir, ".pixi", "envs", "default")
|
|
37
|
+
|
|
38
|
+
if not os.path.exists(pixi_env):
|
|
39
|
+
return # No pixi environment
|
|
40
|
+
|
|
41
|
+
if sys.platform == "win32":
|
|
42
|
+
# Windows: add to PATH for DLL loading
|
|
43
|
+
lib_dir = os.path.join(pixi_env, "Library", "bin")
|
|
44
|
+
if os.path.exists(lib_dir):
|
|
45
|
+
os.environ["PATH"] = lib_dir + ";" + os.environ.get("PATH", "")
|
|
46
|
+
else:
|
|
47
|
+
# Linux/Mac: LD_LIBRARY_PATH
|
|
48
|
+
lib_dir = os.path.join(pixi_env, "lib")
|
|
49
|
+
if os.path.exists(lib_dir):
|
|
50
|
+
os.environ["LD_LIBRARY_PATH"] = lib_dir + ":" + os.environ.get("LD_LIBRARY_PATH", "")
|
|
51
|
+
|
|
52
|
+
# Add site-packages to sys.path for pixi-installed Python packages
|
|
53
|
+
if sys.platform == "win32":
|
|
54
|
+
site_packages = os.path.join(pixi_env, "Lib", "site-packages")
|
|
55
|
+
else:
|
|
56
|
+
matches = glob.glob(os.path.join(pixi_env, "lib", "python*", "site-packages"))
|
|
57
|
+
site_packages = matches[0] if matches else None
|
|
58
|
+
|
|
59
|
+
if site_packages and os.path.exists(site_packages) and site_packages not in sys.path:
|
|
60
|
+
sys.path.insert(0, site_packages)
|
|
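setup_env() above is meant to be called from a node pack's prestartup_script.py, before any native extensions load. A minimal sketch following the module docstring; the explicit node_dir variant is an equivalent form with an illustrative path:

# prestartup_script.py — executed by ComfyUI before the node pack is imported.
# Must run before any native-library imports so the pixi lib directory is
# already on LD_LIBRARY_PATH (Linux/Mac) or PATH (Windows).
from comfy_env import setup_env

setup_env()  # node_dir is auto-detected from this file's location

# Equivalent explicit form (path is illustrative):
# from pathlib import Path
# setup_env(node_dir=str(Path(__file__).parent))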
comfy_env/templates/comfy-env-instructions.txt
@@ -1,103 +1,46 @@
 comfy-env Setup Instructions
 ============================

-
-
+All dependencies go through pixi (conda + pip unified).
+GPU is auto-detected to select the right CUDA version.

 QUICK START
 -----------
-1.
-
+1. pip install comfy-env
+2. comfy-env init
+3. Edit comfy-env.toml
+4. comfy-env install

-
-
+CONFIG FORMAT
+-------------
+[cuda]
+packages = ["nvdiffrast", "pytorch3d"]

-
+[conda]
+channels = ["conda-forge"]
+packages = ["mesalib", "ffmpeg"]

-
-
-install()
+[packages]
+requirements = ["numpy", "trimesh"]

-
-
-comfy-env install   # Actually install
-comfy-env doctor    # Verify all packages work
+[node_reqs]
+SomeNode = "owner/repo"

+USAGE IN __init__.py
+--------------------
+from comfy_env import install
+install()  # That's it!

-
-
-
-
-
-- Call install() in your __init__.py
-
-Case 2: Need process isolation (conflicting dependencies, conda packages)
-- Define an isolated environment with `isolated = true`
-- Use enable_isolation(NODE_CLASS_MAPPINGS) in your __init__.py
-- See PROCESS ISOLATION section below
-
-Case 3: Need system packages (apt)
-- Add to [system] linux = ["package1", "package2"]
-
+For isolated nodes:
+from comfy_env import wrap_isolated_nodes
+NODE_CLASS_MAPPINGS.update(
+    wrap_isolated_nodes(isolated_nodes, Path(__file__).parent / "nodes/isolated")
+)

 CLI COMMANDS
 ------------
-
-
-
-comfy-env info            Show detected environment (Python, CUDA, PyTorch)
-comfy-env resolve PKG     Show resolved wheel URL for a package
-comfy-env doctor          Verify installation
-comfy-env list-packages   Show all packages in built-in registry
-
-
-PROCESS ISOLATION
------------------
-For nodes that need isolated dependencies:
-
-RECOMMENDED: Pack-wide isolation (all nodes in same isolated env)
-
-from comfy_env import setup_isolated_imports, enable_isolation
-
-# Setup import stubs BEFORE importing nodes
-setup_isolated_imports(__file__)
-
-from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
-
-# Enable isolation for all nodes
-enable_isolation(NODE_CLASS_MAPPINGS)
-
-Requires `isolated = true` in comfy-env.toml:
-
-[mypack]
-python = "3.11"
-isolated = true
-
-[mypack.packages]
-requirements = ["trimesh", "scipy"]
-
-ALTERNATIVE: Per-node isolation (for multiple isolated envs)
-
-from comfy_env import isolated
-
-@isolated(env="myenv")
-class MyNode:
-    FUNCTION = "process"
-    def process(self, image):
-        import conflicting_lib
-        return (result,)
-
-How it works:
-- Runs FUNCTION methods in a separate Python process
-- Tensors/numpy arrays passed by value (efficient)
-- Complex objects (meshes, etc.) passed by reference
-
-
-TROUBLESHOOTING
----------------
-- "Package X not found in registry": Add custom wheel URL to [wheel_sources]
-- "CUDA not detected": Ensure PyTorch with CUDA is installed in ComfyUI
-- "Worker failed to connect": Check the isolated env was set up correctly
-- Import errors: Run `comfy-env doctor` to verify packages
+comfy-env init      Create comfy-env.toml template
+comfy-env install   Install all dependencies
+comfy-env info      Show detected GPU and CUDA version

-For more
+For more: https://github.com/PozzettiAndrea/comfy-env
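Putting the two usage snippets from the instructions together, a pack's __init__.py might look like the sketch below. The nodes/ module layout and the shape of isolated_nodes are assumptions for illustration; the template above only shows the install() and wrap_isolated_nodes(...) calls themselves.

# __init__.py — sketch only; module layout and isolated_nodes shape are assumed.
from pathlib import Path

from comfy_env import install, wrap_isolated_nodes

install()  # read comfy-env.toml and install everything via pixi

# Regular (non-isolated) nodes, imported after install() so dependencies exist.
from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS  # noqa: E402

# Hypothetical registry of nodes that should run inside the isolated pixi env.
isolated_nodes = {"MyHeavyNode": "MyHeavyNode"}

NODE_CLASS_MAPPINGS.update(
    wrap_isolated_nodes(isolated_nodes, Path(__file__).parent / "nodes/isolated")
)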
|