comfy-env 0.0.64-py3-none-any.whl → 0.0.66-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +70 -122
- comfy_env/cli.py +78 -7
- comfy_env/config/__init__.py +19 -0
- comfy_env/config/parser.py +151 -0
- comfy_env/config/types.py +64 -0
- comfy_env/install.py +83 -361
- comfy_env/isolation/__init__.py +9 -0
- comfy_env/isolation/wrap.py +351 -0
- comfy_env/nodes.py +2 -2
- comfy_env/pixi/__init__.py +48 -0
- comfy_env/pixi/core.py +356 -0
- comfy_env/{resolver.py → pixi/resolver.py} +1 -14
- comfy_env/prestartup.py +60 -0
- comfy_env/templates/comfy-env-instructions.txt +30 -87
- comfy_env/templates/comfy-env.toml +68 -136
- comfy_env/workers/__init__.py +21 -32
- comfy_env/workers/base.py +1 -1
- comfy_env/workers/{torch_mp.py → mp.py} +47 -14
- comfy_env/workers/{venv.py → subprocess.py} +405 -441
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/METADATA +2 -1
- comfy_env-0.0.66.dist-info/RECORD +34 -0
- comfy_env/decorator.py +0 -700
- comfy_env/env/__init__.py +0 -47
- comfy_env/env/config.py +0 -201
- comfy_env/env/config_file.py +0 -740
- comfy_env/env/manager.py +0 -636
- comfy_env/env/security.py +0 -267
- comfy_env/ipc/__init__.py +0 -55
- comfy_env/ipc/bridge.py +0 -476
- comfy_env/ipc/protocol.py +0 -265
- comfy_env/ipc/tensor.py +0 -371
- comfy_env/ipc/torch_bridge.py +0 -401
- comfy_env/ipc/transport.py +0 -318
- comfy_env/ipc/worker.py +0 -221
- comfy_env/isolation.py +0 -310
- comfy_env/pixi.py +0 -760
- comfy_env/stub_imports.py +0 -270
- comfy_env/stubs/__init__.py +0 -1
- comfy_env/stubs/comfy/__init__.py +0 -6
- comfy_env/stubs/comfy/model_management.py +0 -58
- comfy_env/stubs/comfy/utils.py +0 -29
- comfy_env/stubs/folder_paths.py +0 -71
- comfy_env/workers/pool.py +0 -241
- comfy_env-0.0.64.dist-info/RECORD +0 -48
- /comfy_env/{env/cuda_gpu_detection.py → pixi/cuda_detection.py} +0 -0
- /comfy_env/{env → pixi}/platform/__init__.py +0 -0
- /comfy_env/{env → pixi}/platform/base.py +0 -0
- /comfy_env/{env → pixi}/platform/darwin.py +0 -0
- /comfy_env/{env → pixi}/platform/linux.py +0 -0
- /comfy_env/{env → pixi}/platform/windows.py +0 -0
- /comfy_env/{registry.py → pixi/registry.py} +0 -0
- /comfy_env/{wheel_sources.yml → pixi/wheel_sources.yml} +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.64.dist-info → comfy_env-0.0.66.dist-info}/licenses/LICENSE +0 -0
comfy_env/pixi.py
DELETED
@@ -1,760 +0,0 @@
"""
Pixi integration for comfy-env.

Pixi is a fast package manager that supports both conda and pip packages.
When an environment has conda packages defined, we use pixi as the backend
instead of uv.

See: https://pixi.sh/
"""

import os
import platform
import re
import shutil
import stat
import subprocess
import sys
from pathlib import Path
from typing import Callable, List, Optional, Tuple

from .env.config import IsolatedEnv, CondaConfig


# Pixi download URLs by platform
PIXI_URLS = {
    ("Linux", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-unknown-linux-musl",
    ("Linux", "aarch64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-unknown-linux-musl",
    ("Darwin", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-apple-darwin",
    ("Darwin", "arm64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-apple-darwin",
    ("Windows", "AMD64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-pc-windows-msvc.exe",
}


def get_pixi_path() -> Optional[Path]:
    """
    Find the pixi executable.

    Checks:
    1. System PATH
    2. ~/.pixi/bin/pixi
    3. ~/.local/bin/pixi

    Returns:
        Path to pixi executable, or None if not found.
    """
    # Check system PATH
    pixi_cmd = shutil.which("pixi")
    if pixi_cmd:
        return Path(pixi_cmd)

    # Check common install locations
    home = Path.home()
    candidates = [
        home / ".pixi" / "bin" / "pixi",
        home / ".local" / "bin" / "pixi",
    ]

    # Add .exe on Windows
    if sys.platform == "win32":
        candidates = [p.with_suffix(".exe") for p in candidates]

    for candidate in candidates:
        if candidate.exists():
            return candidate

    return None


def ensure_pixi(
    install_dir: Optional[Path] = None,
    log: Callable[[str], None] = print,
) -> Path:
    """
    Ensure pixi is installed, downloading if necessary.

    Args:
        install_dir: Directory to install pixi to. Defaults to ~/.local/bin/
        log: Logging callback.

    Returns:
        Path to pixi executable.

    Raises:
        RuntimeError: If pixi cannot be installed.
    """
    # Check if already installed
    existing = get_pixi_path()
    if existing:
        log(f"Found pixi at: {existing}")
        return existing

    log("Pixi not found, downloading...")

    # Determine install location
    if install_dir is None:
        install_dir = Path.home() / ".local" / "bin"
    install_dir.mkdir(parents=True, exist_ok=True)

    # Determine download URL
    system = platform.system()
    machine = platform.machine()

    # Normalize machine name
    if machine in ("x86_64", "AMD64"):
        machine = "x86_64" if system != "Windows" else "AMD64"
    elif machine in ("arm64", "aarch64"):
        machine = "arm64" if system == "Darwin" else "aarch64"

    url_key = (system, machine)
    if url_key not in PIXI_URLS:
        raise RuntimeError(
            f"No pixi download available for {system}/{machine}. "
            f"Available: {list(PIXI_URLS.keys())}"
        )

    url = PIXI_URLS[url_key]
    pixi_path = install_dir / ("pixi.exe" if system == "Windows" else "pixi")

    log(f"Downloading pixi from: {url}")

    # Download using curl or urllib
    try:
        import urllib.request
        urllib.request.urlretrieve(url, pixi_path)
    except Exception as e:
        # Try curl as fallback
        result = subprocess.run(
            ["curl", "-fsSL", "-o", str(pixi_path), url],
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            raise RuntimeError(f"Failed to download pixi: {result.stderr}") from e

    # Make executable on Unix
    if system != "Windows":
        pixi_path.chmod(pixi_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    # Verify installation
    result = subprocess.run([str(pixi_path), "--version"], capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"Pixi installation failed: {result.stderr}")

    log(f"Installed pixi {result.stdout.strip()} to: {pixi_path}")
    return pixi_path


def _parse_pypi_requirement(dep: str) -> Tuple[str, Optional[str], List[str]]:
    """
    Parse a pip requirement into (name, version_spec, extras).

    Examples:
        "trimesh[easy]>=4.0.0" -> ("trimesh", ">=4.0.0", ["easy"])
        "numpy>=1.21.0" -> ("numpy", ">=1.21.0", [])
        "torch" -> ("torch", None, [])
        "pkg[a,b]" -> ("pkg", None, ["a", "b"])

    Returns:
        Tuple of (package_name, version_spec_or_None, list_of_extras)
    """
    dep = dep.strip()

    # Match: name[extras]version_spec or name version_spec
    # Package names can contain letters, numbers, underscores, hyphens, and dots
    match = re.match(r'^([a-zA-Z0-9._-]+)(?:\[([^\]]+)\])?(.*)$', dep)
    if not match:
        return dep, None, []

    name = match.group(1)
    extras_str = match.group(2)
    version_spec = match.group(3).strip() if match.group(3) else None

    extras = []
    if extras_str:
        extras = [e.strip() for e in extras_str.split(',')]

    # Return None instead of empty string for version_spec
    if version_spec == "":
        version_spec = None

    return name, version_spec, extras


def _build_cuda_vars(env_config: IsolatedEnv) -> dict:
    """
    Build variable dict for CUDA wheel URL resolution.

    Returns a dict with CUDA, PyTorch, Python, and platform variables
    for template substitution.
    """
    # Get CUDA/PyTorch versions from env_config (resolved from "auto" based on GPU arch)
    # Pascal or below: CUDA 12.4, PyTorch 2.4.0
    # Turing+: CUDA 12.8, PyTorch 2.8.0
    cuda_version = env_config.cuda or "12.8"
    torch_version = env_config.pytorch_version or "2.8.0"

    # Parse CUDA version
    cuda_parts = cuda_version.split(".")
    cuda_short = "".join(cuda_parts[:2])  # "12.8" -> "128"

    # Parse PyTorch version
    torch_parts = torch_version.split(".")
    torch_short = "".join(torch_parts)  # "2.8.0" -> "280"
    torch_mm = "".join(torch_parts[:2])  # "2.8.0" -> "28"
    torch_dotted_mm = ".".join(torch_parts[:2])  # "2.8.0" -> "2.8"

    vars_dict = {
        "cuda_version": cuda_version,
        "cuda_short": cuda_short,
        "cuda_short2": cuda_short,
        "cuda_major": cuda_parts[0],
        "torch_version": torch_version,
        "torch_short": torch_short,
        "torch_mm": torch_mm,
        "torch_dotted_mm": torch_dotted_mm,
    }

    # Platform detection
    if sys.platform == "linux":
        vars_dict["platform"] = "linux_x86_64"
    elif sys.platform == "darwin":
        vars_dict["platform"] = "macosx_arm64" if platform.machine() == "arm64" else "macosx_x86_64"
    elif sys.platform == "win32":
        vars_dict["platform"] = "win_amd64"

    # Python version from env config
    if env_config.python:
        py_parts = env_config.python.split(".")
        py_major = py_parts[0]
        py_minor = py_parts[1] if len(py_parts) > 1 else "0"
        vars_dict["py_version"] = env_config.python
        vars_dict["py_short"] = f"{py_major}{py_minor}"
        vars_dict["py_minor"] = py_minor
        vars_dict["py_tag"] = f"cp{py_major}{py_minor}"

    return vars_dict


def _resolve_cuda_wheel_url(
    req: str,
    vars_dict: dict,
    log: Callable[[str], None] = print,
) -> Optional[str]:
    """
    Resolve a CUDA package requirement to a wheel URL.

    Args:
        req: Package requirement (e.g., "cumesh" or "cumesh==0.0.1")
        vars_dict: Variable dict for URL template substitution
        log: Logging callback

    Returns:
        Resolved wheel URL, or None if package not in registry.
    """
    from .registry import PACKAGE_REGISTRY

    # Parse requirement (e.g., "cumesh" or "cumesh==0.0.1")
    if "==" in req:
        pkg_name, version = req.split("==", 1)
    else:
        pkg_name = req
        version = None

    pkg_lower = pkg_name.lower()
    if pkg_lower not in PACKAGE_REGISTRY:
        log(f" Warning: CUDA package {pkg_name} not in registry")
        return None

    config = PACKAGE_REGISTRY[pkg_lower]
    template = config.get("wheel_template")
    if not template:
        log(f" Warning: No wheel template for {pkg_name}")
        return None

    # Use version from requirement or default
    v = version or config.get("default_version")
    if v:
        vars_dict = vars_dict.copy()  # Don't mutate original
        vars_dict["version"] = v

    # Resolve URL template
    url = template
    for key, value in vars_dict.items():
        if value:
            url = url.replace(f"{{{key}}}", str(value))

    return url


def create_pixi_toml(
    env_config: IsolatedEnv,
    node_dir: Path,
    log: Callable[[str], None] = print,
) -> Path:
    """
    Generate a pixi.toml file from the environment configuration.

    The generated pixi.toml includes:
    - Project metadata
    - Conda channels
    - Conda dependencies
    - PyPI dependencies (from requirements)

    Note: CUDA packages (no_deps_requirements) are NOT included in pixi.toml.
    They are installed separately with pip --no-deps after pixi install
    to avoid transitive dependency conflicts.

    Args:
        env_config: The isolated environment configuration.
        node_dir: Directory to write pixi.toml to.
        log: Logging callback.

    Returns:
        Path to the generated pixi.toml file.
    """
    # Conda is optional - use defaults if not present
    if env_config.conda:
        conda = env_config.conda
    else:
        from comfy_env.env.config import CondaConfig
        conda = CondaConfig(channels=["conda-forge"], packages=[])
    pixi_toml_path = node_dir / "pixi.toml"

    # Build pixi.toml content
    lines = []

    # Project section
    lines.append("[workspace]")
    lines.append(f'name = "{env_config.name}"')
    lines.append('version = "0.1.0"')

    # Channels - use conda-forge only (pytorch channel is deprecated)
    # PyTorch is installed via PyPI with extra-index-urls for proper CUDA version support
    base_channels = conda.channels or ["conda-forge"]
    channels_str = ", ".join(f'"{ch}"' for ch in base_channels)
    lines.append(f"channels = [{channels_str}]")

    # Platforms
    if sys.platform == "linux":
        lines.append('platforms = ["linux-64"]')
    elif sys.platform == "darwin":
        if platform.machine() == "arm64":
            lines.append('platforms = ["osx-arm64"]')
        else:
            lines.append('platforms = ["osx-64"]')
    elif sys.platform == "win32":
        lines.append('platforms = ["win-64"]')

    # System requirements - specify glibc version for proper wheel resolution
    # Ubuntu 22.04+ has glibc 2.35, enabling manylinux_2_35 wheels
    if sys.platform == "linux":
        lines.append("")
        lines.append("[system-requirements]")
        lines.append('libc = { family = "glibc", version = "2.35" }')

    lines.append("")

    # Dependencies section (conda packages)
    lines.append("[dependencies]")
    lines.append(f'python = "{env_config.python}.*"')
    lines.append('pip = "*"')  # Required for installing CUDA packages with --no-deps

    # On Windows, use MKL BLAS to avoid OpenBLAS crashes (numpy blas_fpe_check issue)
    if sys.platform == "win32":
        lines.append('libblas = { version = "*", build = "*mkl" }')

    for pkg in conda.packages:
        # Parse package spec (name=version or name>=version or name<version or just name)
        if ">=" in pkg:
            name, version = pkg.split(">=", 1)
            lines.append(f'{name} = ">={version}"')
        elif "<=" in pkg:
            name, version = pkg.split("<=", 1)
            lines.append(f'{name} = "<={version}"')
        elif "==" in pkg:
            name, version = pkg.split("==", 1)
            lines.append(f'{name} = "=={version}"')
        elif ">" in pkg:
            name, version = pkg.split(">", 1)
            lines.append(f'{name} = ">{version}"')
        elif "<" in pkg:
            name, version = pkg.split("<", 1)
            lines.append(f'{name} = "<{version}"')
        elif "=" in pkg and not pkg.startswith("="):
            # Single = means exact version in conda
            name, version = pkg.split("=", 1)
            lines.append(f'{name} = "=={version}"')
        else:
            # No version, use any
            lines.append(f'{pkg} = "*"')

    lines.append("")

    # PyPI dependencies section
    pypi_deps = []
    special_deps = {}  # For dependencies that need special syntax (path, etc.)

    # Always include comfy-env for worker support
    # Priority: 1. COMFY_LOCAL_WHEELS env var, 2. ~/utils/comfy-env, 3. PyPI
    local_wheels_dir = os.environ.get("COMFY_LOCAL_WHEELS")
    if local_wheels_dir:
        local_wheels = list(Path(local_wheels_dir).glob("comfy_env-*.whl"))
        if local_wheels:
            # Copy wheel to node_dir (next to pixi.toml) for simple relative path
            wheel_name = local_wheels[0].name
            wheel_dest = node_dir / wheel_name
            if not wheel_dest.exists():
                shutil.copy(local_wheels[0], wheel_dest)
            # Reference with simple relative path (forward slashes, no backslash issues)
            special_deps["comfy-env"] = f'{{ path = "./{wheel_name}" }}'
        else:
            pypi_deps.append("comfy-env")
    else:
        # Check for local editable comfy-env at ~/utils/comfy-env
        local_comfy_env = Path.home() / "utils" / "comfy-env"
        if local_comfy_env.exists() and (local_comfy_env / "pyproject.toml").exists():
            # Use forward slashes for TOML compatibility
            path_str = local_comfy_env.as_posix()
            special_deps["comfy-env"] = f'{{ path = "{path_str}", editable = true }}'
        else:
            pypi_deps.append("comfy-env")

    # Add regular requirements
    if env_config.requirements:
        pypi_deps.extend(env_config.requirements)

    # NOTE: CUDA packages (no_deps_requirements) are NOT added to pixi.toml.
    # They are installed separately with pip --no-deps after pixi install
    # to avoid transitive dependency conflicts. See pixi_install().

    # Add platform-specific requirements
    if sys.platform == "linux" and env_config.linux_requirements:
        pypi_deps.extend(env_config.linux_requirements)
    elif sys.platform == "darwin" and env_config.darwin_requirements:
        pypi_deps.extend(env_config.darwin_requirements)
    elif sys.platform == "win32" and env_config.windows_requirements:
        pypi_deps.extend(env_config.windows_requirements)

    # PyPI options for PyTorch - use CUDA index for GPU, plain PyPI for CPU
    # The pytorch conda channel is deprecated, so we use PyPI wheels
    if env_config.cuda:
        # GPU detected - use CUDA-enabled PyTorch wheels
        cuda_parts = env_config.cuda.split(".")
        cuda_short = "".join(cuda_parts[:2])  # "12.8" -> "128"
        lines.append("[pypi-options]")
        lines.append(f'extra-index-urls = ["https://download.pytorch.org/whl/cu{cuda_short}"]')
        lines.append("")
    # For CPU-only, no extra index needed - plain torch from PyPI works

    # Add torch to pypi dependencies
    torch_version = env_config.pytorch_version or "2.8.0"
    torch_parts = torch_version.split(".")
    torch_mm = ".".join(torch_parts[:2])  # "2.8.0" -> "2.8"
    pypi_deps.insert(0, f"torch>={torch_mm},<{torch_parts[0]}.{int(torch_parts[1])+1}")

    if pypi_deps or special_deps:
        lines.append("[pypi-dependencies]")

        # Add special dependencies first (path-based, etc.)
        for name, value in special_deps.items():
            lines.append(f'{name} = {value}')

        for dep in pypi_deps:
            # Handle git dependencies in two formats:
            # 1. pkg @ git+https://github.com/user/repo.git@commit
            # 2. git+https://github.com/user/repo.git@commit (extract name from URL)
            if "git+" in dep:
                if " @ git+" in dep:
                    # Format: pkg @ git+URL@commit
                    match = re.match(r'^([a-zA-Z0-9._-]+)\s*@\s*git\+(.+?)(?:@([a-f0-9]+))?$', dep)
                    if match:
                        pkg_name = match.group(1)
                        git_url = match.group(2)
                        rev = match.group(3)
                else:
                    # Format: git+URL@commit (extract package name from repo name)
                    match = re.match(r'^git\+(.+?)(?:@([a-f0-9]+))?$', dep)
                    if match:
                        git_url = match.group(1)
                        rev = match.group(2)
                        # Extract package name from URL (repo name without .git)
                        repo_match = re.search(r'/([^/]+?)(?:\.git)?$', git_url)
                        pkg_name = repo_match.group(1) if repo_match else git_url.split('/')[-1].replace('.git', '')

                if match:
                    if rev:
                        lines.append(f'{pkg_name} = {{ git = "{git_url}", rev = "{rev}" }}')
                    else:
                        lines.append(f'{pkg_name} = {{ git = "{git_url}" }}')
                    continue

            # Parse pip requirement format to pixi format
            # Handles extras like trimesh[easy]>=4.0.0
            name, version_spec, extras = _parse_pypi_requirement(dep)

            if extras:
                # Use table syntax for packages with extras
                # e.g., trimesh = { version = ">=4.0.0", extras = ["easy"] }
                extras_json = "[" + ", ".join(f'"{e}"' for e in extras) + "]"
                if version_spec:
                    lines.append(f'{name} = {{ version = "{version_spec}", extras = {extras_json} }}')
                else:
                    lines.append(f'{name} = {{ version = "*", extras = {extras_json} }}')
            else:
                # Simple syntax for packages without extras
                if version_spec:
                    lines.append(f'{name} = "{version_spec}"')
                else:
                    lines.append(f'{name} = "*"')

    content = "\n".join(lines) + "\n"

    # Write the file
    pixi_toml_path.write_text(content)
    log(f"Generated pixi.toml at: {pixi_toml_path}")

    return pixi_toml_path


def clean_pixi_artifacts(
    node_dir: Path,
    env_name: Optional[str] = None,
    log: Callable[[str], None] = print,
) -> None:
    """
    Remove previous pixi installation artifacts.

    This ensures a clean state before generating a new pixi.toml,
    preventing stale lock files or cached environments from causing conflicts.

    Args:
        node_dir: Directory containing the pixi artifacts.
        env_name: Environment name (for removing _env_ symlink).
        log: Logging callback.
    """
    pixi_toml = node_dir / "pixi.toml"
    pixi_lock = node_dir / "pixi.lock"
    pixi_dir = node_dir / ".pixi"

    if pixi_toml.exists():
        pixi_toml.unlink()
        log(" Removed previous pixi.toml")
    if pixi_lock.exists():
        pixi_lock.unlink()
        log(" Removed previous pixi.lock")
    if pixi_dir.exists():
        shutil.rmtree(pixi_dir)
        log(" Removed previous .pixi/ directory")

    # Remove _env_ symlink if it exists
    if env_name:
        symlink_path = node_dir / f"_env_{env_name}"
        if symlink_path.is_symlink():
            symlink_path.unlink()
            log(f" Removed previous _env_{env_name} symlink")


def pixi_install(
    env_config: IsolatedEnv,
    node_dir: Path,
    log: Callable[[str], None] = print,
    dry_run: bool = False,
) -> bool:
    """
    Install conda and pip packages using pixi.

    This is the main entry point for pixi-based installation. It:
    1. Cleans previous pixi artifacts
    2. Ensures pixi is installed
    3. Generates pixi.toml from the config
    4. Runs `pixi install` to install all dependencies

    Args:
        env_config: The isolated environment configuration.
        node_dir: Directory containing the node (where pixi.toml will be created).
        log: Logging callback.
        dry_run: If True, only show what would be done.

    Returns:
        True if installation succeeded.

    Raises:
        RuntimeError: If installation fails.
    """
    log(f"Installing {env_config.name} with pixi backend...")

    if dry_run:
        log("Dry run - would:")
        log(f" - Clean previous pixi artifacts")
        log(f" - Ensure pixi is installed")
        log(f" - Generate pixi.toml in {node_dir}")
        if env_config.conda:
            log(f" - Install {len(env_config.conda.packages)} conda packages")
        if env_config.requirements:
            log(f" - Install {len(env_config.requirements)} pip packages")
        if env_config.no_deps_requirements:
            log(f" - Install {len(env_config.no_deps_requirements)} CUDA packages: {', '.join(env_config.no_deps_requirements)}")
        return True

    # Clean previous pixi artifacts
    clean_pixi_artifacts(node_dir, env_config.name, log)

    # Ensure pixi is installed
    pixi_path = ensure_pixi(log=log)

    # Generate pixi.toml
    pixi_toml = create_pixi_toml(env_config, node_dir, log)

    # Run pixi install
    log("Running pixi install...")

    # Build environment with custom vars from config
    install_env = os.environ.copy()
    if env_config.env_vars:
        install_env.update(env_config.env_vars)
        # Disable build isolation so CMAKE_ARGS propagates to builds
        if 'CMAKE_ARGS' in env_config.env_vars:
            install_env['UV_NO_BUILD_ISOLATION'] = '1'
        log(f" Using custom env vars: {list(env_config.env_vars.keys())}")

    result = subprocess.run(
        [str(pixi_path), "install"],
        cwd=node_dir,
        capture_output=True,
        text=True,
        env=install_env,
    )

    if result.returncode != 0:
        log(f"pixi install failed:")
        log(result.stderr)
        raise RuntimeError(f"pixi install failed: {result.stderr}")

    if result.stdout:
        # Log output, but filter for key info
        for line in result.stdout.strip().split("\n"):
            if line.strip():
                log(f" {line}")

    log("pixi install completed successfully!")

    # Phase 2: Install CUDA packages with pip --no-deps
    # These are kept out of pixi.toml to avoid transitive dependency conflicts
    # Skip on macOS - CUDA is not supported
    if env_config.no_deps_requirements:
        if sys.platform == "darwin":
            log("Skipping CUDA packages (not supported on macOS)")
        else:
            log("Installing CUDA packages with --no-deps...")
            python_path = get_pixi_python(node_dir)
            if not python_path:
                raise RuntimeError("Failed to find Python in pixi environment")

            vars_dict = _build_cuda_vars(env_config)

            for req in env_config.no_deps_requirements:
                url = _resolve_cuda_wheel_url(req, vars_dict, log)
                if url:
                    log(f" Installing {req} (--no-deps)...")
                    result = subprocess.run(
                        [str(python_path), "-m", "pip", "install", "--no-deps", url],
                        capture_output=True,
                        text=True,
                    )
                    if result.returncode != 0:
                        log(f" Failed to install {req}:")
                        log(result.stderr)
                        raise RuntimeError(f"Failed to install {req} with --no-deps: {result.stderr}")
                else:
                    log(f" Warning: Could not resolve wheel URL for {req}, skipping")

    # Create _env_{name} link for compatibility with uv backend
    # This ensures code that expects _env_envname/bin/python works with pixi
    symlink_path = node_dir / f"_env_{env_config.name}"
    pixi_env_path = node_dir / ".pixi" / "envs" / "default"

    if pixi_env_path.exists():
        # Remove existing symlink/junction or directory if present
        if symlink_path.is_symlink() or (sys.platform == "win32" and symlink_path.is_dir()):
            # On Windows, junctions appear as directories but can be removed with rmdir
            try:
                symlink_path.unlink()
            except (OSError, PermissionError):
                # Junction on Windows - remove with rmdir (doesn't delete contents)
                subprocess.run(["cmd", "/c", "rmdir", str(symlink_path)], capture_output=True)
        elif symlink_path.exists():
            shutil.rmtree(symlink_path)

        # On Windows, use directory junctions (no admin required) instead of symlinks
        if sys.platform == "win32":
            # mklink /J creates a directory junction (no admin privileges needed)
            result = subprocess.run(
                ["cmd", "/c", "mklink", "/J", str(symlink_path), str(pixi_env_path)],
                capture_output=True,
                text=True
            )
            if result.returncode == 0:
                log(f"Created junction: _env_{env_config.name} -> .pixi/envs/default")
            else:
                log(f"Warning: Failed to create junction: {result.stderr}")
        else:
            symlink_path.symlink_to(pixi_env_path)
            log(f"Created symlink: _env_{env_config.name} -> .pixi/envs/default")

    return True


def get_pixi_python(node_dir: Path) -> Optional[Path]:
    """
    Get the path to the Python interpreter in the pixi environment.

    Args:
        node_dir: Directory containing pixi.toml.

    Returns:
        Path to Python executable in the pixi env, or None if not found.
    """
    # Pixi creates .pixi/envs/default/ in the project directory
    env_dir = node_dir / ".pixi" / "envs" / "default"

    if sys.platform == "win32":
        python_path = env_dir / "python.exe"
    else:
        python_path = env_dir / "bin" / "python"

    if python_path.exists():
        return python_path

    return None


def pixi_run(
    command: List[str],
    node_dir: Path,
    log: Callable[[str], None] = print,
) -> subprocess.CompletedProcess:
    """
    Run a command in the pixi environment.

    Args:
        command: Command and arguments to run.
        node_dir: Directory containing pixi.toml.
        log: Logging callback.

    Returns:
        CompletedProcess result.
    """
    pixi_path = get_pixi_path()
    if not pixi_path:
        raise RuntimeError("Pixi not found")

    full_cmd = [str(pixi_path), "run"] + command
    log(f"Running: pixi run {' '.join(command)}")

    return subprocess.run(
        full_cmd,
        cwd=node_dir,
        capture_output=True,
        text=True,
    )
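For orientation, this is roughly the pixi.toml that the removed create_pixi_toml would have written on Linux for a hypothetical config (project name "my-node", Python 3.11, CUDA 12.8, one conda package "openexr", pip requirements "trimesh[easy]>=4.0.0" and "numpy", comfy-env resolved from PyPI). The values are illustrative assumptions traced through the code above, not output shipped with either wheel:

[workspace]
name = "my-node"
version = "0.1.0"
channels = ["conda-forge"]
platforms = ["linux-64"]

[system-requirements]
libc = { family = "glibc", version = "2.35" }

[dependencies]
python = "3.11.*"
pip = "*"
openexr = "*"

[pypi-options]
extra-index-urls = ["https://download.pytorch.org/whl/cu128"]

[pypi-dependencies]
torch = ">=2.8,<2.9"
comfy-env = "*"
trimesh = { version = ">=4.0.0", extras = ["easy"] }
numpy = "*"

Any CUDA-only packages listed under no_deps_requirements would not appear here; per the code above they were installed afterwards with pip --no-deps into .pixi/envs/default.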