comfy-env 0.0.49__py3-none-any.whl → 0.0.51__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +7 -0
- comfy_env/decorator.py +252 -1
- comfy_env/env/config.py +2 -0
- comfy_env/env/config_file.py +4 -0
- comfy_env/install.py +38 -104
- comfy_env/isolation.py +297 -0
- comfy_env/pixi.py +131 -24
- comfy_env/stub_imports.py +310 -0
- comfy_env/templates/comfy-env-instructions.txt +31 -10
- comfy_env/templates/comfy-env.toml +36 -61
- comfy_env/workers/venv.py +179 -12
- {comfy_env-0.0.49.dist-info → comfy_env-0.0.51.dist-info}/METADATA +62 -4
- {comfy_env-0.0.49.dist-info → comfy_env-0.0.51.dist-info}/RECORD +16 -14
- {comfy_env-0.0.49.dist-info → comfy_env-0.0.51.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.49.dist-info → comfy_env-0.0.51.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.49.dist-info → comfy_env-0.0.51.dist-info}/licenses/LICENSE +0 -0
comfy_env/isolation.py
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Process isolation for ComfyUI node packs.
|
|
3
|
+
|
|
4
|
+
This module provides enable_isolation() which wraps all node classes
|
|
5
|
+
to run their FUNCTION methods in an isolated Python environment.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
# In your node pack's __init__.py:
|
|
9
|
+
from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
|
|
10
|
+
from comfy_env import enable_isolation
|
|
11
|
+
|
|
12
|
+
enable_isolation(NODE_CLASS_MAPPINGS) # That's it!
|
|
13
|
+
|
|
14
|
+
This requires `isolated = true` in comfy-env.toml:
|
|
15
|
+
|
|
16
|
+
[myenv]
|
|
17
|
+
python = "3.11"
|
|
18
|
+
isolated = true
|
|
19
|
+
|
|
20
|
+
[myenv.packages]
|
|
21
|
+
requirements = ["my-package"]
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
import atexit
|
|
25
|
+
import inspect
|
|
26
|
+
import os
|
|
27
|
+
import sys
|
|
28
|
+
import threading
|
|
29
|
+
from functools import wraps
|
|
30
|
+
from pathlib import Path
|
|
31
|
+
from typing import Any, Dict, Optional
|
|
32
|
+
|
|
33
|
+
# Global worker cache (one per isolated environment).
# Entries are keyed by the isolated interpreter path (str(python_path));
# all access is guarded by _workers_lock.
_workers: Dict[str, Any] = {}
_workers_lock = threading.Lock()
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _get_worker(
    env_name: str,
    python_path: Path,
    working_dir: Path,
    sys_path: list[str],
):
    """Return a live persistent worker for this environment, creating one if needed.

    Workers are cached in the module-level ``_workers`` dict, keyed by the
    isolated interpreter path, so repeated node calls reuse one subprocess.
    A cached worker that has died is transparently replaced.
    """
    from .workers.venv import PersistentVenvWorker

    key = str(python_path)

    with _workers_lock:
        cached = _workers.get(key)
        if cached is not None and cached.is_alive():
            return cached
        # Either no worker exists yet, or the previous one died — start fresh.

        print(f"[comfy-env] Starting isolated worker: {env_name}")
        print(f"[comfy-env] Python: {python_path}")

        fresh = PersistentVenvWorker(
            python=str(python_path),
            working_dir=working_dir,
            sys_path=sys_path,
            name=env_name,
        )
        _workers[key] = fresh
        return fresh
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _shutdown_workers():
    """Best-effort shutdown of every cached worker (registered via atexit)."""
    with _workers_lock:
        for worker in _workers.values():
            try:
                worker.shutdown()
            except Exception:
                # A worker that already exited can fail to shut down cleanly;
                # during interpreter teardown we just keep going.
                pass
        _workers.clear()


atexit.register(_shutdown_workers)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _find_python_path(node_dir: Path, env_name: str) -> Optional[Path]:
|
|
84
|
+
"""
|
|
85
|
+
Find the Python executable for the isolated environment.
|
|
86
|
+
|
|
87
|
+
Priority:
|
|
88
|
+
1. .pixi/envs/default/bin/python (pixi/conda environment)
|
|
89
|
+
2. _env_{name}/bin/python (uv venv)
|
|
90
|
+
3. _env_{name}/Scripts/python.exe (Windows uv venv)
|
|
91
|
+
"""
|
|
92
|
+
# Check pixi environment first
|
|
93
|
+
if sys.platform == "win32":
|
|
94
|
+
pixi_python = node_dir / ".pixi" / "envs" / "default" / "python.exe"
|
|
95
|
+
else:
|
|
96
|
+
pixi_python = node_dir / ".pixi" / "envs" / "default" / "bin" / "python"
|
|
97
|
+
|
|
98
|
+
if pixi_python.exists():
|
|
99
|
+
return pixi_python
|
|
100
|
+
|
|
101
|
+
# Check _env_* directory (uv venv)
|
|
102
|
+
env_dir = node_dir / f"_env_{env_name}"
|
|
103
|
+
if sys.platform == "win32":
|
|
104
|
+
env_python = env_dir / "Scripts" / "python.exe"
|
|
105
|
+
else:
|
|
106
|
+
env_python = env_dir / "bin" / "python"
|
|
107
|
+
|
|
108
|
+
if env_python.exists():
|
|
109
|
+
return env_python
|
|
110
|
+
|
|
111
|
+
return None
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _wrap_node_class(
    cls: type,
    env_name: str,
    python_path: Path,
    working_dir: Path,
    sys_path: list[str],
) -> type:
    """
    Patch a node class so its FUNCTION method executes in the isolated env.

    Args:
        cls: The node class to wrap (modified in place).
        env_name: Name of the isolated environment.
        python_path: Path to the isolated Python executable.
        working_dir: Working directory for the worker.
        sys_path: Additional paths to add to sys.path in the worker.

    Returns:
        The same class; unchanged when it cannot be wrapped (no FUNCTION
        attribute, missing method, or no resolvable source file).
    """
    func_name = getattr(cls, "FUNCTION", None)
    if not func_name:
        # No FUNCTION attribute: not a ComfyUI node class.
        return cls

    original = getattr(cls, func_name, None)
    if original is None:
        return cls

    # Without a source file we cannot re-import the class in the worker.
    try:
        source_file = Path(inspect.getfile(cls)).resolve()
    except (TypeError, OSError):
        return cls

    # Derive a dotted module path relative to working_dir,
    # e.g. nodes/io/load_mesh.py -> nodes.io.load_mesh
    try:
        rel = source_file.relative_to(working_dir)
    except ValueError:
        # Outside working_dir: fall back to the bare file stem.
        module_name = source_file.stem
    else:
        module_name = str(rel.with_suffix("")).replace("/", ".").replace("\\", ".")

    @wraps(original)
    def proxy(self, **kwargs):
        worker = _get_worker(env_name, python_path, working_dir, sys_path)

        # Clone tensors so they survive IPC; skipped when torch is absent.
        try:
            from .decorator import _clone_tensor_if_needed
        except ImportError:
            clone = None
        else:
            clone = _clone_tensor_if_needed
            kwargs = {key: clone(value) for key, value in kwargs.items()}

        result = worker.call_method(
            module_name=module_name,
            class_name=cls.__name__,
            method_name=func_name,
            self_state=self.__dict__.copy() if hasattr(self, "__dict__") else None,
            kwargs=kwargs,
            timeout=600.0,
        )

        # Clone tensors coming back from the worker as well.
        if clone is not None:
            result = clone(result)

        return result

    setattr(cls, func_name, proxy)

    # Breadcrumbs for debugging: mark the class as isolation-wrapped.
    cls._comfy_env_isolated = True
    cls._comfy_env_name = env_name

    return cls
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def enable_isolation(node_class_mappings: Dict[str, type]) -> None:
    """
    Enable process isolation for every node in a node pack.

    Call this AFTER importing NODE_CLASS_MAPPINGS. Each node class is wrapped
    so its FUNCTION method runs inside the isolated Python environment that
    comfy-env.toml declares with `isolated = true`:

        [myenv]
        python = "3.11"
        isolated = true

    Args:
        node_class_mappings: The NODE_CLASS_MAPPINGS dict from the node pack.

    Example:
        from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
        from comfy_env import enable_isolation

        enable_isolation(NODE_CLASS_MAPPINGS)

        __all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
    """
    # Inside the worker subprocess we must not wrap again.
    if os.environ.get("COMFYUI_ISOLATION_WORKER") == "1":
        return

    # Walk one frame up to find the caller's file, i.e. the node pack root.
    frame = inspect.currentframe()
    if frame is None:
        print("[comfy-env] Warning: Could not get current frame")
        return

    caller_frame = frame.f_back
    if caller_frame is None:
        print("[comfy-env] Warning: Could not get caller frame")
        return

    caller_file = caller_frame.f_globals.get("__file__")
    if not caller_file:
        print("[comfy-env] Warning: Could not determine caller location")
        return

    node_dir = Path(caller_file).resolve().parent

    # Load the node pack's comfy-env.toml.
    from .env.config_file import discover_config

    config = discover_config(node_dir)
    if not config:
        print(f"[comfy-env] No comfy-env.toml found in {node_dir}")
        return

    # Pick the first environment flagged isolated = true.
    isolated_env = None
    env_name = None
    for candidate_name, candidate in config.envs.items():
        if getattr(candidate, "isolated", False):
            isolated_env, env_name = candidate, candidate_name
            break

    if not isolated_env or not env_name:
        # Nothing marked isolated: silently do nothing.
        return

    python_path = _find_python_path(node_dir, env_name)
    if not python_path:
        print(f"[comfy-env] Warning: Isolated environment not found for '{env_name}'")
        print(f"[comfy-env] Expected: .pixi/envs/default/bin/python or _env_{env_name}/bin/python")
        print(f"[comfy-env] Run 'comfy-env install' to create the environment")
        return

    # sys.path entries the worker should see: the pack root, plus nodes/.
    sys_path = [str(node_dir)]
    nodes_dir = node_dir / "nodes"
    if nodes_dir.exists():
        sys_path.append(str(nodes_dir))

    print(f"[comfy-env] Enabling isolation for {len(node_class_mappings)} nodes")
    print(f"[comfy-env] Environment: {env_name}")
    print(f"[comfy-env] Python: {python_path}")

    # Wrap every class that looks like a ComfyUI node.
    wrapped_count = 0
    for node_cls in node_class_mappings.values():
        if hasattr(node_cls, "FUNCTION"):
            _wrap_node_class(node_cls, env_name, python_path, node_dir, sys_path)
            wrapped_count += 1

    print(f"[comfy-env] Wrapped {wrapped_count} node classes for isolation")
|
comfy_env/pixi.py
CHANGED
|
@@ -10,12 +10,13 @@ See: https://pixi.sh/
|
|
|
10
10
|
|
|
11
11
|
import os
|
|
12
12
|
import platform
|
|
13
|
+
import re
|
|
13
14
|
import shutil
|
|
14
15
|
import stat
|
|
15
16
|
import subprocess
|
|
16
17
|
import sys
|
|
17
18
|
from pathlib import Path
|
|
18
|
-
from typing import Callable, List, Optional
|
|
19
|
+
from typing import Callable, List, Optional, Tuple
|
|
19
20
|
|
|
20
21
|
from .env.config import IsolatedEnv, CondaConfig
|
|
21
22
|
|
|
@@ -144,6 +145,42 @@ def ensure_pixi(
|
|
|
144
145
|
return pixi_path
|
|
145
146
|
|
|
146
147
|
|
|
148
|
+
def _parse_pypi_requirement(dep: str) -> Tuple[str, Optional[str], List[str]]:
|
|
149
|
+
"""
|
|
150
|
+
Parse a pip requirement into (name, version_spec, extras).
|
|
151
|
+
|
|
152
|
+
Examples:
|
|
153
|
+
"trimesh[easy]>=4.0.0" -> ("trimesh", ">=4.0.0", ["easy"])
|
|
154
|
+
"numpy>=1.21.0" -> ("numpy", ">=1.21.0", [])
|
|
155
|
+
"torch" -> ("torch", None, [])
|
|
156
|
+
"pkg[a,b]" -> ("pkg", None, ["a", "b"])
|
|
157
|
+
|
|
158
|
+
Returns:
|
|
159
|
+
Tuple of (package_name, version_spec_or_None, list_of_extras)
|
|
160
|
+
"""
|
|
161
|
+
dep = dep.strip()
|
|
162
|
+
|
|
163
|
+
# Match: name[extras]version_spec or name version_spec
|
|
164
|
+
# Package names can contain letters, numbers, underscores, hyphens, and dots
|
|
165
|
+
match = re.match(r'^([a-zA-Z0-9._-]+)(?:\[([^\]]+)\])?(.*)$', dep)
|
|
166
|
+
if not match:
|
|
167
|
+
return dep, None, []
|
|
168
|
+
|
|
169
|
+
name = match.group(1)
|
|
170
|
+
extras_str = match.group(2)
|
|
171
|
+
version_spec = match.group(3).strip() if match.group(3) else None
|
|
172
|
+
|
|
173
|
+
extras = []
|
|
174
|
+
if extras_str:
|
|
175
|
+
extras = [e.strip() for e in extras_str.split(',')]
|
|
176
|
+
|
|
177
|
+
# Return None instead of empty string for version_spec
|
|
178
|
+
if version_spec == "":
|
|
179
|
+
version_spec = None
|
|
180
|
+
|
|
181
|
+
return name, version_spec, extras
|
|
182
|
+
|
|
183
|
+
|
|
147
184
|
def create_pixi_toml(
|
|
148
185
|
env_config: IsolatedEnv,
|
|
149
186
|
node_dir: Path,
|
|
@@ -196,6 +233,13 @@ def create_pixi_toml(
|
|
|
196
233
|
elif sys.platform == "win32":
|
|
197
234
|
lines.append('platforms = ["win-64"]')
|
|
198
235
|
|
|
236
|
+
# System requirements - specify glibc version for proper wheel resolution
|
|
237
|
+
# Ubuntu 22.04+ has glibc 2.35, enabling manylinux_2_35 wheels
|
|
238
|
+
if sys.platform == "linux":
|
|
239
|
+
lines.append("")
|
|
240
|
+
lines.append("[system-requirements]")
|
|
241
|
+
lines.append('libc = { family = "glibc", version = "2.35" }')
|
|
242
|
+
|
|
199
243
|
lines.append("")
|
|
200
244
|
|
|
201
245
|
# Dependencies section (conda packages)
|
|
@@ -227,7 +271,7 @@ def create_pixi_toml(
|
|
|
227
271
|
special_deps = {} # For dependencies that need special syntax (path, etc.)
|
|
228
272
|
|
|
229
273
|
# Always include comfy-env for worker support
|
|
230
|
-
#
|
|
274
|
+
# Priority: 1. COMFY_LOCAL_WHEELS env var, 2. ~/utils/comfy-env, 3. PyPI
|
|
231
275
|
local_wheels_dir = os.environ.get("COMFY_LOCAL_WHEELS")
|
|
232
276
|
if local_wheels_dir:
|
|
233
277
|
local_wheels = list(Path(local_wheels_dir).glob("comfy_env-*.whl"))
|
|
@@ -238,15 +282,78 @@ def create_pixi_toml(
|
|
|
238
282
|
else:
|
|
239
283
|
pypi_deps.append("comfy-env")
|
|
240
284
|
else:
|
|
241
|
-
|
|
285
|
+
# Check for local editable comfy-env at ~/utils/comfy-env
|
|
286
|
+
local_comfy_env = Path.home() / "utils" / "comfy-env"
|
|
287
|
+
if local_comfy_env.exists() and (local_comfy_env / "pyproject.toml").exists():
|
|
288
|
+
special_deps["comfy-env"] = f'{{ path = "{local_comfy_env}", editable = true }}'
|
|
289
|
+
else:
|
|
290
|
+
pypi_deps.append("comfy-env")
|
|
242
291
|
|
|
243
292
|
# Add regular requirements
|
|
244
293
|
if env_config.requirements:
|
|
245
294
|
pypi_deps.extend(env_config.requirements)
|
|
246
295
|
|
|
247
|
-
#
|
|
248
|
-
|
|
249
|
-
|
|
296
|
+
# Add CUDA packages with resolved wheel URLs
|
|
297
|
+
if env_config.no_deps_requirements:
|
|
298
|
+
from .registry import PACKAGE_REGISTRY
|
|
299
|
+
|
|
300
|
+
# Use fixed CUDA 12.8 / PyTorch 2.8 for pixi environments (modern GPU default)
|
|
301
|
+
# This ensures wheels match what pixi will install, not what the host has
|
|
302
|
+
vars_dict = {
|
|
303
|
+
"cuda_version": "12.8",
|
|
304
|
+
"cuda_short": "128",
|
|
305
|
+
"cuda_short2": "128",
|
|
306
|
+
"cuda_major": "12",
|
|
307
|
+
"torch_version": "2.8.0",
|
|
308
|
+
"torch_short": "280",
|
|
309
|
+
"torch_mm": "28",
|
|
310
|
+
"torch_dotted_mm": "2.8",
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
# Platform detection
|
|
314
|
+
if sys.platform == "linux":
|
|
315
|
+
vars_dict["platform"] = "linux_x86_64"
|
|
316
|
+
elif sys.platform == "darwin":
|
|
317
|
+
vars_dict["platform"] = "macosx_arm64" if platform.machine() == "arm64" else "macosx_x86_64"
|
|
318
|
+
elif sys.platform == "win32":
|
|
319
|
+
vars_dict["platform"] = "win_amd64"
|
|
320
|
+
|
|
321
|
+
# Python version from pixi env config
|
|
322
|
+
if env_config.python:
|
|
323
|
+
py_parts = env_config.python.split(".")
|
|
324
|
+
py_major = py_parts[0]
|
|
325
|
+
py_minor = py_parts[1] if len(py_parts) > 1 else "0"
|
|
326
|
+
vars_dict["py_version"] = env_config.python
|
|
327
|
+
vars_dict["py_short"] = f"{py_major}{py_minor}"
|
|
328
|
+
vars_dict["py_minor"] = py_minor
|
|
329
|
+
vars_dict["py_tag"] = f"cp{py_major}{py_minor}"
|
|
330
|
+
|
|
331
|
+
for req in env_config.no_deps_requirements:
|
|
332
|
+
# Parse requirement (e.g., "cumesh" or "cumesh==0.0.1")
|
|
333
|
+
if "==" in req:
|
|
334
|
+
pkg_name, version = req.split("==", 1)
|
|
335
|
+
else:
|
|
336
|
+
pkg_name = req
|
|
337
|
+
version = None
|
|
338
|
+
|
|
339
|
+
pkg_lower = pkg_name.lower()
|
|
340
|
+
if pkg_lower in PACKAGE_REGISTRY:
|
|
341
|
+
config = PACKAGE_REGISTRY[pkg_lower]
|
|
342
|
+
template = config.get("wheel_template")
|
|
343
|
+
if template:
|
|
344
|
+
# Use version from requirement or default
|
|
345
|
+
v = version or config.get("default_version")
|
|
346
|
+
if v:
|
|
347
|
+
vars_dict["version"] = v
|
|
348
|
+
|
|
349
|
+
# Resolve URL
|
|
350
|
+
url = template
|
|
351
|
+
for key, value in vars_dict.items():
|
|
352
|
+
if value:
|
|
353
|
+
url = url.replace(f"{{{key}}}", str(value))
|
|
354
|
+
|
|
355
|
+
special_deps[pkg_name] = f'{{ url = "{url}" }}'
|
|
356
|
+
log(f" CUDA package {pkg_name}: resolved wheel URL")
|
|
250
357
|
|
|
251
358
|
# Add platform-specific requirements
|
|
252
359
|
if sys.platform == "linux" and env_config.linux_requirements:
|
|
@@ -265,25 +372,23 @@ def create_pixi_toml(
|
|
|
265
372
|
|
|
266
373
|
for dep in pypi_deps:
|
|
267
374
|
# Parse pip requirement format to pixi format
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
version =
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
name, version = dep_clean.split(">", 1)
|
|
280
|
-
lines.append(f'{name.strip()} = ">{version.strip()}"')
|
|
281
|
-
elif "<" in dep_clean:
|
|
282
|
-
name, version = dep_clean.split("<", 1)
|
|
283
|
-
lines.append(f'{name.strip()} = "<{version.strip()}"')
|
|
375
|
+
# Handles extras like trimesh[easy]>=4.0.0
|
|
376
|
+
name, version_spec, extras = _parse_pypi_requirement(dep)
|
|
377
|
+
|
|
378
|
+
if extras:
|
|
379
|
+
# Use table syntax for packages with extras
|
|
380
|
+
# e.g., trimesh = { version = ">=4.0.0", extras = ["easy"] }
|
|
381
|
+
extras_json = "[" + ", ".join(f'"{e}"' for e in extras) + "]"
|
|
382
|
+
if version_spec:
|
|
383
|
+
lines.append(f'{name} = {{ version = "{version_spec}", extras = {extras_json} }}')
|
|
384
|
+
else:
|
|
385
|
+
lines.append(f'{name} = {{ version = "*", extras = {extras_json} }}')
|
|
284
386
|
else:
|
|
285
|
-
#
|
|
286
|
-
|
|
387
|
+
# Simple syntax for packages without extras
|
|
388
|
+
if version_spec:
|
|
389
|
+
lines.append(f'{name} = "{version_spec}"')
|
|
390
|
+
else:
|
|
391
|
+
lines.append(f'{name} = "*"')
|
|
287
392
|
|
|
288
393
|
content = "\n".join(lines) + "\n"
|
|
289
394
|
|
|
@@ -370,6 +475,8 @@ def pixi_install(
|
|
|
370
475
|
log(f" - Install {len(env_config.conda.packages)} conda packages")
|
|
371
476
|
if env_config.requirements:
|
|
372
477
|
log(f" - Install {len(env_config.requirements)} pip packages")
|
|
478
|
+
if env_config.no_deps_requirements:
|
|
479
|
+
log(f" - Install {len(env_config.no_deps_requirements)} CUDA packages: {', '.join(env_config.no_deps_requirements)}")
|
|
373
480
|
return True
|
|
374
481
|
|
|
375
482
|
# Clean previous pixi artifacts
|