comfy-env 0.1.15__py3-none-any.whl → 0.1.17__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- comfy_env/__init__.py +117 -40
- comfy_env/cli.py +122 -311
- comfy_env/config/__init__.py +12 -4
- comfy_env/config/parser.py +30 -79
- comfy_env/config/types.py +37 -0
- comfy_env/detection/__init__.py +77 -0
- comfy_env/detection/cuda.py +61 -0
- comfy_env/detection/gpu.py +230 -0
- comfy_env/detection/platform.py +70 -0
- comfy_env/detection/runtime.py +103 -0
- comfy_env/environment/__init__.py +53 -0
- comfy_env/environment/cache.py +141 -0
- comfy_env/environment/libomp.py +41 -0
- comfy_env/environment/paths.py +38 -0
- comfy_env/environment/setup.py +91 -0
- comfy_env/install.py +134 -331
- comfy_env/isolation/__init__.py +32 -2
- comfy_env/isolation/tensor_utils.py +83 -0
- comfy_env/isolation/workers/__init__.py +16 -0
- comfy_env/{workers → isolation/workers}/mp.py +1 -1
- comfy_env/{workers → isolation/workers}/subprocess.py +1 -1
- comfy_env/isolation/wrap.py +128 -509
- comfy_env/packages/__init__.py +60 -0
- comfy_env/packages/apt.py +36 -0
- comfy_env/packages/cuda_wheels.py +97 -0
- comfy_env/packages/node_dependencies.py +77 -0
- comfy_env/packages/pixi.py +85 -0
- comfy_env/packages/toml_generator.py +88 -0
- comfy_env-0.1.17.dist-info/METADATA +225 -0
- comfy_env-0.1.17.dist-info/RECORD +36 -0
- comfy_env/cache.py +0 -203
- comfy_env/nodes.py +0 -187
- comfy_env/pixi/__init__.py +0 -48
- comfy_env/pixi/core.py +0 -587
- comfy_env/pixi/cuda_detection.py +0 -303
- comfy_env/pixi/platform/__init__.py +0 -21
- comfy_env/pixi/platform/base.py +0 -96
- comfy_env/pixi/platform/darwin.py +0 -53
- comfy_env/pixi/platform/linux.py +0 -68
- comfy_env/pixi/platform/windows.py +0 -284
- comfy_env/pixi/resolver.py +0 -198
- comfy_env/prestartup.py +0 -208
- comfy_env/workers/__init__.py +0 -38
- comfy_env/workers/tensor_utils.py +0 -188
- comfy_env-0.1.15.dist-info/METADATA +0 -291
- comfy_env-0.1.15.dist-info/RECORD +0 -31
- comfy_env/{workers → isolation/workers}/base.py +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.17.dist-info}/WHEEL +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.17.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.17.dist-info}/licenses/LICENSE +0 -0
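The most disruptive change for downstream imports is the module reshuffle: comfy_env/workers/* moves under comfy_env/isolation/workers/*, and the old pixi, cache, nodes, and prestartup modules are replaced by the new detection, environment, and packages packages. A minimal compatibility sketch for the worker move is shown below; it assumes the relocated modules keep their module-level contents, which the +1/-1 deltas for mp.py and subprocess.py suggest.

# Version-tolerant import across the 0.1.15 -> 0.1.17 move of the worker modules.
# Sketch only: assumes mp.py and subprocess.py changed nothing but their location.
try:
    # 0.1.17+: workers live under comfy_env.isolation
    from comfy_env.isolation.workers import mp as mp_worker
    from comfy_env.isolation.workers import subprocess as subprocess_worker
except ImportError:
    # 0.1.15 and earlier: top-level comfy_env.workers package
    from comfy_env.workers import mp as mp_worker
    from comfy_env.workers import subprocess as subprocess_worker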
comfy_env-0.1.17.dist-info/RECORD
ADDED
@@ -0,0 +1,36 @@
+comfy_env/__init__.py,sha256=7R1WnVv6Rmq97bbK2Fvp1A50jkFwxIgcmfiILit9k8E,4666
+comfy_env/cli.py,sha256=SWErVa3lB2ZHc7aNJJgWElWCRMsCzVLP_CrupuOk_zw,7684
+comfy_env/install.py,sha256=CT3A3kIVQqXGA5nfp7jxTU_gHdsood4NsjW0tbXv7_8,10781
+comfy_env/config/__init__.py,sha256=QlxIc5Hdghje6cm4FutReMO6fQK5rBu-zr36V2fjcLE,474
+comfy_env/config/parser.py,sha256=bNSy8Jn4VZg3xw8pldlkEyGaaI_BmNsUEYQz-hKiXgE,2221
+comfy_env/config/types.py,sha256=vFgWeEl_p26OmcoUv0wAOlHe9GBio2isjIWl7kACKFY,1096
+comfy_env/detection/__init__.py,sha256=dH84PlSRJfs8MRJp2gp9_NX8ZzGIDHR8iZXy7_B8Ez4,1671
+comfy_env/detection/cuda.py,sha256=BOaRQOGP2yoaPCO9eqPvWBB5Us_MNo-sSbadQsIjHqM,1708
+comfy_env/detection/gpu.py,sha256=Rf7pgtZXzUbJqcXzZXQi-yK5naeuSP1FiL6SdreeADM,8393
+comfy_env/detection/platform.py,sha256=Xe01dIZm7JT19kIH-j11h7KIBVRaKTLh8u4TzI3uZ6E,2127
+comfy_env/detection/runtime.py,sha256=gDplni7ZPGW7WjNJuqWbtgSwkWz27kBWSFvYbhXun6o,3756
+comfy_env/environment/__init__.py,sha256=WfZnyOvbI0MrDQPYTtOG2kHn0XCSCrqKcOlJcmB29nU,1009
+comfy_env/environment/cache.py,sha256=RGfVW2caMO0Dd1nX2otUQP0xW3pVS7iSOP4vIUAMdEA,4568
+comfy_env/environment/libomp.py,sha256=nzr3kDnRLgcf9CZ_WF4ItWskqEDS2S0geqZS43XoKig,1319
+comfy_env/environment/paths.py,sha256=5TFFAkOZXa8R3cHfVHDEFnwy6_JcHilVBOHJuy-yqR0,1129
+comfy_env/environment/setup.py,sha256=KQgeqlEaqB_tOVhsR2RQF76-LuPud2EPtkQWUM3AJ5Y,3231
+comfy_env/isolation/__init__.py,sha256=XfMLEiBIcEzHG_k2vk9fT9GvFfmOsfbpM26czuxbdRI,800
+comfy_env/isolation/tensor_utils.py,sha256=2_f4jjylqCPaPldD1Jw-es5CyOtuF5I1ROdyEIxsg-U,2951
+comfy_env/isolation/wrap.py,sha256=K7GAkqU_Uxe717eUtPsFv5kcr_Jfbh3x79A-8vbY1nY,8592
+comfy_env/isolation/workers/__init__.py,sha256=Zp6sZSRBcb5Negqgzqs3jPjfO9T1u3nNrQhp6WqTAuc,325
+comfy_env/isolation/workers/base.py,sha256=4ZYTaQ4J0kBHCoO_OfZnsowm4rJCoqinZUaOtgkOPbw,2307
+comfy_env/isolation/workers/mp.py,sha256=ygOgx2iyLN7l5fWkKI4lqzQsDyfAAd9Gb4gTYLp7o1A,34061
+comfy_env/isolation/workers/subprocess.py,sha256=ML6I9IenReagP8iT0Cd2ipet6JPK1gnDbOianOuFwOw,57164
+comfy_env/packages/__init__.py,sha256=6PTwUfUdJDTbIw46dCiA42qk4zUe_gw29xOaklBiMMc,1193
+comfy_env/packages/apt.py,sha256=pxy3A5ZHv3X8ExCVyohODY8Fcy9ji4izIVPfYoxhqT4,1027
+comfy_env/packages/cuda_wheels.py,sha256=G_CnlwNcfeWlEU24aCVBpeqQQ05y8_02dDLBwBFNwII,3980
+comfy_env/packages/node_dependencies.py,sha256=AX_CY6j43tTY5KhyPfU7Wz6zgLAfWF0o0JkTrcNSecg,2966
+comfy_env/packages/pixi.py,sha256=RPu8x5sSOLE1CYAhWMMjoQrbFGGt00fdsbqtRcTz7LQ,3871
+comfy_env/packages/toml_generator.py,sha256=Vhc8F9euHhMTwH1TV6t96-D9Pjrn9jIN4e9WXrCIFE8,3414
+comfy_env/templates/comfy-env-instructions.txt,sha256=ve1RAthW7ouumU9h6DM7mIRX1MS8_Tyonq2U4tcrFu8,1031
+comfy_env/templates/comfy-env.toml,sha256=ROIqi4BlPL1MEdL1VgebfTHpdwPNYGHwWeigI9Kw-1I,4831
+comfy_env-0.1.17.dist-info/METADATA,sha256=XlM9mAERf1U2q9sBbrmY7jIgqrl8IsLrs8vK9CmLnpw,4808
+comfy_env-0.1.17.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+comfy_env-0.1.17.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
+comfy_env-0.1.17.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
+comfy_env-0.1.17.dist-info/RECORD,,

comfy_env/cache.py
DELETED
@@ -1,203 +0,0 @@
-"""Central environment cache management."""
-
-import hashlib
-import os
-import shutil
-import sys
-from datetime import datetime
-from pathlib import Path
-from typing import Optional, Tuple, Callable
-
-# Import version
-try:
-    from . import __version__
-except ImportError:
-    __version__ = "0.0.0-dev"
-
-import tomli
-import tomli_w
-
-
-# Constants
-CACHE_DIR = Path.home() / ".comfy-env" / "envs"
-MARKER_FILE = ".comfy-env-marker.toml"
-METADATA_FILE = ".comfy-env-metadata.toml"
-
-
-def get_cache_dir() -> Path:
-    """Get central cache directory, create if needed."""
-    CACHE_DIR.mkdir(parents=True, exist_ok=True)
-    return CACHE_DIR
-
-
-def compute_config_hash(config_path: Path) -> str:
-    """Compute hash of comfy-env.toml content (first 8 chars of SHA256)."""
-    content = config_path.read_bytes()
-    return hashlib.sha256(content).hexdigest()[:8]
-
-
-def sanitize_name(name: str) -> str:
-    """Sanitize a name for use in filesystem paths."""
-    # Lowercase and replace problematic chars
-    name = name.lower()
-    for prefix in ("comfyui-", "comfyui_"):
-        if name.startswith(prefix):
-            name = name[len(prefix):]
-    return name.replace("-", "_").replace(" ", "_")
-
-
-def get_env_name(node_dir: Path, config_path: Path) -> str:
-    """Generate env name: <nodename>_<subfolder>_<hash>."""
-    # Get node name
-    node_name = sanitize_name(node_dir.name)
-
-    # Get subfolder (relative path from node_dir to config parent)
-    config_parent = config_path.parent
-    if config_parent == node_dir:
-        subfolder = ""
-    else:
-        try:
-            rel_path = config_parent.relative_to(node_dir)
-            subfolder = rel_path.as_posix().replace("/", "_")
-        except ValueError:
-            # config_path not under node_dir - use parent folder name
-            subfolder = sanitize_name(config_parent.name)
-
-    # Compute hash
-    config_hash = compute_config_hash(config_path)
-
-    return f"{node_name}_{subfolder}_{config_hash}"
-
-
-def get_central_env_path(node_dir: Path, config_path: Path) -> Path:
-    """Get path to central environment for this config."""
-    env_name = get_env_name(node_dir, config_path)
-    return get_cache_dir() / env_name
-
-
-def write_marker(config_path: Path, env_path: Path) -> None:
-    """Write marker file linking node to central env."""
-    marker_path = config_path.parent / MARKER_FILE
-    marker_data = {
-        "env": {
-            "name": env_path.name,
-            "path": str(env_path),
-            "config_hash": compute_config_hash(config_path),
-            "created": datetime.now().isoformat(),
-            "comfy_env_version": __version__,
-        }
-    }
-    marker_path.write_text(tomli_w.dumps(marker_data))
-
-
-def write_env_metadata(env_path: Path, marker_path: Path) -> None:
-    """Write metadata file for orphan detection."""
-    metadata_path = env_path / METADATA_FILE
-    metadata = {
-        "marker_path": str(marker_path),
-        "created": datetime.now().isoformat(),
-    }
-    metadata_path.write_text(tomli_w.dumps(metadata))
-
-
-def read_marker(marker_path: Path) -> Optional[dict]:
-    """Read marker file, return None if invalid/missing."""
-    if not marker_path.exists():
-        return None
-    try:
-        with open(marker_path, "rb") as f:
-            return tomli.load(f)
-    except Exception:
-        return None
-
-
-def read_env_metadata(env_path: Path) -> Optional[dict]:
-    """Read metadata file from env, return None if invalid/missing."""
-    metadata_path = env_path / METADATA_FILE
-    if not metadata_path.exists():
-        return None
-    try:
-        with open(metadata_path, "rb") as f:
-            return tomli.load(f)
-    except Exception:
-        return None
-
-
-def resolve_env_path(node_dir: Path) -> Tuple[Optional[Path], Optional[Path], Optional[Path]]:
-    """Resolve environment path. Returns (env_path, site_packages, lib_dir)."""
-    # 1. Check marker file -> central cache
-    marker_path = node_dir / MARKER_FILE
-    marker = read_marker(marker_path)
-    if marker and "env" in marker:
-        env_path = Path(marker["env"]["path"])
-        if env_path.exists():
-            return _get_env_paths(env_path)
-
-    # 2. Check _env_<name>
-    node_name = sanitize_name(node_dir.name)
-    env_name = f"_env_{node_name}"
-    local_env = node_dir / env_name
-    if local_env.exists():
-        return _get_env_paths(local_env)
-
-    # 3. Check .pixi/envs/default
-    pixi_env = node_dir / ".pixi" / "envs" / "default"
-    if pixi_env.exists():
-        return _get_env_paths(pixi_env)
-
-    # 4. Check .venv
-    venv_dir = node_dir / ".venv"
-    if venv_dir.exists():
-        return _get_env_paths(venv_dir)
-
-    return None, None, None
-
-
-def _get_env_paths(env_path: Path) -> Tuple[Path, Optional[Path], Optional[Path]]:
-    """Get site-packages and lib paths from an environment."""
-    import glob
-
-    if sys.platform == "win32":
-        site_packages = env_path / "Lib" / "site-packages"
-        lib_dir = env_path / "Library" / "bin"
-    else:
-        # Linux/Mac: lib/python*/site-packages
-        matches = glob.glob(str(env_path / "lib" / "python*" / "site-packages"))
-        site_packages = Path(matches[0]) if matches else None
-        lib_dir = env_path / "lib"
-
-    return env_path, site_packages, lib_dir
-
-
-def cleanup_orphaned_envs(log: Callable[[str], None] = print) -> int:
-    """Remove orphaned environments. Returns count cleaned."""
-    cache_dir = get_cache_dir()
-    if not cache_dir.exists():
-        return 0
-
-    cleaned = 0
-    for env_dir in cache_dir.iterdir():
-        if not env_dir.is_dir():
-            continue
-
-        # Skip if no metadata (might be manually created or old format)
-        metadata = read_env_metadata(env_dir)
-        if not metadata:
-            continue
-
-        # Check if marker file still exists
-        marker_path_str = metadata.get("marker_path", "")
-        if not marker_path_str:
-            continue
-
-        marker_path = Path(marker_path_str)
-        if not marker_path.exists():
-            # Marker gone = node was deleted = orphan
-            log(f"[comfy-env] Cleaning orphaned env: {env_dir.name}")
-            try:
-                shutil.rmtree(env_dir)
-                cleaned += 1
-            except Exception as e:
-                log(f"[comfy-env] Failed to cleanup {env_dir.name}: {e}")

-    return cleaned
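For reference, the removed 0.1.15 cache API was typically driven as sketched below; the function names and signatures come from the deleted module above, while the node path is hypothetical. In 0.1.17 this responsibility appears to move to comfy_env/environment/cache.py, whose contents are not shown in this diff.

# How the deleted comfy_env.cache helpers fit together in 0.1.15 (illustrative sketch).
from pathlib import Path
from comfy_env.cache import (
    get_central_env_path,
    write_marker,
    write_env_metadata,
    resolve_env_path,
)

node_dir = Path("ComfyUI/custom_nodes/ComfyUI-SomeNode")  # hypothetical node checkout
config_path = node_dir / "comfy-env.toml"

# Install time: pick a central env dir named <nodename>_<subfolder>_<hash> under
# ~/.comfy-env/envs and link the node to it via the marker and metadata files.
env_path = get_central_env_path(node_dir, config_path)
env_path.mkdir(parents=True, exist_ok=True)  # assumed to be created by the installer in real use
write_marker(config_path, env_path)
write_env_metadata(env_path, config_path.parent / ".comfy-env-marker.toml")

# Load time: resolve the environment again, falling back from the marker file to
# _env_<name>, .pixi/envs/default, and finally .venv.
env_path, site_packages, lib_dir = resolve_env_path(node_dir)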
comfy_env/nodes.py
DELETED
@@ -1,187 +0,0 @@
-"""
-Node dependency installation for comfy-env.
-
-This module handles installation of dependent ComfyUI custom nodes
-specified in the [node_reqs] section of comfy-env.toml.
-
-Example configuration:
-    [node_reqs]
-    ComfyUI_essentials = "cubiq/ComfyUI_essentials"
-    ComfyUI-DepthAnythingV2 = "kijai/ComfyUI-DepthAnythingV2"
-"""
-
-import shutil
-import subprocess
-import sys
-from pathlib import Path
-from typing import TYPE_CHECKING, Callable, List, Set
-
-if TYPE_CHECKING:
-    from .config.parser import NodeReq
-
-
-def normalize_repo_url(repo: str) -> str:
-    """
-    Convert GitHub shorthand to full URL.
-
-    Args:
-        repo: Either 'owner/repo' or full URL like 'https://github.com/owner/repo'
-
-    Returns:
-        Full GitHub URL
-    """
-    if repo.startswith("http://") or repo.startswith("https://"):
-        return repo
-    return f"https://github.com/{repo}"
-
-
-def clone_node(
-    repo: str,
-    name: str,
-    target_dir: Path,
-    log: Callable[[str], None],
-) -> Path:
-    """
-    Clone a node repository to target_dir/name.
-
-    Args:
-        repo: GitHub repo path (e.g., 'owner/repo') or full URL
-        name: Directory name for the cloned repo
-        target_dir: Parent directory (usually custom_nodes/)
-        log: Logging callback
-
-    Returns:
-        Path to the cloned node directory
-
-    Raises:
-        RuntimeError: If git clone fails
-    """
-    node_path = target_dir / name
-    url = normalize_repo_url(repo)
-
-    log(f" Cloning {name} from {url}...")
-    result = subprocess.run(
-        ["git", "clone", "--depth", "1", url, str(node_path)],
-        capture_output=True,
-        text=True,
-    )
-
-    if result.returncode != 0:
-        raise RuntimeError(f"Failed to clone {url}: {result.stderr.strip()}")
-
-    return node_path
-
-
-def install_requirements(
-    node_dir: Path,
-    log: Callable[[str], None],
-) -> None:
-    """
-    Install requirements.txt in a node directory if it exists.
-
-    Args:
-        node_dir: Path to the node directory
-        log: Logging callback
-    """
-    requirements_file = node_dir / "requirements.txt"
-
-    if not requirements_file.exists():
-        return
-
-    log(f" Installing requirements for {node_dir.name}...")
-
-    # Try uv first, fall back to pip if uv not in PATH
-    if shutil.which("uv"):
-        cmd = ["uv", "pip", "install", "-r", str(requirements_file), "--python", sys.executable]
-    else:
-        cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
-
-    result = subprocess.run(cmd, cwd=node_dir, capture_output=True, text=True)
-    if result.returncode != 0:
-        log(f" Warning: requirements.txt install failed for {node_dir.name}: {result.stderr.strip()[:200]}")
-
-
-def run_install_script(
-    node_dir: Path,
-    log: Callable[[str], None],
-) -> None:
-    """
-    Run install.py in a node directory if it exists.
-
-    Args:
-        node_dir: Path to the node directory
-        log: Logging callback
-    """
-    install_script = node_dir / "install.py"
-
-    if install_script.exists():
-        log(f" Running install.py for {node_dir.name}...")
-        result = subprocess.run(
-            [sys.executable, str(install_script)],
-            cwd=node_dir,
-            capture_output=True,
-            text=True,
-        )
-        if result.returncode != 0:
-            log(f" Warning: install.py failed for {node_dir.name}: {result.stderr.strip()[:200]}")
-
-
-def install_node_deps(
-    node_reqs: "List[NodeReq]",
-    custom_nodes_dir: Path,
-    log: Callable[[str], None],
-    visited: Set[str],
-) -> None:
-    """
-    Install node dependencies recursively.
-
-    Args:
-        node_reqs: List of NodeReq objects to install
-        custom_nodes_dir: Path to custom_nodes directory
-        log: Logging callback
-        visited: Set of already-processed node names (for cycle detection)
-    """
-    from .config.parser import discover_config
-
-    for req in node_reqs:
-        # Skip if already visited (cycle detection)
-        if req.name in visited:
-            log(f" {req.name}: already in dependency chain, skipping")
-            continue
-
-        visited.add(req.name)
-
-        node_path = custom_nodes_dir / req.name
-
-        # Skip if already installed (directory exists)
-        if node_path.exists():
-            log(f" {req.name}: already installed, skipping")
-            continue
-
-        try:
-            # Clone the repository
-            clone_node(req.repo, req.name, custom_nodes_dir, log)
-
-            # Install requirements.txt if present
-            install_requirements(node_path, log)
-
-            # Run install.py if present
-            run_install_script(node_path, log)
-
-            # Recursively process nested node_reqs
-            try:
-                nested_config = discover_config(node_path)
-                if nested_config and nested_config.node_reqs:
-                    log(f" {req.name}: found {len(nested_config.node_reqs)} nested dependencies")
-                    install_node_deps(
-                        nested_config.node_reqs,
-                        custom_nodes_dir,
-                        log,
-                        visited,
-                    )
-            except Exception as e:
-                # Don't fail if we can't parse nested config
-                log(f" {req.name}: could not check for nested deps: {e}")
-
-        except Exception as e:
-            log(f" Warning: Failed to install {req.name}: {e}")
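Similarly, the removed installer entry point was invoked roughly as follows (signatures taken from the deleted code above; paths are illustrative). The equivalent functionality in 0.1.17 appears to live in comfy_env/packages/node_dependencies.py, which is not shown here.

# Driving the deleted comfy_env.nodes installer in 0.1.15 (illustrative sketch).
from pathlib import Path
from comfy_env.config.parser import discover_config  # also used by the deleted module
from comfy_env.nodes import install_node_deps

custom_nodes_dir = Path("ComfyUI/custom_nodes")  # hypothetical ComfyUI checkout
node_dir = custom_nodes_dir / "ComfyUI-SomeNode"

config = discover_config(node_dir)  # reads comfy-env.toml if present
if config and config.node_reqs:
    # visited guards against nodes that (transitively) require each other
    install_node_deps(config.node_reqs, custom_nodes_dir, log=print, visited=set())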
comfy_env/pixi/__init__.py
DELETED
@@ -1,48 +0,0 @@
-"""
-Pixi integration for comfy-env.
-
-All dependencies go through pixi for unified management.
-"""
-
-from .core import (
-    ensure_pixi,
-    get_pixi_path,
-    get_pixi_python,
-    pixi_run,
-    pixi_install,
-    clean_pixi_artifacts,
-    CUDA_WHEELS_INDEX,
-)
-from .cuda_detection import (
-    detect_cuda_version,
-    detect_cuda_environment,
-    detect_gpu_info,
-    detect_gpus,
-    get_gpu_summary,
-    get_recommended_cuda_version,
-    GPUInfo,
-    CUDAEnvironment,
-)
-from .resolver import RuntimeEnv
-
-__all__ = [
-    # Core pixi functions
-    "ensure_pixi",
-    "get_pixi_path",
-    "get_pixi_python",
-    "pixi_run",
-    "pixi_install",
-    "clean_pixi_artifacts",
-    "CUDA_WHEELS_INDEX",
-    # CUDA detection
-    "detect_cuda_version",
-    "detect_cuda_environment",
-    "detect_gpu_info",
-    "detect_gpus",
-    "get_gpu_summary",
-    "get_recommended_cuda_version",
-    "GPUInfo",
-    "CUDAEnvironment",
-    # Resolver
-    "RuntimeEnv",
-]
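For code that consumed the removed facade, the 0.1.15 import surface was the __all__ list above, for example:

# 0.1.15 imports from the removed comfy_env.pixi facade (names taken from __all__ above).
from comfy_env.pixi import (
    ensure_pixi,
    pixi_install,
    detect_cuda_version,
    detect_gpus,
    get_gpu_summary,
    GPUInfo,
    RuntimeEnv,
)

The GPU/CUDA detection half of this API appears to be reorganized into comfy_env/detection/ in 0.1.17, and the pixi tooling into comfy_env/packages/pixi.py, but their exact exports are not part of this diff, so no one-to-one mapping is shown here.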