comfy-env 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfy_env/__init__.py +30 -27
- comfy_env/cache.py +203 -0
- comfy_env/cli.py +9 -11
- comfy_env/config/__init__.py +8 -10
- comfy_env/config/parser.py +28 -75
- comfy_env/install.py +141 -25
- comfy_env/isolation/__init__.py +2 -1
- comfy_env/isolation/wrap.py +202 -25
- comfy_env/nodes.py +1 -1
- comfy_env/pixi/core.py +44 -10
- comfy_env/prestartup.py +63 -19
- comfy_env/workers/subprocess.py +1 -1
- {comfy_env-0.1.13.dist-info → comfy_env-0.1.15.dist-info}/METADATA +2 -2
- comfy_env-0.1.15.dist-info/RECORD +31 -0
- comfy_env/config/types.py +0 -70
- comfy_env/errors.py +0 -293
- comfy_env-0.1.13.dist-info/RECORD +0 -32
- {comfy_env-0.1.13.dist-info → comfy_env-0.1.15.dist-info}/WHEEL +0 -0
- {comfy_env-0.1.13.dist-info → comfy_env-0.1.15.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.1.13.dist-info → comfy_env-0.1.15.dist-info}/licenses/LICENSE +0 -0
comfy_env/__init__.py
CHANGED
@@ -1,12 +1,4 @@
-"""
-comfy-env: Environment management for ComfyUI custom nodes.
-
-All dependencies go through pixi for unified management.
-
-Main APIs:
-- install(): Install dependencies from comfy-env.toml
-- wrap_isolated_nodes(): Wrap nodes for subprocess isolation
-"""
+"""Environment management for ComfyUI custom nodes."""
 
 from importlib.metadata import version, PackageNotFoundError
 
@@ -48,32 +40,34 @@ from .workers import (
 )
 
 # Isolation
-from .isolation import wrap_isolated_nodes
+from .isolation import wrap_isolated_nodes, wrap_nodes
 
 # Install API
-from .install import install, verify_installation
+from .install import install, verify_installation, USE_COMFY_ENV_VAR
 
 # Prestartup helpers
-from .prestartup import setup_env
+from .prestartup import setup_env, copy_files
 
-#
-from .
-
-
-
-
-    InstallError,
+# Cache management
+from .cache import (
+    get_cache_dir,
+    cleanup_orphaned_envs,
+    resolve_env_path,
+    CACHE_DIR,
+    MARKER_FILE,
 )
 
 __all__ = [
     # Install API
     "install",
     "verify_installation",
+    "USE_COMFY_ENV_VAR",
     # Prestartup
     "setup_env",
+    "copy_files",
     # Isolation
     "wrap_isolated_nodes",
+    "wrap_nodes",
     # Config
     "ComfyEnvConfig",
     "NodeReq",
@@ -98,11 +92,20 @@ __all__ = [
     "WorkerError",
     "MPWorker",
     "SubprocessWorker",
-    #
-    "
-    "
-    "
-    "
-    "InstallError",
+    # Cache
+    "get_cache_dir",
+    "cleanup_orphaned_envs",
+    "resolve_env_path",
+    "CACHE_DIR",
+    "MARKER_FILE",
 ]
+
+# Run orphan cleanup once on module load (silently)
+def _run_startup_cleanup():
+    """Clean orphaned envs on startup."""
+    try:
+        cleanup_orphaned_envs(log=lambda x: None) # Silent
+    except Exception:
+        pass # Never fail startup due to cleanup
+
+_run_startup_cleanup()
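The cache helpers are now re-exported at package level, so callers no longer need to reach into submodules. A minimal usage sketch (not part of the package; the node path is hypothetical):

    from pathlib import Path
    from comfy_env import resolve_env_path, cleanup_orphaned_envs, CACHE_DIR

    # Hypothetical custom-node checkout
    node_dir = Path.home() / "ComfyUI" / "custom_nodes" / "ComfyUI-SomeNode"

    env_path, site_packages, lib_dir = resolve_env_path(node_dir)
    if env_path is None:
        print(f"no environment found for {node_dir.name}")
    else:
        print(f"env at {env_path}, site-packages at {site_packages}")

    # Orphaned entries under ~/.comfy-env/envs can also be pruned explicitly
    removed = cleanup_orphaned_envs()
    print(f"removed {removed} orphaned env(s) from {CACHE_DIR}")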
comfy_env/cache.py
ADDED
@@ -0,0 +1,203 @@
+"""Central environment cache management."""
+
+import hashlib
+import os
+import shutil
+import sys
+from datetime import datetime
+from pathlib import Path
+from typing import Optional, Tuple, Callable
+
+# Import version
+try:
+    from . import __version__
+except ImportError:
+    __version__ = "0.0.0-dev"
+
+import tomli
+import tomli_w
+
+
+# Constants
+CACHE_DIR = Path.home() / ".comfy-env" / "envs"
+MARKER_FILE = ".comfy-env-marker.toml"
+METADATA_FILE = ".comfy-env-metadata.toml"
+
+
+def get_cache_dir() -> Path:
+    """Get central cache directory, create if needed."""
+    CACHE_DIR.mkdir(parents=True, exist_ok=True)
+    return CACHE_DIR
+
+
+def compute_config_hash(config_path: Path) -> str:
+    """Compute hash of comfy-env.toml content (first 8 chars of SHA256)."""
+    content = config_path.read_bytes()
+    return hashlib.sha256(content).hexdigest()[:8]
+
+
+def sanitize_name(name: str) -> str:
+    """Sanitize a name for use in filesystem paths."""
+    # Lowercase and replace problematic chars
+    name = name.lower()
+    for prefix in ("comfyui-", "comfyui_"):
+        if name.startswith(prefix):
+            name = name[len(prefix):]
+    return name.replace("-", "_").replace(" ", "_")
+
+
+def get_env_name(node_dir: Path, config_path: Path) -> str:
+    """Generate env name: <nodename>_<subfolder>_<hash>."""
+    # Get node name
+    node_name = sanitize_name(node_dir.name)
+
+    # Get subfolder (relative path from node_dir to config parent)
+    config_parent = config_path.parent
+    if config_parent == node_dir:
+        subfolder = ""
+    else:
+        try:
+            rel_path = config_parent.relative_to(node_dir)
+            subfolder = rel_path.as_posix().replace("/", "_")
+        except ValueError:
+            # config_path not under node_dir - use parent folder name
+            subfolder = sanitize_name(config_parent.name)
+
+    # Compute hash
+    config_hash = compute_config_hash(config_path)
+
+    return f"{node_name}_{subfolder}_{config_hash}"
+
+
+def get_central_env_path(node_dir: Path, config_path: Path) -> Path:
+    """Get path to central environment for this config."""
+    env_name = get_env_name(node_dir, config_path)
+    return get_cache_dir() / env_name
+
+
+def write_marker(config_path: Path, env_path: Path) -> None:
+    """Write marker file linking node to central env."""
+    marker_path = config_path.parent / MARKER_FILE
+    marker_data = {
+        "env": {
+            "name": env_path.name,
+            "path": str(env_path),
+            "config_hash": compute_config_hash(config_path),
+            "created": datetime.now().isoformat(),
+            "comfy_env_version": __version__,
+        }
+    }
+    marker_path.write_text(tomli_w.dumps(marker_data))
+
+
+def write_env_metadata(env_path: Path, marker_path: Path) -> None:
+    """Write metadata file for orphan detection."""
+    metadata_path = env_path / METADATA_FILE
+    metadata = {
+        "marker_path": str(marker_path),
+        "created": datetime.now().isoformat(),
+    }
+    metadata_path.write_text(tomli_w.dumps(metadata))
+
+
+def read_marker(marker_path: Path) -> Optional[dict]:
+    """Read marker file, return None if invalid/missing."""
+    if not marker_path.exists():
+        return None
+    try:
+        with open(marker_path, "rb") as f:
+            return tomli.load(f)
+    except Exception:
+        return None
+
+
+def read_env_metadata(env_path: Path) -> Optional[dict]:
+    """Read metadata file from env, return None if invalid/missing."""
+    metadata_path = env_path / METADATA_FILE
+    if not metadata_path.exists():
+        return None
+    try:
+        with open(metadata_path, "rb") as f:
+            return tomli.load(f)
+    except Exception:
+        return None
+
+
+def resolve_env_path(node_dir: Path) -> Tuple[Optional[Path], Optional[Path], Optional[Path]]:
+    """Resolve environment path. Returns (env_path, site_packages, lib_dir)."""
+    # 1. Check marker file -> central cache
+    marker_path = node_dir / MARKER_FILE
+    marker = read_marker(marker_path)
+    if marker and "env" in marker:
+        env_path = Path(marker["env"]["path"])
+        if env_path.exists():
+            return _get_env_paths(env_path)
+
+    # 2. Check _env_<name>
+    node_name = sanitize_name(node_dir.name)
+    env_name = f"_env_{node_name}"
+    local_env = node_dir / env_name
+    if local_env.exists():
+        return _get_env_paths(local_env)
+
+    # 3. Check .pixi/envs/default
+    pixi_env = node_dir / ".pixi" / "envs" / "default"
+    if pixi_env.exists():
+        return _get_env_paths(pixi_env)
+
+    # 4. Check .venv
+    venv_dir = node_dir / ".venv"
+    if venv_dir.exists():
+        return _get_env_paths(venv_dir)
+
+    return None, None, None
+
+
+def _get_env_paths(env_path: Path) -> Tuple[Path, Optional[Path], Optional[Path]]:
+    """Get site-packages and lib paths from an environment."""
+    import glob
+
+    if sys.platform == "win32":
+        site_packages = env_path / "Lib" / "site-packages"
+        lib_dir = env_path / "Library" / "bin"
+    else:
+        # Linux/Mac: lib/python*/site-packages
+        matches = glob.glob(str(env_path / "lib" / "python*" / "site-packages"))
+        site_packages = Path(matches[0]) if matches else None
+        lib_dir = env_path / "lib"
+
+    return env_path, site_packages, lib_dir
+
+
+def cleanup_orphaned_envs(log: Callable[[str], None] = print) -> int:
+    """Remove orphaned environments. Returns count cleaned."""
+    cache_dir = get_cache_dir()
+    if not cache_dir.exists():
+        return 0
+
+    cleaned = 0
+    for env_dir in cache_dir.iterdir():
+        if not env_dir.is_dir():
+            continue
+
+        # Skip if no metadata (might be manually created or old format)
+        metadata = read_env_metadata(env_dir)
+        if not metadata:
+            continue
+
+        # Check if marker file still exists
+        marker_path_str = metadata.get("marker_path", "")
+        if not marker_path_str:
+            continue
+
+        marker_path = Path(marker_path_str)
+        if not marker_path.exists():
+            # Marker gone = node was deleted = orphan
+            log(f"[comfy-env] Cleaning orphaned env: {env_dir.name}")
+            try:
+                shutil.rmtree(env_dir)
+                cleaned += 1
+            except Exception as e:
+                log(f"[comfy-env] Failed to cleanup {env_dir.name}: {e}")
+
+    return cleaned
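The marker/metadata pair is what makes the orphan sweep above possible: the node directory points at the cached env, and the env points back at the marker. A sketch of that round trip, assuming a comfy-env.toml already exists in a (hypothetical) node directory:

    from pathlib import Path
    from comfy_env.cache import (
        MARKER_FILE,
        get_central_env_path,
        resolve_env_path,
        write_env_metadata,
        write_marker,
    )

    node_dir = Path.home() / "ComfyUI" / "custom_nodes" / "ComfyUI-SomeNode"  # hypothetical
    config_path = node_dir / "comfy-env.toml"

    # Env name is derived from node name, config subfolder, and an 8-char config hash
    env_path = get_central_env_path(node_dir, config_path)
    env_path.mkdir(parents=True, exist_ok=True)

    # Link node -> env (marker) and env -> node (metadata, used by cleanup_orphaned_envs)
    write_marker(config_path, env_path)
    write_env_metadata(env_path, node_dir / MARKER_FILE)

    # Later lookups go through the marker first, then fall back to _env_*, .pixi, .venv
    env, site_packages, lib_dir = resolve_env_path(node_dir)
    assert env == env_path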
comfy_env/cli.py
CHANGED
@@ -309,7 +309,7 @@ def cmd_info(args) -> int:
 def cmd_doctor(args) -> int:
     """Handle doctor command."""
     from .install import verify_installation
-    from .config.parser import
+    from .config.parser import load_config, discover_config
 
     print("Running diagnostics...")
     print("=" * 40)
@@ -325,21 +325,19 @@ def cmd_doctor(args) -> int:
     if args.package:
         packages = [args.package]
     elif args.config:
-        config =
+        config = load_config(Path(args.config))
         if config:
-
+            # Get packages from pypi-dependencies
+            pypi_deps = config.pixi_passthrough.get("pypi-dependencies", {})
+            packages = list(pypi_deps.keys()) + config.cuda_packages
     else:
-        config =
+        config = discover_config(Path.cwd())
         if config:
-
+            pypi_deps = config.pixi_passthrough.get("pypi-dependencies", {})
+            packages = list(pypi_deps.keys()) + config.cuda_packages
 
     if packages:
-
-        for pkg in packages:
-            name = pkg.split("==")[0].split(">=")[0].split("[")[0]
-            pkg_names.append(name)
-
-        all_ok = verify_installation(pkg_names)
+        all_ok = verify_installation(packages)
         if all_ok:
             print("\nAll packages verified!")
             return 0
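For reference, the data flow cmd_doctor now relies on: package names are taken directly from the pypi-dependencies keys (already bare names) plus the CUDA package list, so the old version-specifier stripping is no longer needed. A standalone sketch of the same flow:

    from pathlib import Path
    from comfy_env.config.parser import discover_config
    from comfy_env.install import verify_installation

    config = discover_config(Path.cwd())
    if config:
        # Keys of [pypi-dependencies] are bare package names; no spec parsing needed
        pypi_deps = config.pixi_passthrough.get("pypi-dependencies", {})
        packages = list(pypi_deps.keys()) + config.cuda_packages
        all_ok = verify_installation(packages)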
comfy_env/config/__init__.py
CHANGED
@@ -1,18 +1,16 @@
-"""
-Config parsing for comfy-env.
+"""Config parsing for comfy-env."""
 
-
-
-
-
-
-
+from .parser import (
+    ComfyEnvConfig,
+    NodeReq,
+    load_config,
+    discover_config,
+    CONFIG_FILE_NAME,
+)
 
 __all__ = [
-    # Types
     "ComfyEnvConfig",
     "NodeReq",
-    # Parser
     "load_config",
     "discover_config",
    "CONFIG_FILE_NAME",
comfy_env/config/parser.py
CHANGED
@@ -1,100 +1,53 @@
-"""
-
-comfy-env.toml is a superset of pixi.toml. Custom sections we handle:
-- python = "3.11" - Python version for isolated envs
-- [cuda] packages = [...] - CUDA packages (triggers find-links + PyTorch detection)
-- [node_reqs] - Other ComfyUI nodes to clone
-
-Everything else passes through to pixi.toml directly.
-
-Example config:
-
-    python = "3.11"
-
-    [cuda]
-    packages = ["cumesh"]
-
-    [dependencies]
-    mesalib = "*"
-    cgal = "*"
-
-    [pypi-dependencies]
-    numpy = ">=1.21.0,<2"
-    trimesh = { version = ">=4.0.0", extras = ["easy"] }
-
-    [target.linux-64.pypi-dependencies]
-    embreex = "*"
+"""
+Configuration parsing for comfy-env.
 
-
-    SomeNode = "owner/repo"
+Loads comfy-env.toml (a superset of pixi.toml) and provides typed config objects.
 """
 
 import copy
 import sys
+from dataclasses import dataclass, field
 from pathlib import Path
 from typing import Optional, Dict, Any, List
+import tomli
 
-#
-if sys.version_info >= (3, 11):
-    import tomllib
-else:
-    try:
-        import tomli as tomllib
-    except ImportError:
-        tomllib = None # type: ignore
-
-from .types import ComfyEnvConfig, NodeReq
-
-
+# --- Types&Constants ---
 CONFIG_FILE_NAME = "comfy-env.toml"
 
-
-
-
+@dataclass
+class NodeReq:
+    """A node dependency (another ComfyUI custom node)."""
+    name: str
+    repo: str # GitHub repo, e.g., "owner/repo"
 
-
-
-
+@dataclass
+class ComfyEnvConfig:
+    """Configuration from comfy-env.toml."""
+    python: Optional[str] = None
+    cuda_packages: List[str] = field(default_factory=list)
+    apt_packages: List[str] = field(default_factory=list)
+    env_vars: Dict[str, str] = field(default_factory=dict)
+    node_reqs: List[NodeReq] = field(default_factory=list)
+    pixi_passthrough: Dict[str, Any] = field(default_factory=dict)
 
-
-
+    @property
+    def has_cuda(self) -> bool:
+        return bool(self.cuda_packages)
+# --- Types&Constants ---
 
-    Returns:
-        ComfyEnvConfig instance
-
-    Raises:
-        FileNotFoundError: If config file doesn't exist
-        ImportError: If tomli not installed (Python < 3.11)
-    """
-    if tomllib is None:
-        raise ImportError(
-            "TOML parsing requires tomli for Python < 3.11. "
-            "Install with: pip install tomli"
-        )
 
+def load_config(path: Path) -> ComfyEnvConfig:
+    """Load config from a TOML file."""
     path = Path(path)
     if not path.exists():
        raise FileNotFoundError(f"Config file not found: {path}")
-
     with open(path, "rb") as f:
-        data =
-
+        data = tomli.load(f)
     return _parse_config(data)
 
 
 def discover_config(node_dir: Path) -> Optional[ComfyEnvConfig]:
-    """
-    Find and load comfy-env.toml from a directory.
-
-    Args:
-        node_dir: Directory to search
-
-    Returns:
-        ComfyEnvConfig if found, None otherwise
-    """
-    if tomllib is None:
-        return None
-
+    """Find and load comfy-env.toml from a directory."""
     config_path = Path(node_dir) / CONFIG_FILE_NAME
     if config_path.exists():
         return load_config(config_path)
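With the types folded into the parser module, loading a config is a two-call affair. A small sketch, assuming a node directory that contains a comfy-env.toml (the path below is hypothetical):

    from pathlib import Path
    from comfy_env.config.parser import discover_config

    cfg = discover_config(Path("custom_nodes/ComfyUI-SomeNode"))  # hypothetical path
    if cfg is not None:
        print(cfg.python)        # e.g. "3.11" when set in the TOML
        print(cfg.has_cuda)      # True when [cuda] packages were listed
        # Anything pixi understands natively is kept verbatim for passthrough:
        print(cfg.pixi_passthrough.get("pypi-dependencies", {}))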
comfy_env/install.py
CHANGED
@@ -1,17 +1,21 @@
-"""
-Installation API for comfy-env.
-
-Example:
-    from comfy_env import install
-    install() # Auto-discovers comfy-env.toml and installs
-"""
+"""Installation API for comfy-env."""
 
 import inspect
+import os
 from pathlib import Path
 from typing import Callable, List, Optional, Set, Union
 
-from .config.
-
+from .config.parser import ComfyEnvConfig, NodeReq, load_config, discover_config
+
+
+# Environment variable to disable comfy-env isolation
+USE_COMFY_ENV_VAR = "USE_COMFY_ENV"
+
+
+def _is_comfy_env_enabled() -> bool:
+    """Check if isolation is enabled."""
+    val = os.environ.get(USE_COMFY_ENV_VAR, "1").lower()
+    return val not in ("0", "false", "no", "off")
 
 
 def install(
@@ -20,18 +24,7 @@ def install(
     log_callback: Optional[Callable[[str], None]] = None,
     dry_run: bool = False,
 ) -> bool:
-    """
-    Install dependencies from comfy-env.toml.
-
-    Args:
-        config: Optional path to comfy-env.toml. Auto-discovered if not provided.
-        node_dir: Optional node directory. Auto-discovered from caller if not provided.
-        log_callback: Optional callback for logging. Defaults to print.
-        dry_run: If True, show what would be installed without installing.
-
-    Returns:
-        True if installation succeeded.
-    """
+    """Install dependencies from comfy-env.toml."""
     # Auto-discover caller's directory if not provided
     if node_dir is None:
         frame = inspect.stack()[1]
@@ -67,11 +60,17 @@ def install(
     if cfg.node_reqs:
         _install_node_dependencies(cfg.node_reqs, node_dir, log, dry_run)
 
-    #
-
+    # Check if isolation is enabled
+    if _is_comfy_env_enabled():
+        # Install everything via pixi (isolated environment)
+        _install_via_pixi(cfg, node_dir, log, dry_run)
 
-
-
+        # Auto-discover and install isolated subdirectory environments
+        _install_isolated_subdirs(node_dir, log, dry_run)
+    else:
+        # Install directly to host Python (no isolation)
+        log("\n[comfy-env] Isolation disabled (USE_COMFY_ENV=0)")
+        _install_to_host_python(cfg, node_dir, log, dry_run)
 
     log("\nInstallation complete!")
     return True
@@ -291,6 +290,123 @@ def _install_via_pixi(
     pixi_install(cfg, node_dir, log)
 
 
+def _install_to_host_python(
+    cfg: ComfyEnvConfig,
+    node_dir: Path,
+    log: Callable[[str], None],
+    dry_run: bool,
+) -> None:
+    """Install packages directly to host Python (no isolation)."""
+    import shutil
+    import subprocess
+    import sys
+
+    from .pixi import CUDA_WHEELS_INDEX, find_wheel_url
+    from .pixi.cuda_detection import get_recommended_cuda_version
+
+    # Collect packages to install
+    pypi_deps = cfg.pixi_passthrough.get("pypi-dependencies", {})
+    conda_deps = cfg.pixi_passthrough.get("dependencies", {})
+
+    # Warn about conda dependencies (can't install without pixi)
+    # Filter out 'python' and 'pip' which are meta-dependencies
+    real_conda_deps = {k: v for k, v in conda_deps.items() if k not in ("python", "pip")}
+    if real_conda_deps:
+        log(f"\n[warning] Cannot install conda packages without isolation:")
+        for pkg in real_conda_deps:
+            log(f" - {pkg}")
+        log(" Set USE_COMFY_ENV=1 to enable isolated environments")
+
+    # Nothing to install?
+    if not pypi_deps and not cfg.cuda_packages:
+        log("No packages to install")
+        return
+
+    # Build pip install command
+    pip_packages = []
+
+    # Add pypi dependencies
+    for pkg, spec in pypi_deps.items():
+        if isinstance(spec, str):
+            if spec == "*":
+                pip_packages.append(pkg)
+            else:
+                pip_packages.append(f"{pkg}{spec}")
+        elif isinstance(spec, dict):
+            version = spec.get("version", "*")
+            extras = spec.get("extras", [])
+            if extras:
+                pkg_with_extras = f"{pkg}[{','.join(extras)}]"
+            else:
+                pkg_with_extras = pkg
+            if version == "*":
+                pip_packages.append(pkg_with_extras)
+            else:
+                pip_packages.append(f"{pkg_with_extras}{version}")
+
+    log(f"\nInstalling to host Python ({sys.executable}):")
+    if pip_packages:
+        log(f" PyPI packages: {len(pip_packages)}")
+    if cfg.cuda_packages:
+        log(f" CUDA packages: {', '.join(cfg.cuda_packages)}")
+
+    if dry_run:
+        if pip_packages:
+            log(f" Would install: {', '.join(pip_packages)}")
+        log("\n(dry run - no changes made)")
+        return
+
+    # Use uv if available, otherwise pip
+    use_uv = shutil.which("uv") is not None
+
+    # Install regular PyPI packages
+    if pip_packages:
+        if use_uv:
+            cmd = ["uv", "pip", "install", "--python", sys.executable] + pip_packages
+        else:
+            cmd = [sys.executable, "-m", "pip", "install"] + pip_packages
+
+        log(f" Running: {' '.join(cmd[:4])}...")
+        result = subprocess.run(cmd, capture_output=True, text=True)
+        if result.returncode != 0:
+            log(f" [error] pip install failed: {result.stderr.strip()[:200]}")
+        else:
+            log(f" Installed {len(pip_packages)} package(s)")
+
+    # Install CUDA packages from cuda-wheels
+    if cfg.cuda_packages:
+        cuda_version = get_recommended_cuda_version()
+        if not cuda_version:
+            log(" [warning] No CUDA detected, skipping CUDA packages")
+            return
+
+        # Get torch version for wheel matching
+        cuda_mm = ".".join(cuda_version.split(".")[:2])
+        from .pixi.core import CUDA_TORCH_MAP
+        torch_version = CUDA_TORCH_MAP.get(cuda_mm, "2.8")
+
+        py_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+        log(f" CUDA {cuda_version}, PyTorch {torch_version}, Python {py_version}")
+
+        for package in cfg.cuda_packages:
+            wheel_url = find_wheel_url(package, torch_version, cuda_version, py_version)
+            if not wheel_url:
+                log(f" [error] No wheel found for {package}")
+                continue
+
+            log(f" Installing {package}...")
+            if use_uv:
+                cmd = ["uv", "pip", "install", "--python", sys.executable, "--no-deps", wheel_url]
+            else:
+                cmd = [sys.executable, "-m", "pip", "install", "--no-deps", wheel_url]
+
+            result = subprocess.run(cmd, capture_output=True, text=True)
+            if result.returncode != 0:
+                log(f" [error] Failed to install {package}: {result.stderr.strip()[:200]}")
+            else:
+                log(f" Installed {package}")
+
+
 def _install_isolated_subdirs(
     node_dir: Path,
     log: Callable[[str], None],
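The opt-out path is controlled entirely by the environment variable introduced above; anything other than 0/false/no/off keeps isolation on. A sketch of both modes (dry runs only, so nothing is installed):

    import os
    from comfy_env import install, USE_COMFY_ENV_VAR

    # Default: dependencies resolved into a pixi-managed isolated environment
    install(dry_run=True)

    # Opt out: PyPI and CUDA wheels go straight to the host interpreter via uv/pip;
    # conda-only dependencies are reported as not installable without isolation
    os.environ[USE_COMFY_ENV_VAR] = "0"
    install(dry_run=True)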
comfy_env/isolation/__init__.py
CHANGED