comfy-env 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
comfy_env/__init__.py CHANGED
@@ -1,46 +1,4 @@
- """
- comfy-env: Environment management for ComfyUI custom nodes.
-
- This package provides:
- - CUDA wheel resolution and in-place installation (Type 2 nodes)
- - Process isolation with separate venvs (Type 1 nodes)
-
- ## Quick Start - In-Place Installation
-
-     from comfy_env import install
-
-     # Auto-discover config and install CUDA wheels
-     install()
-
-     # Or with explicit config
-     install(config="comfyui_env.toml")
-
- ## Quick Start - Process Isolation
-
-     from comfy_env.workers import get_worker, TorchMPWorker
-
-     # Same-venv isolation (zero-copy tensors)
-     worker = TorchMPWorker()
-     result = worker.call(my_gpu_function, image=tensor)
-
-     # Cross-venv isolation
-     from comfy_env.workers import PersistentVenvWorker
-     worker = PersistentVenvWorker(python="/path/to/venv/bin/python")
-     result = worker.call_module("my_module", "my_func", image=tensor)
-
- ## CLI
-
-     comfy-env install           # Install from config
-     comfy-env info              # Show environment info
-     comfy-env resolve pkg==1.0  # Show resolved wheel URL
-     comfy-env doctor            # Verify installation
-
- ## Legacy APIs (still supported)
-
- The @isolated decorator and WorkerBridge are still available.
- """
-
- __version__ = "0.0.11"
+ __version__ = "0.0.14"
 
  from .env.config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq
  from .env.config_file import (
@@ -51,7 +9,16 @@ from .env.config_file import (
51
9
  CONFIG_FILE_NAMES,
52
10
  )
53
11
  from .env.manager import IsolatedEnvManager
54
- from .env.detection import detect_cuda_version, detect_gpu_info, get_gpu_summary
12
+ from .env.cuda_gpu_detection import (
13
+ GPUInfo,
14
+ CUDAEnvironment,
15
+ detect_cuda_environment,
16
+ detect_cuda_version,
17
+ detect_gpu_info,
18
+ detect_gpus,
19
+ get_gpu_summary,
20
+ get_recommended_cuda_version,
21
+ )
55
22
  from .env.security import (
56
23
  normalize_env_name,
57
24
  validate_dependency,
@@ -134,9 +101,14 @@ __all__ = [
134
101
  "discover_config",
135
102
  "CONFIG_FILE_NAMES",
136
103
  # Detection
104
+ "GPUInfo",
105
+ "CUDAEnvironment",
106
+ "detect_cuda_environment",
137
107
  "detect_cuda_version",
138
108
  "detect_gpu_info",
109
+ "detect_gpus",
139
110
  "get_gpu_summary",
111
+ "get_recommended_cuda_version",
140
112
  # Security validation
141
113
  "normalize_env_name",
142
114
  "validate_dependency",
comfy_env/env/__init__.py CHANGED
@@ -1,8 +1,17 @@
  """Environment management for comfyui-isolation."""
 
- from .config import IsolatedEnv
+ from .config import IsolatedEnv, ToolConfig
  from .manager import IsolatedEnvManager
- from .detection import detect_cuda_version, detect_gpu_info, get_gpu_summary
+ from .cuda_gpu_detection import (
+     GPUInfo,
+     CUDAEnvironment,
+     detect_cuda_environment,
+     detect_cuda_version,
+     detect_gpu_info,
+     detect_gpus,
+     get_gpu_summary,
+     get_recommended_cuda_version,
+ )
  from .platform import get_platform, PlatformProvider, PlatformPaths
  from .security import (
      normalize_env_name,
@@ -15,9 +24,17 @@ from .security import (
  __all__ = [
      "IsolatedEnv",
      "IsolatedEnvManager",
+     "ToolConfig",
+     # GPU Detection
+     "GPUInfo",
+     "CUDAEnvironment",
+     "detect_cuda_environment",
      "detect_cuda_version",
      "detect_gpu_info",
+     "detect_gpus",
      "get_gpu_summary",
+     "get_recommended_cuda_version",
+     # Platform
      "get_platform",
      "PlatformProvider",
      "PlatformPaths",
comfy_env/env/config.py CHANGED
@@ -23,6 +23,14 @@ class NodeReq:
      repo: str  # GitHub repo path, e.g., "Kosinkadink/ComfyUI-VideoHelperSuite"
 
 
+ @dataclass
+ class ToolConfig:
+     """Configuration for an external tool like Blender."""
+     name: str
+     version: str = "latest"
+     install_dir: Optional[Path] = None
+
+
  @dataclass
  class EnvManagerConfig:
      """
@@ -35,10 +43,12 @@ class EnvManagerConfig:
      [envname.cuda] - CUDA packages for isolated env
      [envname.packages] - Regular packages for isolated env
      [node_reqs] - Node dependencies
+     [tools] - External tools (e.g., blender = "4.2")
      """
      local: LocalConfig = field(default_factory=LocalConfig)
      envs: Dict[str, "IsolatedEnv"] = field(default_factory=dict)
      node_reqs: List[NodeReq] = field(default_factory=list)
+     tools: Dict[str, ToolConfig] = field(default_factory=dict)
 
      @property
      def has_local(self) -> bool:
comfy_env/env/config_file.py CHANGED
@@ -63,8 +63,8 @@ else:
  except ImportError:
      tomllib = None  # type: ignore
 
- from .config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq
- from .detection import detect_cuda_version
+ from .config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq, ToolConfig
+ from .cuda_gpu_detection import detect_cuda_version
 
 
  # Config file name
@@ -332,7 +332,7 @@ def _substitute_vars(s: str, variables: Dict[str, str]) -> str:
  # =============================================================================
 
  # Reserved table names that are NOT isolated environments
- RESERVED_TABLES = {"local", "node_reqs", "env", "packages", "sources", "cuda", "variables", "worker"}
+ RESERVED_TABLES = {"local", "node_reqs", "env", "packages", "sources", "cuda", "variables", "worker", "tools"}
 
 
  def load_config(
@@ -429,11 +429,13 @@ def _parse_full_config(data: Dict[str, Any], base_dir: Path) -> EnvManagerConfig
      local = _parse_local_section(data.get("local", {}))
      envs = _parse_env_sections(data, base_dir)
      node_reqs = _parse_node_reqs(data.get("node_reqs", {}))
+     tools = _parse_tools_section(data.get("tools", {}))
 
      return EnvManagerConfig(
          local=local,
          envs=envs,
          node_reqs=node_reqs,
+         tools=tools,
      )
 
 
@@ -582,6 +584,35 @@ def _parse_node_reqs(node_reqs_data: Dict[str, Any]) -> List[NodeReq]:
      return reqs
 
 
+ def _parse_tools_section(tools_data: Dict[str, Any]) -> Dict[str, ToolConfig]:
+     """Parse [tools] section.
+
+     Supports:
+         [tools]
+         blender = "4.2"
+
+     Or extended:
+         [tools.blender]
+         version = "4.2"
+         install_dir = "/custom/path"
+     """
+     tools = {}
+
+     for name, value in tools_data.items():
+         if isinstance(value, str):
+             # Simple format: blender = "4.2"
+             tools[name] = ToolConfig(name=name, version=value)
+         elif isinstance(value, dict):
+             # Extended format: [tools.blender] with version, install_dir
+             version = value.get("version", "latest")
+             install_dir = value.get("install_dir")
+             if install_dir:
+                 install_dir = Path(install_dir)
+             tools[name] = ToolConfig(name=name, version=version, install_dir=install_dir)
+
+     return tools
+
+
 
  def _convert_simple_to_full(data: Dict[str, Any], base_dir: Path) -> EnvManagerConfig:
      """Convert simple config format to full EnvManagerConfig.
591
      # Parse using simple parser to get IsolatedEnv
      simple_env = _parse_config(data, base_dir)
 
+     # Parse tools section (shared between simple and full format)
+     tools = _parse_tools_section(data.get("tools", {}))
+
      # Check if this has explicit env settings (isolated venv) vs just CUDA packages (local install)
      env_section = data.get("env", {})
      has_explicit_env = bool(env_section.get("name") or env_section.get("python"))
@@ -601,6 +635,7 @@ def _convert_simple_to_full(data: Dict[str, Any], base_dir: Path) -> EnvManagerC
              local=LocalConfig(),
              envs={simple_env.name: simple_env},
              node_reqs=[],
+             tools=tools,
          )
      else:
          # Local CUDA packages only (no isolated venv)
@@ -619,4 +654,5 @@ def _convert_simple_to_full(data: Dict[str, Any], base_dir: Path) -> EnvManagerC
              ),
              envs={},
              node_reqs=[],
+             tools=tools,
          )
comfy_env/env/cuda_gpu_detection.py ADDED
@@ -0,0 +1,303 @@
+ """
+ Robust CUDA/GPU detection with multiple fallback methods.
+
+ Detection priority: NVML → PyTorch → nvidia-smi → sysfs → env vars
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import os
+ import re
+ import subprocess
+ import time
+ from dataclasses import dataclass, field
+ from pathlib import Path
+
+ logger = logging.getLogger(__name__)
+
+ CUDA_VERSION_ENV_VAR = "COMFY_ENV_CUDA_VERSION"
+
+ _cache: tuple[float, "CUDAEnvironment | None"] = (0, None)
+ CACHE_TTL = 60
+
+
+ @dataclass
+ class GPUInfo:
+     index: int
+     name: str
+     compute_capability: tuple[int, int]
+     architecture: str
+     vram_total_mb: int = 0
+     vram_free_mb: int = 0
+     uuid: str = ""
+     pci_bus_id: str = ""
+     driver_version: str = ""
+
+     def cc_str(self) -> str:
+         return f"{self.compute_capability[0]}.{self.compute_capability[1]}"
+
+     def sm_version(self) -> str:
+         return f"sm_{self.compute_capability[0]}{self.compute_capability[1]}"
+
+
+ @dataclass
+ class CUDAEnvironment:
+     gpus: list[GPUInfo] = field(default_factory=list)
+     driver_version: str = ""
+     cuda_runtime_version: str = ""
+     recommended_cuda: str = ""
+     detection_method: str = ""
+
+
+ COMPUTE_TO_ARCH = {
+     (5, 0): "Maxwell", (5, 2): "Maxwell", (5, 3): "Maxwell",
+     (6, 0): "Pascal", (6, 1): "Pascal", (6, 2): "Pascal",
+     (7, 0): "Volta", (7, 2): "Volta", (7, 5): "Turing",
+     (8, 0): "Ampere", (8, 6): "Ampere", (8, 7): "Ampere", (8, 9): "Ada",
+     (9, 0): "Hopper",
+     (10, 0): "Blackwell", (10, 1): "Blackwell", (10, 2): "Blackwell",
+ }
+
+
+ def _cc_to_arch(major: int, minor: int) -> str:
+     if arch := COMPUTE_TO_ARCH.get((major, minor)):
+         return arch
+     if major >= 10: return "Blackwell"
+     if major == 9: return "Hopper"
+     if major == 8: return "Ada" if minor >= 9 else "Ampere"
+     if major == 7: return "Turing" if minor >= 5 else "Volta"
+     if major == 6: return "Pascal"
+     return "Maxwell" if major == 5 else "Unknown"
+
+
+ def _parse_cc(s: str) -> tuple[int, int]:
+     try:
+         if "." in s:
+             p = s.split(".")
+             return (int(p[0]), int(p[1]))
+         if len(s) >= 2:
+             return (int(s[:-1]), int(s[-1]))
+     except (ValueError, IndexError):
+         pass
+     return (0, 0)
+
+
+ def _detect_nvml() -> list[GPUInfo] | None:
+     try:
+         import pynvml
+         pynvml.nvmlInit()
+         try:
+             count = pynvml.nvmlDeviceGetCount()
+             if not count:
+                 return None
+             gpus = []
+             for i in range(count):
+                 h = pynvml.nvmlDeviceGetHandleByIndex(i)
+                 name = pynvml.nvmlDeviceGetName(h)
+                 if isinstance(name, bytes): name = name.decode()
+                 cc = pynvml.nvmlDeviceGetCudaComputeCapability(h)
+                 mem = pynvml.nvmlDeviceGetMemoryInfo(h)
+                 gpus.append(GPUInfo(
+                     index=i, name=name, compute_capability=cc,
+                     architecture=_cc_to_arch(*cc),
+                     vram_total_mb=mem.total // (1024*1024),
+                     vram_free_mb=mem.free // (1024*1024),
+                 ))
+             return gpus
+         finally:
+             pynvml.nvmlShutdown()
+     except Exception:
+         return None
+
+
+ def _detect_torch() -> list[GPUInfo] | None:
+     try:
+         import torch
+         if not torch.cuda.is_available():
+             return None
+         gpus = []
+         for i in range(torch.cuda.device_count()):
+             p = torch.cuda.get_device_properties(i)
+             gpus.append(GPUInfo(
+                 index=i, name=p.name,
+                 compute_capability=(p.major, p.minor),
+                 architecture=_cc_to_arch(p.major, p.minor),
+                 vram_total_mb=p.total_memory // (1024*1024),
+             ))
+         return gpus if gpus else None
+     except Exception:
+         return None
+
+
+ def _detect_smi() -> list[GPUInfo] | None:
+     try:
+         r = subprocess.run(
+             ["nvidia-smi", "--query-gpu=index,name,uuid,pci.bus_id,compute_cap,memory.total,memory.free,driver_version",
+              "--format=csv,noheader,nounits"],
+             capture_output=True, text=True, timeout=10
+         )
+         if r.returncode != 0:
+             return None
+         gpus = []
+         for line in r.stdout.strip().split("\n"):
+             if not line.strip():
+                 continue
+             p = [x.strip() for x in line.split(",")]
+             if len(p) < 5:
+                 continue
+             cc = _parse_cc(p[4])
+             gpus.append(GPUInfo(
+                 index=int(p[0]) if p[0].isdigit() else len(gpus),
+                 name=p[1], uuid=p[2] if len(p) > 2 else "",
+                 pci_bus_id=p[3] if len(p) > 3 else "",
+                 compute_capability=cc, architecture=_cc_to_arch(*cc),
+                 vram_total_mb=int(p[5]) if len(p) > 5 and p[5].isdigit() else 0,
+                 vram_free_mb=int(p[6]) if len(p) > 6 and p[6].isdigit() else 0,
+                 driver_version=p[7] if len(p) > 7 else "",
+             ))
+         return gpus if gpus else None
+     except Exception:
+         return None
+
+
+ def _detect_sysfs() -> list[GPUInfo] | None:
+     try:
+         pci_path = Path("/sys/bus/pci/devices")
+         if not pci_path.exists():
+             return None
+         gpus = []
+         for d in sorted(pci_path.iterdir()):
+             vendor = (d / "vendor").read_text().strip().lower() if (d / "vendor").exists() else ""
+             if "10de" not in vendor:
+                 continue
+             cls = (d / "class").read_text().strip() if (d / "class").exists() else ""
+             if not (cls.startswith("0x0300") or cls.startswith("0x0302")):
+                 continue
+             gpus.append(GPUInfo(
+                 index=len(gpus), name=f"NVIDIA GPU", pci_bus_id=d.name,
+                 compute_capability=(0, 0), architecture="Unknown"
+             ))
+         return gpus if gpus else None
+     except Exception:
+         return None
+
+
+ def _get_driver_version() -> str:
+     try:
+         import pynvml
+         pynvml.nvmlInit()
+         v = pynvml.nvmlSystemGetDriverVersion()
+         pynvml.nvmlShutdown()
+         return v.decode() if isinstance(v, bytes) else v
+     except Exception:
+         pass
+     try:
+         r = subprocess.run(["nvidia-smi", "--query-gpu=driver_version", "--format=csv,noheader"],
+                            capture_output=True, text=True, timeout=5)
+         if r.returncode == 0:
+             return r.stdout.strip().split("\n")[0]
+     except Exception:
+         pass
+     return ""
+
+
+ def _get_cuda_version() -> str:
+     try:
+         import torch
+         if torch.cuda.is_available() and torch.version.cuda:
+             return torch.version.cuda
+     except Exception:
+         pass
+     try:
+         r = subprocess.run(["nvcc", "--version"], capture_output=True, text=True, timeout=5)
+         if m := re.search(r"release (\d+\.\d+)", r.stdout):
+             return m.group(1)
+     except Exception:
+         pass
+     return ""
+
+
+ def _recommended_cuda(gpus: list[GPUInfo]) -> str:
+     if override := os.environ.get(CUDA_VERSION_ENV_VAR, "").strip():
+         if "." not in override and len(override) >= 2:
+             return f"{override[:-1]}.{override[-1]}"
+         return override
+     if not gpus:
+         return ""
+     for gpu in gpus:
+         if gpu.compute_capability[0] >= 10:
+             return "12.8"  # Blackwell requires 12.8
+     for gpu in gpus:
+         cc = gpu.compute_capability
+         if cc[0] < 7 or (cc[0] == 7 and cc[1] < 5):
+             return "12.4"  # Legacy (Pascal) uses 12.4
+     return "12.8"  # Modern GPUs use 12.8
+
+
+ def detect_cuda_environment(force_refresh: bool = False) -> CUDAEnvironment:
+     global _cache
+     if not force_refresh and _cache[1] and time.time() - _cache[0] < CACHE_TTL:
+         return _cache[1]
+
+     gpus, method = None, "none"
+     for name, fn in [("nvml", _detect_nvml), ("torch", _detect_torch),
+                      ("smi", _detect_smi), ("sysfs", _detect_sysfs)]:
+         if gpus := fn():
+             method = name
+             break
+
+     env = CUDAEnvironment(
+         gpus=gpus or [],
+         driver_version=_get_driver_version(),
+         cuda_runtime_version=_get_cuda_version(),
+         recommended_cuda=_recommended_cuda(gpus or []),
+         detection_method=method,
+     )
+     _cache = (time.time(), env)
+     return env
+
+
+ def get_recommended_cuda_version() -> str | None:
+     if override := os.environ.get(CUDA_VERSION_ENV_VAR, "").strip():
+         if "." not in override and len(override) >= 2:
+             return f"{override[:-1]}.{override[-1]}"
+         return override
+     env = detect_cuda_environment()
+     return env.recommended_cuda or None
+
+
+ def detect_gpus() -> list[GPUInfo]:
+     return detect_cuda_environment().gpus
+
+
+ def detect_gpu_info() -> list[dict]:
+     """Return GPU info as list of dicts."""
+     from dataclasses import asdict
+     return [asdict(gpu) for gpu in detect_gpus()]
+
+
+ def get_gpu_summary() -> str:
+     """Human-readable GPU summary."""
+     env = detect_cuda_environment()
+     if not env.gpus:
+         override = os.environ.get(CUDA_VERSION_ENV_VAR)
+         if override:
+             return f"No NVIDIA GPU detected (using {CUDA_VERSION_ENV_VAR}={override})"
+         return f"No NVIDIA GPU detected (set {CUDA_VERSION_ENV_VAR} to override)"
+
+     lines = [f"Detection: {env.detection_method}"]
+     if env.driver_version:
+         lines.append(f"Driver: {env.driver_version}")
+     if env.cuda_runtime_version:
+         lines.append(f"CUDA: {env.cuda_runtime_version}")
+     lines.append(f"Recommended: CUDA {env.recommended_cuda}")
+     lines.append("")
+     for gpu in env.gpus:
+         vram = f"{gpu.vram_total_mb}MB" if gpu.vram_total_mb else "?"
+         lines.append(f"  GPU {gpu.index}: {gpu.name} ({gpu.sm_version()}) [{gpu.architecture}] {vram}")
+     return "\n".join(lines)
+
+
+ # Aliases
+ detect_cuda_version = get_recommended_cuda_version
comfy_env/env/manager.py CHANGED
@@ -12,7 +12,7 @@ from typing import Optional, Callable
 
  from .config import IsolatedEnv
  from .platform import get_platform, PlatformProvider
- from .detection import detect_cuda_version
+ from .cuda_gpu_detection import detect_cuda_version
  from .security import (
      normalize_env_name,
      validate_dependency,
@@ -475,7 +475,7 @@ class IsolatedEnvManager:
          url_template = source["url_template"]
          url = self._substitute_template(url_template, vars_dict)
 
-         self.log(f" Trying {source.get('name', 'unknown')}: {url[:80]}...")
+         self.log(f" Trying {source.get('name', 'unknown')}: {url}")
          result = subprocess.run(
              pip_args + ["--no-deps", url],
              capture_output=True, text=True,
@@ -484,7 +484,7 @@ class IsolatedEnvManager:
          if result.returncode == 0:
              return  # Success!
 
-         errors.append(f"{source.get('name', 'unknown')}: {result.stderr[:100]}")
+         errors.append(f"{source.get('name', 'unknown')}: {result.stderr.strip()}")
 
      # All sources failed
      raise RuntimeError(
comfy_env/env/platform/windows.py CHANGED
@@ -45,12 +45,7 @@ class WindowsPlatformProvider(PlatformProvider):
                      f"Running in {shell_env.upper()} environment.\n"
                      f"This package requires native Windows Python.\n"
                      f"Please use PowerShell, Command Prompt, or native Windows terminal.")
-
-         # Check Visual C++ Redistributable
-         vc_ok, vc_error = self._check_vc_redistributable()
-         if not vc_ok:
-             return (False, vc_error)
-
+         # Note: VC++ runtime is handled by msvc-runtime package, no system check needed
          return (True, None)
 
      def _detect_shell_environment(self) -> str:
@@ -67,94 +62,6 @@ class WindowsPlatformProvider(PlatformProvider):
 
          return 'native-windows'
 
-     def _find_vc_dlls(self) -> Dict[str, Optional[Path]]:
-         """Find VC++ runtime DLLs in common locations."""
-         required_dlls = ['vcruntime140.dll', 'msvcp140.dll']
-         found = {}
-
-         # Search locations in order of preference
-         search_paths = []
-
-         # 1. Current Python environment (conda/venv)
-         if hasattr(sys, 'base_prefix'):
-             search_paths.append(Path(sys.base_prefix) / 'Library' / 'bin')
-             search_paths.append(Path(sys.base_prefix) / 'DLLs')
-         if hasattr(sys, 'prefix'):
-             search_paths.append(Path(sys.prefix) / 'Library' / 'bin')
-             search_paths.append(Path(sys.prefix) / 'DLLs')
-
-         # 2. System directories
-         system_root = os.environ.get('SystemRoot', r'C:\Windows')
-         search_paths.append(Path(system_root) / 'System32')
-
-         # 3. Visual Studio redistributable directories
-         program_files = os.environ.get('ProgramFiles', r'C:\Program Files')
-         vc_redist = Path(program_files) / 'Microsoft Visual Studio' / '2022' / 'Community' / 'VC' / 'Redist' / 'MSVC'
-         if vc_redist.exists():
-             for version_dir in vc_redist.iterdir():
-                 search_paths.append(version_dir / 'x64' / 'Microsoft.VC143.CRT')
-
-         for dll_name in required_dlls:
-             found[dll_name] = None
-             for search_path in search_paths:
-                 dll_path = search_path / dll_name
-                 if dll_path.exists():
-                     found[dll_name] = dll_path
-                     break
-
-         return found
-
-     def bundle_vc_dlls_to_env(self, env_dir: Path) -> Tuple[bool, Optional[str]]:
-         """Bundle VC++ runtime DLLs into the isolated environment."""
-         required_dlls = ['vcruntime140.dll', 'msvcp140.dll']
-         found_dlls = self._find_vc_dlls()
-
-         # Check which DLLs are missing
-         missing = [dll for dll, path in found_dlls.items() if path is None]
-
-         if missing:
-             return (False,
-                     f"Could not find VC++ DLLs to bundle: {', '.join(missing)}\n\n"
-                     f"Please install Visual C++ Redistributable:\n"
-                     f" Download: https://aka.ms/vs/17/release/vc_redist.x64.exe\n"
-                     f"\nAfter installation, delete the environment and try again.")
-
-         # Copy DLLs to the environment's Scripts directory
-         scripts_dir = env_dir / 'Scripts'
-
-         copied = []
-         for dll_name, source_path in found_dlls.items():
-             if source_path:
-                 try:
-                     if scripts_dir.exists():
-                         scripts_target = scripts_dir / dll_name
-                         if not scripts_target.exists():
-                             shutil.copy2(source_path, scripts_target)
-                             copied.append(f"{dll_name} -> Scripts/")
-                 except (OSError, IOError) as e:
-                     return (False, f"Failed to copy {dll_name}: {e}")
-
-         return (True, None)
-
-     def _check_vc_redistributable(self) -> Tuple[bool, Optional[str]]:
-         """Check if Visual C++ Redistributable DLLs are available."""
-         required_dlls = ['vcruntime140.dll', 'msvcp140.dll']
-         found_dlls = self._find_vc_dlls()
-
-         missing = [dll for dll, path in found_dlls.items() if path is None]
-
-         if missing:
-             error_msg = (
-                 f"Visual C++ Redistributable DLLs not found!\n"
-                 f"\nMissing: {', '.join(missing)}\n"
-                 f"\nPlease install Visual C++ Redistributable for Visual Studio 2015-2022:\n"
-                 f"\n Download (64-bit): https://aka.ms/vs/17/release/vc_redist.x64.exe\n"
-                 f"\nAfter installation, restart your terminal and try again."
-             )
-             return (False, error_msg)
-
-         return (True, None)
-
      def make_executable(self, path: Path) -> None:
          # No-op on Windows - executables are determined by extension
          pass
comfy_env/install.py CHANGED
@@ -24,12 +24,13 @@ import sys
  from pathlib import Path
  from typing import Any, Callable, Dict, List, Optional, Union
 
- from .env.config import IsolatedEnv
- from .env.config_file import discover_env_config, load_env_from_file
+ from .env.config import IsolatedEnv, ToolConfig
+ from .env.config_file import discover_env_config, load_env_from_file, load_config, discover_config
  from .env.manager import IsolatedEnvManager
  from .errors import CUDANotFoundError, DependencyError, InstallError, WheelNotFoundError
  from .registry import PACKAGE_REGISTRY, get_cuda_short2, is_registered
  from .resolver import RuntimeEnv, WheelResolver, parse_wheel_requirement
+ from .tools import install_tool
 
 
  def install(
@@ -77,20 +78,38 @@ def install(
      log = log_callback or print
      node_dir = Path(node_dir) if node_dir else Path.cwd()
 
-     # Load configuration
-     env_config = _load_config(config, node_dir)
-     if env_config is None:
+     # Load full configuration (includes tools)
+     full_config = _load_full_config(config, node_dir)
+     if full_config is None:
          raise FileNotFoundError(
              "No configuration file found. "
              "Create comfyui_env.toml or specify path explicitly."
          )
 
-     log(f"Found configuration: {env_config.name}")
+     # Install tools first (e.g., Blender)
+     if full_config.tools:
+         log(f"Installing {len(full_config.tools)} tool(s)...")
+         for name, tool_config in full_config.tools.items():
+             if dry_run:
+                 log(f" Would install {name} {tool_config.version}")
+             else:
+                 install_tool(tool_config, log, node_dir)
+
+     # Get environment config
+     env_config = full_config.default_env
+     if env_config is None and not full_config.has_local:
+         log("No packages to install")
+         return True
+
+     if env_config:
+         log(f"Found configuration: {env_config.name}")
 
-     if mode == "isolated":
+     if mode == "isolated" and env_config:
          return _install_isolated(env_config, node_dir, log, dry_run)
-     else:
+     elif env_config:
          return _install_inplace(env_config, node_dir, log, dry_run, verify_wheels)
+     else:
+         return True
 
 
  def _load_config(
@@ -107,6 +126,17 @@ def _load_config(
      return discover_env_config(node_dir)
 
 
+ def _load_full_config(config: Optional[Union[str, Path]], node_dir: Path):
+     """Load full EnvManagerConfig (includes tools)."""
+     from .env.config import EnvManagerConfig
+     if config is not None:
+         config_path = Path(config)
+         if not config_path.is_absolute():
+             config_path = node_dir / config_path
+         return load_config(config_path, node_dir)
+     return discover_config(node_dir)
+
+
  def _install_isolated(
      env_config: IsolatedEnv,
      node_dir: Path,
@@ -140,6 +170,12 @@ def _install_inplace(
      """Install in-place into current environment using the package registry."""
      log("Installing in-place mode")
 
+     # Install MSVC runtime on Windows (required for CUDA/PyTorch native extensions)
+     if sys.platform == "win32":
+         log("Installing MSVC runtime for Windows...")
+         if not dry_run:
+             _pip_install(["msvc-runtime"], no_deps=False, log=log)
+
      # Detect runtime environment
      env = RuntimeEnv.detect()
      log(f"Detected environment: {env}")
comfy_env/resolver.py CHANGED
@@ -26,7 +26,7 @@ from pathlib import Path
  from typing import Dict, List, Optional, Tuple
  from urllib.parse import urlparse
 
- from .env.detection import detect_cuda_version, detect_gpu_info
+ from .env.cuda_gpu_detection import detect_cuda_version, detect_gpu_info
 
 
  @dataclass
@@ -137,11 +137,14 @@ class RuntimeEnv:
          "py_version": self.python_version,
          "py_short": self.python_short,
          "py_minor": py_minor,
+         "py_tag": f"cp{self.python_short}",  # e.g., cp310, cp311
      }
 
      if self.cuda_version:
          result["cuda_version"] = self.cuda_version
          result["cuda_short"] = self.cuda_short
+         # cuda_major: just the major version (e.g., "12" from "12.8")
+         result["cuda_major"] = self.cuda_version.split(".")[0]
 
      if self.torch_version:
          result["torch_version"] = self.torch_version
comfy_env/tools.py ADDED
@@ -0,0 +1,221 @@
+ """
+ Tool installers for external dependencies like Blender.
+
+ Usage in comfy-env.toml:
+     [tools]
+     blender = "4.2"
+ """
+
+ import os
+ import platform
+ import shutil
+ import subprocess
+ import tarfile
+ import zipfile
+ from pathlib import Path
+ from typing import Callable, Optional
+ from urllib.request import urlretrieve
+
+ from .env.config import ToolConfig
+
+ # Default install location
+ DEFAULT_TOOLS_DIR = Path.home() / ".comfy-env" / "tools"
+
+ # Blender download URLs by platform and version
+ BLENDER_DOWNLOADS = {
+     "4.2": {
+         "linux": "https://download.blender.org/release/Blender4.2/blender-4.2.0-linux-x64.tar.xz",
+         "windows": "https://download.blender.org/release/Blender4.2/blender-4.2.0-windows-x64.zip",
+         "darwin": "https://download.blender.org/release/Blender4.2/blender-4.2.0-macos-arm64.dmg",
+     },
+     "4.3": {
+         "linux": "https://download.blender.org/release/Blender4.3/blender-4.3.0-linux-x64.tar.xz",
+         "windows": "https://download.blender.org/release/Blender4.3/blender-4.3.0-windows-x64.zip",
+         "darwin": "https://download.blender.org/release/Blender4.3/blender-4.3.0-macos-arm64.dmg",
+     },
+ }
+
+
+ def get_platform() -> str:
+     """Get current platform name."""
+     system = platform.system().lower()
+     if system == "linux":
+         return "linux"
+     elif system == "windows":
+         return "windows"
+     elif system == "darwin":
+         return "darwin"
+     return system
+
+
+ def install_tool(
+     config: ToolConfig,
+     log: Callable[[str], None] = print,
+     base_dir: Optional[Path] = None,
+ ) -> Optional[Path]:
+     """Install a tool based on its config.
+
+     Args:
+         config: Tool configuration
+         log: Logging callback
+         base_dir: Base directory for tools. Tools install to base_dir/tools/<name>/
+     """
+     if config.name.lower() == "blender":
+         return install_blender(config.version, log, config.install_dir or base_dir)
+     else:
+         log(f"Unknown tool: {config.name}")
+         return None
+
+
+ def install_blender(
+     version: str = "4.2",
+     log: Callable[[str], None] = print,
+     base_dir: Optional[Path] = None,
+ ) -> Optional[Path]:
+     """
+     Install Blender to the specified directory.
+
+     Args:
+         version: Blender version to install (e.g., "4.2")
+         log: Logging callback
+         base_dir: Base directory. Blender installs to base_dir/tools/blender/
+             If None, uses ~/.comfy-env/tools/blender/
+
+     Returns path to blender executable if successful.
+     """
+     plat = get_platform()
+     if base_dir:
+         install_dir = base_dir / "tools" / "blender"
+     else:
+         install_dir = DEFAULT_TOOLS_DIR / "blender"
+
+     # Check if already installed
+     exe = find_blender(install_dir)
+     if exe:
+         log(f"Blender already installed: {exe}")
+         return exe
+
+     # Get download URL
+     if version not in BLENDER_DOWNLOADS:
+         log(f"Unknown Blender version: {version}. Available: {list(BLENDER_DOWNLOADS.keys())}")
+         return None
+
+     urls = BLENDER_DOWNLOADS[version]
+     if plat not in urls:
+         log(f"Blender {version} not available for {plat}")
+         return None
+
+     url = urls[plat]
+     log(f"Downloading Blender {version} for {plat}...")
+
+     install_dir.mkdir(parents=True, exist_ok=True)
+     archive_name = url.split("/")[-1]
+     archive_path = install_dir / archive_name
+
+     try:
+         # Download
+         urlretrieve(url, archive_path)
+         log(f"Downloaded to {archive_path}")
+
+         # Extract
+         log("Extracting...")
+         if archive_name.endswith(".tar.xz"):
+             with tarfile.open(archive_path, "r:xz") as tar:
+                 tar.extractall(install_dir)
+         elif archive_name.endswith(".zip"):
+             with zipfile.ZipFile(archive_path, "r") as zf:
+                 zf.extractall(install_dir)
+         elif archive_name.endswith(".dmg"):
+             # macOS DMG requires special handling
+             log("macOS DMG installation not yet automated. Please install manually.")
+             archive_path.unlink()
+             return None
+
+         # Clean up archive
+         archive_path.unlink()
+
+         # Find the executable
+         exe = find_blender(install_dir)
+         if exe:
+             log(f"Blender installed: {exe}")
+             return exe
+         else:
+             log("Blender extracted but executable not found")
+             return None
+
+     except Exception as e:
+         log(f"Failed to install Blender: {e}")
+         if archive_path.exists():
+             archive_path.unlink()
+         return None
+
+
+ def find_blender(search_dir: Optional[Path] = None) -> Optional[Path]:
+     """Find Blender executable."""
+     # Check PATH first
+     blender_in_path = shutil.which("blender")
+     if blender_in_path:
+         return Path(blender_in_path)
+
+     # Check common locations
+     plat = get_platform()
+     search_paths = []
+
+     if search_dir:
+         search_paths.append(search_dir)
+
+     search_paths.append(DEFAULT_TOOLS_DIR / "blender")
+
+     if plat == "windows":
+         search_paths.extend([
+             Path(os.environ.get("ProgramFiles", "C:/Program Files")) / "Blender Foundation",
+             Path.home() / "AppData" / "Local" / "Blender Foundation",
+         ])
+     elif plat == "linux":
+         search_paths.extend([
+             Path("/opt/blender"),
+             Path.home() / "blender",
+         ])
+     elif plat == "darwin":
+         search_paths.append(Path("/Applications/Blender.app/Contents/MacOS"))
+
+     exe_name = "blender.exe" if plat == "windows" else "blender"
+
+     for base in search_paths:
+         if not base.exists():
+             continue
+         # Direct check
+         exe = base / exe_name
+         if exe.exists():
+             return exe
+         # Search subdirectories (for extracted archives like blender-4.2.0-linux-x64/)
+         for subdir in base.iterdir():
+             if subdir.is_dir():
+                 exe = subdir / exe_name
+                 if exe.exists():
+                     return exe
+
+     return None
+
+
+ def ensure_blender(
+     version: str = "4.2",
+     log: Callable[[str], None] = print,
+     base_dir: Optional[Path] = None,
+ ) -> Optional[Path]:
+     """Ensure Blender is installed, installing if necessary.
+
+     Args:
+         version: Blender version to install
+         log: Logging callback
+         base_dir: Base directory. Searches/installs in base_dir/tools/blender/
+     """
+     if base_dir:
+         search_dir = base_dir / "tools" / "blender"
+     else:
+         search_dir = DEFAULT_TOOLS_DIR / "blender"
+
+     exe = find_blender(search_dir)
+     if exe:
+         return exe
+     return install_blender(version, log, base_dir)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: comfy-env
- Version: 0.0.13
+ Version: 0.0.15
  Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
  Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
  Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
@@ -1,21 +1,22 @@
- comfy_env/__init__.py,sha256=1EjSpAK20Z6RAXKn3onZwj2a_FIctG0E_LhWlE7SgIs,4250
+ comfy_env/__init__.py,sha256=76gIAh7qFff_v_bAolXVzuWzcgvD3bp-yQGCNzba_Iw,3287
  comfy_env/cli.py,sha256=q4y_tlPyqKMZhge7XeO9VdbFVZ4dl9LZsgnnTVQYXD4,15979
  comfy_env/decorator.py,sha256=daFR5aLzshkmo5sRKhSGPcTUgIUWml7Gs6A1bfnDuyc,15718
  comfy_env/errors.py,sha256=8hN8NDlo8oBUdapc-eT3ZluigI5VBzfqsSBvQdfWlz4,9943
- comfy_env/install.py,sha256=q8x7sQmCWMQyUjeBcbrg39eAP3ZPars54iepupMe2pE,18922
+ comfy_env/install.py,sha256=fXsybbqobs_WwEEuuXgi__zQbb4hBIlZO0apiGWFb20,20365
  comfy_env/registry.py,sha256=uFCtGmWYvwGCqObXgzmArX7o5JsFNsHXxayofk3m6no,2569
- comfy_env/resolver.py,sha256=AC_xizQARVdmc8ZpGrIPQd-oA6Gv8w49OsHKltRAq5o,12251
- comfy_env/env/__init__.py,sha256=sybOBrxJCfL4Xry9NNd5xwn9hXIHudXlXDa7SpJkPCE,811
- comfy_env/env/config.py,sha256=R8JyE5iQLHKgnxXOGA8SAI7iu2eYSfXn-MsaqHoU2_A,5667
- comfy_env/env/config_file.py,sha256=6Mh2VsNvyFJuTL4_vOX0sG3Q5SMHLI1P97aY8FFvwnk,20696
- comfy_env/env/detection.py,sha256=L4kXKGs9j7Vw4Qeh6Zw5PfVZW0dN2zuJqojDPgINIhA,5653
- comfy_env/env/manager.py,sha256=MRmqg3Qw0IYUwR6FtM3ZIcGZvWcnKDyO8xbzIAxnwCk,24782
+ comfy_env/resolver.py,sha256=l-AnmCE1puG6CvdpDB-KrsfG_cn_3uO2DryYizUnG_4,12474
+ comfy_env/tools.py,sha256=mFNB_uq64ON5hlreH_0wTLONahDo3pBHxhQYTcTHxXE,6554
+ comfy_env/env/__init__.py,sha256=imQdoQEQvrRT-QDtyNpFlkVbm2fBzgACdpQwRPd09fI,1157
+ comfy_env/env/config.py,sha256=6KZPhRiW8ShhuDnX-e6_yjZHMdO_xBm02w00Q3NBJXM,5980
+ comfy_env/env/config_file.py,sha256=tJ9xfmf2RghzK6PUxwTrhpvqV_5pecjC6IYYBoZ0U58,21890
+ comfy_env/env/cuda_gpu_detection.py,sha256=YLuXUdWg6FeKdNyLlQAHPlveg4rTenXJ2VbeAaEi9QE,9755
+ comfy_env/env/manager.py,sha256=bbV1MpURNGuBJ1sSWg_2oSU0J-dW-FhBCuHHHQxgrSM,24785
  comfy_env/env/security.py,sha256=dNSitAnfBNVdvxgBBntYw33AJaCs_S1MHb7KJhAVYzM,8171
  comfy_env/env/platform/__init__.py,sha256=Nb5MPZIEeanSMEWwqU4p4bnEKTJn1tWcwobnhq9x9IY,614
  comfy_env/env/platform/base.py,sha256=iS0ptTTVjXRwPU4qWUdvHI7jteuzxGSjWr5BUQ7hGiU,2453
  comfy_env/env/platform/darwin.py,sha256=HK3VkLT6DfesAnIXwx2IaUFHTBclF0xTQnC7azWY6Kc,1552
  comfy_env/env/platform/linux.py,sha256=xLp8FEbFqZLQrzIZBI9z3C4g23Ab1ASTHLsXDzsdCoA,2062
- comfy_env/env/platform/windows.py,sha256=nD1-bKU2rGmEJlS-cc5yWXMSA51YQtVupn-lQEO5UYA,14840
+ comfy_env/env/platform/windows.py,sha256=FCOCgpzGzorY9-HueMlJUR8DxM2eH-cj9iZk6K026Is,10891
  comfy_env/ipc/__init__.py,sha256=pTjgJn5YJxLXmEvuKh3lkCEJQs-6W6_F01jfkFMUi0c,1375
  comfy_env/ipc/bridge.py,sha256=zcyN3xzV4WWBrBFNwCniPBR58dLCg46-k9TtyW5U000,16437
  comfy_env/ipc/protocol.py,sha256=gfWe5yEDUn4QWhcdWFcxn40GqxlW1Uf23j0edOzPPng,7951
@@ -32,8 +33,8 @@ comfy_env/workers/tensor_utils.py,sha256=TCuOAjJymrSbkgfyvcKtQ_KbVWTqSwP9VH_bCaF
  comfy_env/workers/torch_mp.py,sha256=DsfxE3LBAWEuGtk-p-YL0UhVQ7VDh73KT_TFRxYN4-Q,12563
  comfy_env/workers/venv.py,sha256=_ekHfZPqBIPY08DjqiXm6cTBQH4DrbxRWR3AAv3mit8,31589
  comfy_env/wheel_sources.yml,sha256=ubVuQllCQGkZhLNQaG54divCwn0zLzYg4turzhnIZQ8,7150
- comfy_env-0.0.13.dist-info/METADATA,sha256=FRLYYBNxWwLm5I3vWRVxT1LYp5wvW13ibZ95PVdOaU4,5399
- comfy_env-0.0.13.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- comfy_env-0.0.13.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
- comfy_env-0.0.13.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
- comfy_env-0.0.13.dist-info/RECORD,,
+ comfy_env-0.0.15.dist-info/METADATA,sha256=Fn5bJWsO2kt4UPmcCj44Oybsnd4msFl1YV-TCMf6eXQ,5399
+ comfy_env-0.0.15.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ comfy_env-0.0.15.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
+ comfy_env-0.0.15.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
+ comfy_env-0.0.15.dist-info/RECORD,,
comfy_env/env/detection.py DELETED
@@ -1,183 +0,0 @@
- """
- GPU detection for automatic CUDA version selection.
-
- Detects Blackwell GPUs (RTX 50xx, B100, B200) which require CUDA 12.8,
- vs older GPUs which use CUDA 12.4.
-
- This runs BEFORE PyTorch is installed, so we use nvidia-smi directly.
- """
-
- import os
- import subprocess
- from typing import List, Dict, Optional
-
-
- # Environment variable to override CUDA version detection
- # Useful for CI environments without GPU
- CUDA_VERSION_ENV_VAR = "COMFY_ENV_CUDA_VERSION"
-
-
- def detect_gpu_info() -> List[Dict[str, str]]:
-     """
-     Detect GPU name and compute capability using nvidia-smi.
-
-     Returns:
-         List of dicts with 'name' and 'compute_cap' keys.
-         Empty list if detection fails.
-     """
-     try:
-         result = subprocess.run(
-             ["nvidia-smi", "--query-gpu=name,compute_cap", "--format=csv,noheader"],
-             capture_output=True,
-             text=True,
-             timeout=10
-         )
-         if result.returncode == 0:
-             gpus = []
-             for line in result.stdout.strip().split('\n'):
-                 if not line.strip():
-                     continue
-                 parts = [p.strip() for p in line.split(',')]
-                 name = parts[0] if parts else "Unknown"
-                 cc = parts[1] if len(parts) > 1 else "0.0"
-                 gpus.append({"name": name, "compute_cap": cc})
-             return gpus
-     except FileNotFoundError:
-         # nvidia-smi not found - no NVIDIA GPU or driver not installed
-         pass
-     except subprocess.TimeoutExpired:
-         pass
-     except Exception:
-         pass
-     return []
-
-
- def is_blackwell_gpu(name: str, compute_cap: str) -> bool:
-     """
-     Check if a GPU is Blackwell architecture.
-
-     Args:
-         name: GPU name from nvidia-smi
-         compute_cap: Compute capability string (e.g., "8.9", "12.0")
-
-     Returns:
-         True if Blackwell (requires CUDA 12.8)
-     """
-     name_upper = name.upper()
-
-     # Check by name patterns
-     blackwell_patterns = [
-         "RTX 50",  # RTX 5090, 5080, 5070, etc.
-         "RTX50",   # Without space
-         "B100",    # Datacenter Blackwell
-         "B200",    # Datacenter Blackwell
-         "GB202",   # Blackwell die
-         "GB203",
-         "GB205",
-         "GB206",
-         "GB207",
-     ]
-
-     if any(pattern in name_upper for pattern in blackwell_patterns):
-         return True
-
-     # Check by compute capability (10.0+ = Blackwell)
-     try:
-         cc = float(compute_cap)
-         if cc >= 10.0:
-             return True
-     except (ValueError, TypeError):
-         pass
-
-     return False
-
-
- def is_legacy_gpu(compute_cap: str) -> bool:
-     """
-     Check if GPU is Pascal or older (requires legacy CUDA/PyTorch).
-
-     Args:
-         compute_cap: Compute capability string (e.g., "6.1", "7.5")
-
-     Returns:
-         True if Pascal or older (compute < 7.5)
-     """
-     try:
-         cc = float(compute_cap)
-         return cc < 7.5  # Turing starts at 7.5
-     except (ValueError, TypeError):
-         return False
-
-
- def detect_cuda_version() -> Optional[str]:
-     """
-     Get recommended CUDA version based on detected GPU.
-
-     Returns:
-         "12.4" for Pascal or older (compute < 7.5),
-         "12.8" for Turing or newer (compute >= 7.5),
-         None if no GPU detected and no env var override.
-
-     Environment Variable Override:
-         Set COMFY_ENV_CUDA_VERSION to override auto-detection.
-         Useful for CI environments without GPU.
-         Example: COMFY_ENV_CUDA_VERSION=12.8
-
-     GPU Architecture Reference:
-         - Pascal (GTX 10xx, P100): compute 6.0-6.1 → CUDA 12.4
-         - Turing (RTX 20xx, T4): compute 7.5 → CUDA 12.8
-         - Ampere (RTX 30xx, A100): compute 8.0-8.6 → CUDA 12.8
-         - Ada (RTX 40xx, L40): compute 8.9 → CUDA 12.8
-         - Hopper (H100): compute 9.0 → CUDA 12.8
-         - Blackwell (RTX 50xx, B100/B200): compute 10.0+ → CUDA 12.8
-     """
-     gpus = detect_gpu_info()
-     if not gpus:
-         # No GPU detected - check for env var override
-         override = os.environ.get(CUDA_VERSION_ENV_VAR)
-         if override:
-             # Normalize format (e.g., "12.8" or "128" -> "12.8")
-             override = override.strip()
-             if override and '.' not in override and len(override) >= 2:
-                 # Convert "128" -> "12.8"
-                 override = f"{override[:-1]}.{override[-1]}"
-             return override
-         return None
-
-     # Check if any GPU is legacy (Pascal or older)
-     for gpu in gpus:
-         if is_legacy_gpu(gpu.get("compute_cap", "0.0")):
-             return "12.4"
-
-     # Turing or newer - use modern stack
-     return "12.8"
-
-
- def get_gpu_summary() -> str:
-     """
-     Get a human-readable summary of detected GPUs.
-
-     Returns:
-         Summary string for logging.
-     """
-     gpus = detect_gpu_info()
-
-     if not gpus:
-         override = os.environ.get(CUDA_VERSION_ENV_VAR)
-         if override:
-             return f"No NVIDIA GPU detected (using {CUDA_VERSION_ENV_VAR}={override})"
-         return f"No NVIDIA GPU detected (set {CUDA_VERSION_ENV_VAR} to override)"
-
-     lines = []
-     for i, gpu in enumerate(gpus):
-         cc = gpu.get("compute_cap", "0.0")
-         is_legacy = is_legacy_gpu(cc)
-         if is_legacy:
-             tag = " [Pascal - CUDA 12.4]"
-         elif is_blackwell_gpu(gpu["name"], cc):
-             tag = " [Blackwell - CUDA 12.8]"
-         else:
-             tag = " [CUDA 12.8]"
-         lines.append(f" GPU {i}: {gpu['name']} (sm_{cc.replace('.', '')}){tag}")
-
-     return "\n".join(lines)