comfy-env 0.1.14__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. comfy_env/__init__.py +115 -62
  2. comfy_env/cli.py +89 -319
  3. comfy_env/config/__init__.py +18 -8
  4. comfy_env/config/parser.py +21 -122
  5. comfy_env/config/types.py +37 -70
  6. comfy_env/detection/__init__.py +77 -0
  7. comfy_env/detection/cuda.py +61 -0
  8. comfy_env/detection/gpu.py +230 -0
  9. comfy_env/detection/platform.py +70 -0
  10. comfy_env/detection/runtime.py +103 -0
  11. comfy_env/environment/__init__.py +53 -0
  12. comfy_env/environment/cache.py +141 -0
  13. comfy_env/environment/libomp.py +41 -0
  14. comfy_env/environment/paths.py +38 -0
  15. comfy_env/environment/setup.py +88 -0
  16. comfy_env/install.py +163 -249
  17. comfy_env/isolation/__init__.py +33 -2
  18. comfy_env/isolation/tensor_utils.py +83 -0
  19. comfy_env/isolation/workers/__init__.py +16 -0
  20. comfy_env/{workers → isolation/workers}/mp.py +1 -1
  21. comfy_env/{workers → isolation/workers}/subprocess.py +2 -2
  22. comfy_env/isolation/wrap.py +149 -409
  23. comfy_env/packages/__init__.py +60 -0
  24. comfy_env/packages/apt.py +36 -0
  25. comfy_env/packages/cuda_wheels.py +97 -0
  26. comfy_env/packages/node_dependencies.py +77 -0
  27. comfy_env/packages/pixi.py +85 -0
  28. comfy_env/packages/toml_generator.py +88 -0
  29. comfy_env-0.1.16.dist-info/METADATA +279 -0
  30. comfy_env-0.1.16.dist-info/RECORD +36 -0
  31. comfy_env/cache.py +0 -331
  32. comfy_env/errors.py +0 -293
  33. comfy_env/nodes.py +0 -187
  34. comfy_env/pixi/__init__.py +0 -48
  35. comfy_env/pixi/core.py +0 -588
  36. comfy_env/pixi/cuda_detection.py +0 -303
  37. comfy_env/pixi/platform/__init__.py +0 -21
  38. comfy_env/pixi/platform/base.py +0 -96
  39. comfy_env/pixi/platform/darwin.py +0 -53
  40. comfy_env/pixi/platform/linux.py +0 -68
  41. comfy_env/pixi/platform/windows.py +0 -284
  42. comfy_env/pixi/resolver.py +0 -198
  43. comfy_env/prestartup.py +0 -192
  44. comfy_env/workers/__init__.py +0 -38
  45. comfy_env/workers/tensor_utils.py +0 -188
  46. comfy_env-0.1.14.dist-info/METADATA +0 -291
  47. comfy_env-0.1.14.dist-info/RECORD +0 -33
  48. /comfy_env/{workers → isolation/workers}/base.py +0 -0
  49. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/WHEEL +0 -0
  50. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/entry_points.txt +0 -0
  51. {comfy_env-0.1.14.dist-info → comfy_env-0.1.16.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,60 @@
1
+ """
2
+ Packages layer - Package installation with side effects.
3
+
4
+ Handles pixi, CUDA wheels, apt packages, and node dependencies.
5
+ """
6
+
7
+ from .pixi import (
8
+ PIXI_VERSION,
9
+ ensure_pixi,
10
+ get_pixi_path,
11
+ get_pixi_python,
12
+ pixi_install,
13
+ pixi_run,
14
+ pixi_clean,
15
+ )
16
+ from .cuda_wheels import (
17
+ CUDA_WHEELS_INDEX,
18
+ get_wheel_url,
19
+ find_available_wheels,
20
+ get_cuda_torch_mapping,
21
+ )
22
+ from .toml_generator import (
23
+ generate_pixi_toml,
24
+ config_to_pixi_dict,
25
+ )
26
+ from .apt import (
27
+ apt_install,
28
+ check_apt_packages,
29
+ )
30
+ from .node_dependencies import (
31
+ install_node_dependencies,
32
+ clone_node,
33
+ normalize_repo_url,
34
+ )
35
+
36
+ __all__ = [
37
+ # Pixi package manager
38
+ "PIXI_VERSION",
39
+ "ensure_pixi",
40
+ "get_pixi_path",
41
+ "get_pixi_python",
42
+ "pixi_install",
43
+ "pixi_run",
44
+ "pixi_clean",
45
+ # CUDA wheels
46
+ "CUDA_WHEELS_INDEX",
47
+ "get_wheel_url",
48
+ "find_available_wheels",
49
+ "get_cuda_torch_mapping",
50
+ # TOML generation
51
+ "generate_pixi_toml",
52
+ "config_to_pixi_dict",
53
+ # APT packages
54
+ "apt_install",
55
+ "check_apt_packages",
56
+ # Node dependencies
57
+ "install_node_dependencies",
58
+ "clone_node",
59
+ "normalize_repo_url",
60
+ ]
@@ -0,0 +1,36 @@
1
+ """APT package installation (Linux only)."""
2
+
3
+ import subprocess
4
+ import sys
5
+ from typing import Callable, List
6
+
7
+
8
def apt_install(packages: List[str], log: Callable[[str], None] = print) -> bool:
    """Install system packages via apt-get.

    No-op (returns True) when *packages* is empty or the platform is not
    Linux.  ``sudo`` is prefixed only when it is actually available, so
    the function also works in containers that run as root without sudo
    (the original unconditional ``sudo`` raised FileNotFoundError there).

    Args:
        packages: apt package names to install.
        log: sink for progress/warning messages.

    Returns:
        True on success (or no-op), False when ``apt-get install`` failed.
    """
    if not packages or sys.platform != "linux":
        return True

    import shutil  # local import: keeps this module's import surface unchanged

    prefix = ["sudo"] if shutil.which("sudo") else []
    log(f"Installing apt packages: {packages}")

    # Best-effort index refresh; a failure here is tolerated because the
    # install below may still succeed with a stale index.
    subprocess.run(prefix + ["apt-get", "update"], capture_output=True, text=True)

    result = subprocess.run(
        prefix + ["apt-get", "install", "-y"] + packages,
        capture_output=True, text=True
    )
    if result.returncode != 0:
        log(f"Warning: apt-get install failed: {result.stderr[:200]}")
        return False

    return True
26
+
27
+
28
def check_apt_packages(packages: List[str]) -> List[str]:
    """Return the subset of *packages* that dpkg reports as not installed."""
    if sys.platform != "linux":
        return []

    missing: List[str] = []
    for pkg in packages:
        status = subprocess.run(["dpkg", "-s", pkg], capture_output=True)
        if status.returncode != 0:
            missing.append(pkg)
    return missing
@@ -0,0 +1,97 @@
1
+ """CUDA wheels index integration. See: https://pozzettiandrea.github.io/cuda-wheels/"""
2
+
3
+ import re
4
+ import sys
5
+ import urllib.request
6
+ from typing import List, Optional
7
+
8
# PEP 503-style simple index hosting prebuilt CUDA-enabled wheels.
CUDA_WHEELS_INDEX = "https://pozzettiandrea.github.io/cuda-wheels/"
# Maps CUDA "major.minor" to the recommended PyTorch "major.minor".
CUDA_TORCH_MAP = {"12.8": "2.8", "12.4": "2.4"}
10
+
11
+
12
def get_cuda_torch_mapping() -> dict:
    """Return a defensive copy of the CUDA -> torch version map."""
    return dict(CUDA_TORCH_MAP)
14
+
15
+
16
def get_torch_version_for_cuda(cuda_version: str) -> Optional[str]:
    """Look up the torch version for *cuda_version* (matched on major.minor)."""
    major_minor = ".".join(cuda_version.split(".")[:2])
    return CUDA_TORCH_MAP.get(major_minor)
18
+
19
+
20
+ def _pkg_variants(package: str) -> List[str]:
21
+ return [package, package.replace("-", "_"), package.replace("_", "-")]
22
+
23
+
24
+ def _platform_tag() -> Optional[str]:
25
+ if sys.platform.startswith("linux"): return "linux_x86_64"
26
+ if sys.platform == "win32": return "win_amd64"
27
+ return None
28
+
29
+
30
def get_wheel_url(package: str, torch_version: str, cuda_version: str, python_version: str) -> Optional[str]:
    """Get direct URL to matching wheel from cuda-wheels index.

    Scans each candidate package directory on the index for an anchor
    whose display text carries a local version tag encoding the requested
    CUDA/torch pair, this interpreter's cp-tag, and (when constrained)
    the platform tag.  Returns the absolute wheel URL, or None.
    """
    # "12.8" -> "128"; torch "2.8" -> "28"; python "3.11" -> "cp311"
    cuda_short = cuda_version.replace(".", "")[:3]
    torch_short = torch_version.replace(".", "")[:2]
    py_tag = f"cp{python_version.replace('.', '')}"
    platform_tag = _platform_tag()

    # Both local-version spellings observed on the index are accepted.
    local_patterns = [f"+cu{cuda_short}torch{torch_short}", f"+pt{torch_short}cu{cuda_short}"]
    link_pattern = re.compile(r'href="([^"]+\.whl)"[^>]*>([^<]+)</a>', re.IGNORECASE)

    for pkg_dir in _pkg_variants(package):
        try:
            with urllib.request.urlopen(f"{CUDA_WHEELS_INDEX}{pkg_dir}/", timeout=10) as resp:
                html = resp.read().decode("utf-8")
        except Exception: continue  # variant directory may not exist; try the next

        for match in link_pattern.finditer(html):
            wheel_url, display = match.group(1), match.group(2)
            if any(p in display for p in local_patterns) and py_tag in display:
                # platform_tag None means "no platform constraint"
                if platform_tag is None or platform_tag in display:
                    # hrefs may be relative to the package directory
                    return wheel_url if wheel_url.startswith("http") else f"{CUDA_WHEELS_INDEX}{pkg_dir}/{wheel_url}"
    return None
52
+
53
+
54
def find_available_wheels(package: str) -> List[str]:
    """List all available wheels for a package."""
    link_pattern = re.compile(r'href="[^"]*?([^"/]+\.whl)"', re.IGNORECASE)
    found: List[str] = []
    for pkg_dir in _pkg_variants(package):
        try:
            with urllib.request.urlopen(f"{CUDA_WHEELS_INDEX}{pkg_dir}/", timeout=10) as resp:
                page = resp.read().decode("utf-8")
        except Exception:
            # Missing variant directory or network trouble: try the next name.
            continue
        for match in link_pattern.finditer(page):
            wheel_name = match.group(1).replace("%2B", "+")
            if wheel_name not in found:
                found.append(wheel_name)
    return found
67
+
68
+
69
def find_matching_wheel(package: str, torch_version: str, cuda_version: str) -> Optional[str]:
    """Find wheel matching CUDA/torch version, return version spec.

    Scans the index for wheels whose local version tag encodes the
    requested CUDA/torch pair and returns an exact pin
    ("<package>===<version>") for the highest matching version.

    Bug fix: versions are compared numerically component-by-component;
    the original plain-string comparison ranked e.g. "2.9.0" above
    "2.10.0".

    Returns None when no directory variant yields a match.
    """
    cuda_short = cuda_version.replace(".", "")[:3]
    torch_short = torch_version.replace(".", "")[:2]
    local_patterns = [f"+cu{cuda_short}torch{torch_short}", f"+pt{torch_short}cu{cuda_short}"]
    wheel_pattern = re.compile(r'href="[^"]*?([^"/]+\.whl)"', re.IGNORECASE)

    for pkg_dir in _pkg_variants(package):
        try:
            with urllib.request.urlopen(f"{CUDA_WHEELS_INDEX}{pkg_dir}/", timeout=10) as resp:
                html = resp.read().decode("utf-8")
        except Exception:
            continue  # variant directory may not exist; try the next

        best_match = best_key = None
        for match in wheel_pattern.finditer(html):
            wheel_name = match.group(1).replace("%2B", "+")
            if not any(local in wheel_name for local in local_patterns):
                continue
            # Wheel filename: <name>-<version>-<pytag>-...whl
            parts = wheel_name.split("-")
            if len(parts) < 2:
                continue
            key = _version_sort_key(parts[1])
            if best_key is None or key > best_key:
                best_key = key
                best_match = f"{package}==={parts[1]}"
        if best_match:
            return best_match
    return None


def _version_sort_key(version: str) -> tuple:
    """Numeric sort key for a wheel version like "2.10.0+cu128torch28".

    The release part (before '+') is split on '.' with numeric components
    compared as ints; non-numeric components sort as 0.  The raw local
    tail is kept as a deterministic tie-break.
    """
    release, sep, local = version.partition("+")
    numeric = tuple(int(p) if p.isdigit() else 0 for p in release.split("."))
    return (numeric, sep + local)
94
+
95
+
96
def get_find_links_urls(package: str) -> List[str]:
    """All candidate index pages for *package*, one per name variant."""
    urls: List[str] = []
    for variant in _pkg_variants(package):
        urls.append(f"{CUDA_WHEELS_INDEX}{variant}/")
    return urls
@@ -0,0 +1,77 @@
1
+ """Node dependency installation - clone ComfyUI nodes from [node_reqs] section."""
2
+
3
+ import shutil
4
+ import subprocess
5
+ import sys
6
+ from pathlib import Path
7
from typing import Callable, List, Optional, Set, TYPE_CHECKING
8
+
9
+ if TYPE_CHECKING:
10
+ from ..config import NodeDependency
11
+
12
+
13
def normalize_repo_url(repo: str) -> str:
    """Expand "owner/name" shorthand to a GitHub URL; pass full URLs through."""
    return repo if repo.startswith("http") else f"https://github.com/{repo}"
16
+
17
+
18
def clone_node(repo: str, name: str, target_dir: Path, log: Callable[[str], None] = print) -> Path:
    """Shallow-clone *repo* into target_dir/name and return that path.

    Raises:
        RuntimeError: when git exits non-zero.
    """
    destination = target_dir / name
    url = normalize_repo_url(repo)
    log(f" Cloning {name}...")
    proc = subprocess.run(
        ["git", "clone", "--depth", "1", url, str(destination)],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        raise RuntimeError(f"Failed to clone {url}: {proc.stderr.strip()}")
    return destination
26
+
27
+
28
def install_requirements(node_dir: Path, log: Callable[[str], None] = print) -> None:
    """Pip-install node_dir/requirements.txt if present (prefers uv when on PATH)."""
    req_file = node_dir / "requirements.txt"
    if not req_file.exists():
        return
    log(f" Installing requirements for {node_dir.name}...")
    if shutil.which("uv"):
        cmd = ["uv", "pip", "install", "-r", str(req_file), "--python", sys.executable]
    else:
        cmd = [sys.executable, "-m", "pip", "install", "-r", str(req_file)]
    result = subprocess.run(cmd, cwd=node_dir, capture_output=True, text=True)
    if result.returncode != 0:
        # Best-effort: a failed requirements install is logged, not raised.
        log(f" Warning: requirements failed: {result.stderr.strip()[:200]}")
36
+
37
+
38
def run_install_script(node_dir: Path, log: Callable[[str], None] = print) -> None:
    """Execute node_dir/install.py with the current interpreter, if it exists."""
    install_script = node_dir / "install.py"
    if not install_script.exists():
        return
    log(f" Running install.py for {node_dir.name}...")
    result = subprocess.run(
        [sys.executable, str(install_script)],
        cwd=node_dir,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        # Best-effort: script failure is logged, not raised.
        log(f" Warning: install.py failed: {result.stderr.strip()[:200]}")
45
+
46
+
47
def install_node_dependencies(
    node_deps: "List[NodeDependency]",
    custom_nodes_dir: Path,
    log: Callable[[str], None] = print,
    visited: Optional[Set[str]] = None,
) -> None:
    """Install node dependencies recursively.

    Clones each dependency into *custom_nodes_dir*, installs its
    requirements and runs its install.py, then recurses into any nested
    [node_reqs] discovered in the cloned node.  *visited* tracks names
    across the recursion to break dependency cycles.

    Fixes: the *visited* annotation is now Optional (None is the real
    default), and an explicitly-passed empty set is no longer discarded
    (`is None` check instead of truthiness), so callers can observe which
    nodes were processed.
    """
    from ..config import discover_config

    if visited is None:
        visited = set()
    for dep in node_deps:
        if dep.name in visited:
            log(f" {dep.name}: cycle, skipping")
            continue
        visited.add(dep.name)

        node_path = custom_nodes_dir / dep.name
        if node_path.exists():
            log(f" {dep.name}: exists")
            continue

        try:
            clone_node(dep.repo, dep.name, custom_nodes_dir, log)
            install_requirements(node_path, log)
            run_install_script(node_path, log)

            # Recurse into nested node requirements declared by the clone.
            nested_config = discover_config(node_path)
            if nested_config and nested_config.node_reqs:
                install_node_dependencies(nested_config.node_reqs, custom_nodes_dir, log, visited)
        except Exception as e:
            # Best-effort: one failing node must not abort the others.
            log(f" Warning: {dep.name} failed: {e}")
@@ -0,0 +1,85 @@
1
+ """Pixi package manager integration. See: https://pixi.sh/"""
2
+
3
+ import platform as platform_mod
4
+ import shutil
5
+ import stat
6
+ import subprocess
7
+ import sys
8
+ import urllib.request
9
+ from pathlib import Path
10
+ from typing import Callable, List, Optional
11
+
12
# Direct download URLs for the pixi binary, keyed by
# (platform.system(), normalized platform.machine()) pairs.
PIXI_URLS = {
    ("Linux", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-unknown-linux-musl",
    ("Linux", "aarch64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-unknown-linux-musl",
    ("Darwin", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-apple-darwin",
    ("Darwin", "arm64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-apple-darwin",
    ("Windows", "AMD64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-pc-windows-msvc.exe",
}
19
+
20
+
21
def get_pixi_path() -> Optional[Path]:
    """Find pixi in PATH or common locations."""
    on_path = shutil.which("pixi")
    if on_path:
        return Path(on_path)
    home = Path.home()
    for base in (home / ".pixi/bin/pixi", home / ".local/bin/pixi"):
        candidate = base.with_suffix(".exe") if sys.platform == "win32" else base
        if candidate.exists():
            return candidate
    return None
29
+
30
+
31
def ensure_pixi(install_dir: Optional[Path] = None, log: Callable[[str], None] = print) -> Path:
    """Ensure pixi is installed, downloading if necessary.

    Returns an existing binary when one is found on PATH / known
    locations; otherwise downloads the latest release into *install_dir*
    (default: ~/.local/bin) and marks it executable on POSIX.

    Raises:
        RuntimeError: unsupported (system, machine) pair, or both
            urllib and the curl fallback failed to download.
    """
    if existing := get_pixi_path(): return existing

    log("Pixi not found, downloading...")
    install_dir = install_dir or Path.home() / ".local/bin"
    install_dir.mkdir(parents=True, exist_ok=True)

    # Normalize machine names to the PIXI_URLS keys
    # (Windows reports AMD64, macOS arm64, Linux aarch64).
    system, machine = platform_mod.system(), platform_mod.machine()
    if machine in ("x86_64", "AMD64"): machine = "x86_64" if system != "Windows" else "AMD64"
    elif machine in ("arm64", "aarch64"): machine = "arm64" if system == "Darwin" else "aarch64"

    if (system, machine) not in PIXI_URLS:
        raise RuntimeError(f"No pixi for {system}/{machine}")

    pixi_path = install_dir / ("pixi.exe" if system == "Windows" else "pixi")
    try:
        urllib.request.urlretrieve(PIXI_URLS[(system, machine)], pixi_path)
    except Exception as e:
        # Fallback for environments where urllib fails (proxies, SSL setup).
        result = subprocess.run(["curl", "-fsSL", "-o", str(pixi_path), PIXI_URLS[(system, machine)]], capture_output=True, text=True)
        if result.returncode != 0: raise RuntimeError(f"Failed to download pixi") from e

    if system != "Windows":
        # chmod +x for user, group, and other.
        pixi_path.chmod(pixi_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    log(f"Installed pixi: {pixi_path}")
    return pixi_path
58
+
59
+
60
def get_pixi_python(node_dir: Path) -> Optional[Path]:
    """Get Python path from pixi environment."""
    from ..environment.cache import resolve_env_path

    env_path, _, _ = resolve_env_path(node_dir)
    if not env_path:
        return None
    interpreter = "python.exe" if sys.platform == "win32" else "bin/python"
    python_path = env_path / interpreter
    if python_path.exists():
        return python_path
    return None
67
+
68
+
69
def pixi_install(node_dir: Path, log: Callable[[str], None] = print) -> subprocess.CompletedProcess:
    """Run `pixi install` in *node_dir*; raises RuntimeError if pixi is missing."""
    pixi_path = get_pixi_path()
    if pixi_path is None:
        raise RuntimeError("Pixi not found")
    return subprocess.run([str(pixi_path), "install"], cwd=node_dir, capture_output=True, text=True)
73
+
74
+
75
def pixi_run(command: List[str], node_dir: Path, log: Callable[[str], None] = print) -> subprocess.CompletedProcess:
    """Run `pixi run <command>` in *node_dir*; raises RuntimeError if pixi is missing."""
    pixi_path = get_pixi_path()
    if pixi_path is None:
        raise RuntimeError("Pixi not found")
    return subprocess.run([str(pixi_path), "run"] + command, cwd=node_dir, capture_output=True, text=True)
79
+
80
+
81
def pixi_clean(node_dir: Path, log: Callable[[str], None] = print) -> None:
    """Remove pixi artifacts (pixi.toml, pixi.lock, .pixi/)."""
    for artifact_name in ("pixi.toml", "pixi.lock"):
        artifact = node_dir / artifact_name
        if artifact.exists():
            artifact.unlink()
    pixi_dir = node_dir / ".pixi"
    if pixi_dir.exists():
        shutil.rmtree(pixi_dir)
@@ -0,0 +1,88 @@
1
+ """Generate pixi.toml from ComfyEnvConfig."""
2
+
3
+ import copy
4
+ import sys
5
+ from pathlib import Path
6
+ from typing import Any, Callable, Dict
7
+
8
+ from ..config import ComfyEnvConfig
9
+ from ..detection import get_recommended_cuda_version, get_pixi_platform
10
+ from .cuda_wheels import CUDA_TORCH_MAP
11
+
12
+
13
+ def _require_tomli_w():
14
+ try:
15
+ import tomli_w
16
+ return tomli_w
17
+ except ImportError:
18
+ raise ImportError("tomli-w required: pip install tomli-w")
19
+
20
+
21
def generate_pixi_toml(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None] = print) -> str:
    """Render the pixi.toml content for *cfg* as a TOML string (needs tomli-w)."""
    return _require_tomli_w().dumps(config_to_pixi_dict(cfg, node_dir, log))
23
+
24
+
25
def write_pixi_toml(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None] = print) -> Path:
    """Write node_dir/pixi.toml generated from *cfg* and return its path."""
    tomli_w = _require_tomli_w()
    pixi_toml = node_dir / "pixi.toml"
    data = config_to_pixi_dict(cfg, node_dir, log)
    with open(pixi_toml, "wb") as f:
        tomli_w.dump(data, f)
    log(f"Generated {pixi_toml}")
    return pixi_toml
32
+
33
+
34
def config_to_pixi_dict(cfg: ComfyEnvConfig, node_dir: Path, log: Callable[[str], None] = print) -> Dict[str, Any]:
    """Build the pixi.toml data dict for *cfg*.

    Starts from a deep copy of cfg.pixi_passthrough (so the config object
    is never mutated) and fills in workspace metadata, system
    requirements, python/pip dependencies, and — when CUDA is requested
    and detected — the PyTorch CUDA wheel index plus a torch version
    constraint.  setdefault is used throughout so user-supplied
    passthrough values always win.
    """
    pixi_data = copy.deepcopy(cfg.pixi_passthrough)

    # Detect CUDA (never attempted on macOS) and map it to a torch release.
    cuda_version = torch_version = None
    if cfg.has_cuda and sys.platform != "darwin":
        cuda_version = get_recommended_cuda_version()
        if cuda_version:
            # Unmapped CUDA versions fall back to torch "2.8".
            torch_version = CUDA_TORCH_MAP.get(".".join(cuda_version.split(".")[:2]), "2.8")
            log(f"CUDA {cuda_version} -> PyTorch {torch_version}")

    # Workspace
    workspace = pixi_data.setdefault("workspace", {})
    workspace.setdefault("name", node_dir.name)
    workspace.setdefault("version", "0.1.0")
    workspace.setdefault("channels", ["conda-forge"])
    workspace.setdefault("platforms", [get_pixi_platform()])

    # System requirements: glibc floor on Linux, CUDA major when detected.
    if sys.platform.startswith("linux") or cuda_version:
        system_reqs = pixi_data.setdefault("system-requirements", {})
        if sys.platform.startswith("linux"):
            system_reqs.setdefault("libc", {"family": "glibc", "version": "2.35"})
        if cuda_version:
            system_reqs["cuda"] = cuda_version.split(".")[0]

    # Dependencies: pin python to the configured (or current) minor series.
    dependencies = pixi_data.setdefault("dependencies", {})
    py_version = cfg.python or f"{sys.version_info.major}.{sys.version_info.minor}"
    dependencies.setdefault("python", f"{py_version}.*")
    dependencies.setdefault("pip", "*")

    # PyTorch CUDA index, e.g. https://download.pytorch.org/whl/cu128
    if cfg.has_cuda and cuda_version:
        pypi_options = pixi_data.setdefault("pypi-options", {})
        pytorch_index = f"https://download.pytorch.org/whl/cu{cuda_version.replace('.', '')[:3]}"
        extra_urls = pypi_options.setdefault("extra-index-urls", [])
        if pytorch_index not in extra_urls: extra_urls.append(pytorch_index)

    # Enforce torch version, e.g. ">=2.8,<2.9"
    if cfg.has_cuda and torch_version:
        pypi_deps = pixi_data.setdefault("pypi-dependencies", {})
        torch_minor = int(torch_version.split(".")[1])
        pypi_deps["torch"] = f">={torch_version},<{torch_version.split('.')[0]}.{torch_minor + 1}"

    return pixi_data
79
+
80
+
81
def deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
    """Recursively merge *override* into *base* without mutating either.

    Nested dicts are merged key-by-key; any other override value replaces
    the base value wholesale (deep-copied, so inputs stay independent).
    """
    merged = copy.deepcopy(base)
    for key, value in override.items():
        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(value, dict):
            merged[key] = deep_merge(existing, value)
        else:
            merged[key] = copy.deepcopy(value)
    return merged