comfy-env 0.0.19__py3-none-any.whl → 0.0.22__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only.
- comfy_env/__init__.py +19 -1
- comfy_env/decorator.py +28 -9
- comfy_env/env/config.py +18 -0
- comfy_env/env/config_file.py +20 -7
- comfy_env/install.py +8 -16
- comfy_env/pixi.py +397 -0
- comfy_env/workers/torch_mp.py +204 -1
- {comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/METADATA +2 -2
- {comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/RECORD +12 -11
- {comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/WHEEL +0 -0
- {comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/licenses/LICENSE +0 -0
comfy_env/__init__.py
CHANGED
@@ -1,6 +1,6 @@
 __version__ = "0.0.14"
 
-from .env.config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq
+from .env.config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq, CondaConfig
 from .env.config_file import (
     load_env_from_file,
     discover_env_config,
@@ -33,6 +33,16 @@ from .decorator import isolated, shutdown_all_processes
 # New in-place installation API
 from .install import install, verify_installation
 from .resolver import RuntimeEnv, WheelResolver
+
+# Pixi integration (for conda packages)
+from .pixi import (
+    ensure_pixi,
+    get_pixi_path,
+    pixi_install,
+    create_pixi_toml,
+    get_pixi_python,
+    pixi_run,
+)
 from .errors import (
     EnvManagerError,
     ConfigError,
@@ -73,6 +83,13 @@ __all__ = [
     "verify_installation",
     "RuntimeEnv",
     "WheelResolver",
+    # Pixi integration (for conda packages)
+    "ensure_pixi",
+    "get_pixi_path",
+    "pixi_install",
+    "create_pixi_toml",
+    "get_pixi_python",
+    "pixi_run",
     # Errors
     "EnvManagerError",
     "ConfigError",
@@ -93,6 +110,7 @@ __all__ = [
     "EnvManagerConfig",
     "LocalConfig",
     "NodeReq",
+    "CondaConfig",
     "IsolatedEnvManager",
     # Config file loading
     "load_env_from_file",
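The newly exported names make the pixi helpers usable straight from the package root. A minimal usage sketch, assuming comfy-env 0.0.22 is installed; the node directory below is hypothetical:

```python
from pathlib import Path

from comfy_env import ensure_pixi, get_pixi_path, get_pixi_python, pixi_run

# Locate pixi, downloading it to ~/.local/bin if it is not already present.
pixi = get_pixi_path() or ensure_pixi()
print(f"pixi binary: {pixi}")

node_dir = Path("custom_nodes/my_node_pack")  # hypothetical node pack directory
python = get_pixi_python(node_dir)            # .pixi/envs/default interpreter, or None
if python is not None:
    # Run a command inside that node pack's pixi environment.
    result = pixi_run(["python", "--version"], node_dir)
    print(result.stdout.strip())
```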
comfy_env/decorator.py
CHANGED
@@ -113,6 +113,31 @@ def _clone_tensor_if_needed(obj: Any, smart_clone: bool = True) -> Any:
     return obj
 
 
+def _find_node_package_dir(source_file: Path) -> Path:
+    """
+    Find the node package root directory by searching for comfy-env.toml.
+
+    Walks up from the source file's directory until it finds a config file,
+    or falls back to heuristics if not found.
+    """
+    from .env.config_file import CONFIG_FILE_NAMES
+
+    current = source_file.parent
+
+    # Walk up the directory tree looking for config file
+    while current != current.parent:  # Stop at filesystem root
+        for config_name in CONFIG_FILE_NAMES:
+            if (current / config_name).exists():
+                return current
+        current = current.parent
+
+    # Fallback: use old heuristic if no config found
+    node_dir = source_file.parent
+    if node_dir.name == "nodes":
+        return node_dir.parent
+    return node_dir
+
+
 # ---------------------------------------------------------------------------
 # Worker Management
 # ---------------------------------------------------------------------------
@@ -262,10 +287,7 @@ def isolated(
     # Get source file info for sys.path setup
     source_file = Path(inspect.getfile(cls))
     node_dir = source_file.parent
-    if node_dir.name == "nodes":
-        node_package_dir = node_dir.parent
-    else:
-        node_package_dir = node_dir
+    node_package_dir = _find_node_package_dir(source_file)
 
     # Build sys.path for worker
     sys_path_additions = [str(node_dir)]
@@ -367,13 +389,10 @@ def isolated(
     call_kwargs = {k: _clone_tensor_if_needed(v) for k, v in call_kwargs.items()}
 
     # Get module name for import in worker
+    # Note: ComfyUI uses full filesystem paths as module names for custom nodes.
+    # The worker's _execute_method_call handles this by using file-based imports.
     module_name = cls.__module__
 
-    # Handle ComfyUI's dynamic import which can set __module__ to a path
-    if module_name.startswith('/') or module_name.startswith('\\'):
-        # Module name is a filesystem path - use the source file stem instead
-        module_name = source_file.stem
-
     # Call worker using appropriate method
     if worker_config.python is None:
         # TorchMPWorker - use call_method protocol (avoids pickle issues)
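A small sketch of the lookup `_find_node_package_dir` performs, with a hypothetical node-pack layout; only the walk-up-to-comfy-env.toml behaviour and the `nodes/` fallback come from the code above:

```python
from pathlib import Path

from comfy_env.decorator import _find_node_package_dir

# Hypothetical layout:
#   my_node_pack/
#   |-- comfy-env.toml        <- marks the package root
#   +-- nodes/
#       +-- mesh_ops.py       <- file containing the @isolated class
source_file = Path("my_node_pack/nodes/mesh_ops.py")

# Walking up from nodes/ stops at the directory holding comfy-env.toml;
# if no config file is found, the old "parent of a nodes/ directory"
# heuristic still applies, so both cases resolve to my_node_pack/.
print(_find_node_package_dir(source_file))
```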
comfy_env/env/config.py
CHANGED
@@ -28,6 +28,17 @@ class LocalConfig:
     requirements: List[str] = field(default_factory=list)
 
 
+@dataclass
+class CondaConfig:
+    """Configuration for conda packages (installed via pixi).
+
+    When present, the environment uses pixi as the backend instead of uv.
+    Pixi manages both conda and pip packages in a unified environment.
+    """
+    channels: List[str] = field(default_factory=list)  # conda channels
+    packages: List[str] = field(default_factory=list)  # conda packages
+
+
 @dataclass
 class NodeReq:
     """A node dependency (another ComfyUI node pack)."""
@@ -139,6 +150,8 @@ class IsolatedEnv:
     # Worker configuration
     worker_package: Optional[str] = None  # e.g., "worker" -> worker/__main__.py
     worker_script: Optional[str] = None  # e.g., "worker.py" -> worker.py
+    # Conda configuration (uses pixi backend when present)
+    conda: Optional["CondaConfig"] = None
 
     def __post_init__(self):
         """Validate and normalize configuration."""
@@ -175,3 +188,8 @@ class IsolatedEnv:
         if self.env_dir is not None:
             return self.env_dir
         return base_dir / f"_env_{self.name}"
+
+    @property
+    def uses_conda(self) -> bool:
+        """Check if this environment uses conda packages (pixi backend)."""
+        return self.conda is not None and bool(self.conda.packages)
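A short sketch of the new dataclass and the rule behind `uses_conda`; the channel and package names are hypothetical:

```python
from comfy_env import CondaConfig

# An env switches to the pixi backend only when conda packages are actually listed.
with_packages = CondaConfig(channels=["conda-forge"], packages=["ffmpeg", "colmap=3.9"])
print(bool(with_packages.packages))   # True  -> IsolatedEnv.uses_conda would be True

channels_only = CondaConfig(channels=["conda-forge"])
print(bool(channels_only.packages))   # False -> the uv backend is kept
```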
comfy_env/env/config_file.py
CHANGED
@@ -63,7 +63,7 @@ else:
 except ImportError:
     tomllib = None  # type: ignore
 
-from .config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq, SystemConfig, ToolConfig
+from .config import IsolatedEnv, EnvManagerConfig, LocalConfig, NodeReq, SystemConfig, ToolConfig, CondaConfig
 from .cuda_gpu_detection import detect_cuda_version
 
 
@@ -515,15 +515,27 @@ def _parse_single_env(name: str, env_data: Dict[str, Any], base_dir: Path) -> Is
     if pytorch == "auto":
         pytorch = _get_default_pytorch_version(cuda)
 
-    # Parse [envname.
+    # Parse [envname.conda] - conda packages (uses pixi backend)
+    conda_section = env_data.get("conda", {})
+    conda_config = None
+    if isinstance(conda_section, dict) and ("channels" in conda_section or "packages" in conda_section):
+        # This is a conda config with channels/packages
+        channels = conda_section.get("channels", [])
+        packages = conda_section.get("packages", [])
+        if packages:
+            conda_config = CondaConfig(channels=channels, packages=packages)
+
+    # Parse [envname.cuda] - CUDA packages (separate from conda)
     cuda_section = env_data.get("cuda", {})
     no_deps_requirements = []
     if isinstance(cuda_section, dict):
-        for pkg, ver in cuda_section.items():
-            if ver == "*" or ver == "":
-                no_deps_requirements.append(pkg)
-            else:
-                no_deps_requirements.append(f"{pkg}=={ver}")
+        # Skip if this looks like a conda section (has channels/packages keys)
+        if not ("channels" in cuda_section or "packages" in cuda_section):
+            for pkg, ver in cuda_section.items():
+                if ver == "*" or ver == "":
+                    no_deps_requirements.append(pkg)
+                else:
+                    no_deps_requirements.append(f"{pkg}=={ver}")
 
     # Parse [envname.packages] - regular packages
     packages_section = env_data.get("packages", {})
@@ -567,6 +579,7 @@ def _parse_single_env(name: str, env_data: Dict[str, Any], base_dir: Path) -> Is
         windows_requirements=windows_reqs,
         linux_requirements=linux_reqs,
         darwin_requirements=darwin_reqs,
+        conda=conda_config,
     )
 
 
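A hedged sketch of the `[envname.conda]` table this parser now recognises and the CondaConfig it maps to. The env name and pins are hypothetical; only the channels/packages keys and the non-empty-packages guard mirror the parsing code above, the rest is simplified:

```python
import tomllib  # Python 3.11+ standard library

from comfy_env import CondaConfig

TOML = """
[my_env.conda]
channels = ["conda-forge"]
packages = ["ffmpeg", "openexr=3.2"]
"""

env_data = tomllib.loads(TOML)["my_env"]
conda_section = env_data.get("conda", {})

conda_config = None
if isinstance(conda_section, dict) and conda_section.get("packages"):
    conda_config = CondaConfig(
        channels=conda_section.get("channels", []),
        packages=conda_section.get("packages", []),
    )
print(conda_config)
```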
comfy_env/install.py
CHANGED
@@ -25,10 +25,11 @@ from pathlib import Path
 from typing import Any, Callable, Dict, List, Optional, Union
 
 from .env.config import IsolatedEnv, SystemConfig
-from .env.config_file import
+from .env.config_file import load_config, discover_config
 from .env.manager import IsolatedEnvManager
 from .errors import CUDANotFoundError, DependencyError, InstallError, WheelNotFoundError
-from .
+from .pixi import pixi_install
+from .registry import PACKAGE_REGISTRY, get_cuda_short2
 from .resolver import RuntimeEnv, WheelResolver, parse_wheel_requirement
 
 
@@ -198,6 +199,11 @@ def install(
     if env_config:
         log(f"Found configuration: {env_config.name}")
 
+    # Check if environment uses conda packages (pixi backend)
+    if env_config and env_config.uses_conda:
+        log(f"Environment uses conda packages - using pixi backend")
+        return pixi_install(env_config, node_dir, log, dry_run)
+
     if mode == "isolated" and env_config:
         return _install_isolated(env_config, node_dir, log, dry_run)
     elif env_config:
@@ -206,20 +212,6 @@
     return True
 
 
-def _load_config(
-    config: Optional[Union[str, Path]],
-    node_dir: Path,
-) -> Optional[IsolatedEnv]:
-    """Load configuration from file or auto-discover."""
-    if config is not None:
-        config_path = Path(config)
-        if not config_path.is_absolute():
-            config_path = node_dir / config_path
-        return load_env_from_file(config_path, node_dir)
-
-    return discover_env_config(node_dir)
-
-
 def _load_full_config(config: Optional[Union[str, Path]], node_dir: Path):
     """Load full EnvManagerConfig (includes tools)."""
     from .env.config import EnvManagerConfig
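A sketch of the new dispatch from the caller's side: when the discovered configuration declares conda packages, installation is routed through `pixi_install()` instead of the uv path. The node directory is hypothetical; `discover_env_config` and `pixi_install` are the functions shown above:

```python
from pathlib import Path

from comfy_env import discover_env_config, pixi_install

node_dir = Path("custom_nodes/my_node_pack")   # hypothetical
env_config = discover_env_config(node_dir)     # Optional[IsolatedEnv]

if env_config is not None and env_config.uses_conda:
    # dry_run=True only logs what the pixi backend would do.
    pixi_install(env_config, node_dir, dry_run=True)
```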
comfy_env/pixi.py
ADDED
@@ -0,0 +1,397 @@
"""
Pixi integration for comfy-env.

Pixi is a fast package manager that supports both conda and pip packages.
When an environment has conda packages defined, we use pixi as the backend
instead of uv.

See: https://pixi.sh/
"""

import os
import platform
import shutil
import stat
import subprocess
import sys
from pathlib import Path
from typing import Callable, List, Optional

from .env.config import IsolatedEnv, CondaConfig


# Pixi download URLs by platform
PIXI_URLS = {
    ("Linux", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-unknown-linux-musl",
    ("Linux", "aarch64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-unknown-linux-musl",
    ("Darwin", "x86_64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-apple-darwin",
    ("Darwin", "arm64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-aarch64-apple-darwin",
    ("Windows", "AMD64"): "https://github.com/prefix-dev/pixi/releases/latest/download/pixi-x86_64-pc-windows-msvc.exe",
}


def get_pixi_path() -> Optional[Path]:
    """
    Find the pixi executable.

    Checks:
    1. System PATH
    2. ~/.pixi/bin/pixi
    3. ~/.local/bin/pixi

    Returns:
        Path to pixi executable, or None if not found.
    """
    # Check system PATH
    pixi_cmd = shutil.which("pixi")
    if pixi_cmd:
        return Path(pixi_cmd)

    # Check common install locations
    home = Path.home()
    candidates = [
        home / ".pixi" / "bin" / "pixi",
        home / ".local" / "bin" / "pixi",
    ]

    # Add .exe on Windows
    if sys.platform == "win32":
        candidates = [p.with_suffix(".exe") for p in candidates]

    for candidate in candidates:
        if candidate.exists():
            return candidate

    return None


def ensure_pixi(
    install_dir: Optional[Path] = None,
    log: Callable[[str], None] = print,
) -> Path:
    """
    Ensure pixi is installed, downloading if necessary.

    Args:
        install_dir: Directory to install pixi to. Defaults to ~/.local/bin/
        log: Logging callback.

    Returns:
        Path to pixi executable.

    Raises:
        RuntimeError: If pixi cannot be installed.
    """
    # Check if already installed
    existing = get_pixi_path()
    if existing:
        log(f"Found pixi at: {existing}")
        return existing

    log("Pixi not found, downloading...")

    # Determine install location
    if install_dir is None:
        install_dir = Path.home() / ".local" / "bin"
    install_dir.mkdir(parents=True, exist_ok=True)

    # Determine download URL
    system = platform.system()
    machine = platform.machine()

    # Normalize machine name
    if machine in ("x86_64", "AMD64"):
        machine = "x86_64" if system != "Windows" else "AMD64"
    elif machine in ("arm64", "aarch64"):
        machine = "arm64" if system == "Darwin" else "aarch64"

    url_key = (system, machine)
    if url_key not in PIXI_URLS:
        raise RuntimeError(
            f"No pixi download available for {system}/{machine}. "
            f"Available: {list(PIXI_URLS.keys())}"
        )

    url = PIXI_URLS[url_key]
    pixi_path = install_dir / ("pixi.exe" if system == "Windows" else "pixi")

    log(f"Downloading pixi from: {url}")

    # Download using curl or urllib
    try:
        import urllib.request
        urllib.request.urlretrieve(url, pixi_path)
    except Exception as e:
        # Try curl as fallback
        result = subprocess.run(
            ["curl", "-fsSL", "-o", str(pixi_path), url],
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            raise RuntimeError(f"Failed to download pixi: {result.stderr}") from e

    # Make executable on Unix
    if system != "Windows":
        pixi_path.chmod(pixi_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    # Verify installation
    result = subprocess.run([str(pixi_path), "--version"], capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"Pixi installation failed: {result.stderr}")

    log(f"Installed pixi {result.stdout.strip()} to: {pixi_path}")
    return pixi_path


def create_pixi_toml(
    env_config: IsolatedEnv,
    node_dir: Path,
    log: Callable[[str], None] = print,
) -> Path:
    """
    Generate a pixi.toml file from the environment configuration.

    The generated pixi.toml includes:
    - Project metadata
    - Conda channels
    - Conda dependencies
    - PyPI dependencies (from requirements + no_deps_requirements)

    Args:
        env_config: The isolated environment configuration.
        node_dir: Directory to write pixi.toml to.
        log: Logging callback.

    Returns:
        Path to the generated pixi.toml file.
    """
    if not env_config.conda:
        raise ValueError("Environment has no conda configuration")

    conda = env_config.conda
    pixi_toml_path = node_dir / "pixi.toml"

    # Build pixi.toml content
    lines = []

    # Project section
    lines.append("[project]")
    lines.append(f'name = "{env_config.name}"')
    lines.append('version = "0.1.0"')

    # Channels
    channels = conda.channels or ["conda-forge"]
    channels_str = ", ".join(f'"{ch}"' for ch in channels)
    lines.append(f"channels = [{channels_str}]")

    # Platforms
    if sys.platform == "linux":
        lines.append('platforms = ["linux-64"]')
    elif sys.platform == "darwin":
        if platform.machine() == "arm64":
            lines.append('platforms = ["osx-arm64"]')
        else:
            lines.append('platforms = ["osx-64"]')
    elif sys.platform == "win32":
        lines.append('platforms = ["win-64"]')

    lines.append("")

    # Dependencies section (conda packages)
    lines.append("[dependencies]")
    lines.append(f'python = "{env_config.python}.*"')

    for pkg in conda.packages:
        # Parse package spec (name=version or name>=version or just name)
        if "=" in pkg and not pkg.startswith("="):
            # Has version spec
            if ">=" in pkg:
                name, version = pkg.split(">=", 1)
                lines.append(f'{name} = ">={version}"')
            elif "==" in pkg:
                name, version = pkg.split("==", 1)
                lines.append(f'{name} = "=={version}"')
            else:
                # Single = means exact version in conda
                name, version = pkg.split("=", 1)
                lines.append(f'{name} = "=={version}"')
        else:
            # No version, use any
            lines.append(f'{pkg} = "*"')

    lines.append("")

    # PyPI dependencies section
    pypi_deps = []

    # Add regular requirements
    if env_config.requirements:
        pypi_deps.extend(env_config.requirements)

    # Add CUDA packages (no_deps_requirements)
    if env_config.no_deps_requirements:
        pypi_deps.extend(env_config.no_deps_requirements)

    # Add platform-specific requirements
    if sys.platform == "linux" and env_config.linux_requirements:
        pypi_deps.extend(env_config.linux_requirements)
    elif sys.platform == "darwin" and env_config.darwin_requirements:
        pypi_deps.extend(env_config.darwin_requirements)
    elif sys.platform == "win32" and env_config.windows_requirements:
        pypi_deps.extend(env_config.windows_requirements)

    if pypi_deps:
        lines.append("[pypi-dependencies]")
        for dep in pypi_deps:
            # Parse pip requirement format to pixi format
            dep_clean = dep.strip()
            if ">=" in dep_clean:
                name, version = dep_clean.split(">=", 1)
                # Handle complex version specs like ">=1.0,<2.0"
                name = name.strip()
                version = version.strip()
                lines.append(f'{name} = ">={version}"')
            elif "==" in dep_clean:
                name, version = dep_clean.split("==", 1)
                lines.append(f'{name.strip()} = "=={version.strip()}"')
            elif ">" in dep_clean:
                name, version = dep_clean.split(">", 1)
                lines.append(f'{name.strip()} = ">{version.strip()}"')
            elif "<" in dep_clean:
                name, version = dep_clean.split("<", 1)
                lines.append(f'{name.strip()} = "<{version.strip()}"')
            else:
                # No version spec
                lines.append(f'{dep_clean} = "*"')

    content = "\n".join(lines) + "\n"

    # Write the file
    pixi_toml_path.write_text(content)
    log(f"Generated pixi.toml at: {pixi_toml_path}")

    return pixi_toml_path


def pixi_install(
    env_config: IsolatedEnv,
    node_dir: Path,
    log: Callable[[str], None] = print,
    dry_run: bool = False,
) -> bool:
    """
    Install conda and pip packages using pixi.

    This is the main entry point for pixi-based installation. It:
    1. Ensures pixi is installed
    2. Generates pixi.toml from the config
    3. Runs `pixi install` to install all dependencies

    Args:
        env_config: The isolated environment configuration.
        node_dir: Directory containing the node (where pixi.toml will be created).
        log: Logging callback.
        dry_run: If True, only show what would be done.

    Returns:
        True if installation succeeded.

    Raises:
        RuntimeError: If installation fails.
    """
    log(f"Installing {env_config.name} with pixi backend...")

    if dry_run:
        log("Dry run - would:")
        log(f" - Ensure pixi is installed")
        log(f" - Generate pixi.toml in {node_dir}")
        if env_config.conda:
            log(f" - Install {len(env_config.conda.packages)} conda packages")
        if env_config.requirements:
            log(f" - Install {len(env_config.requirements)} pip packages")
        return True

    # Ensure pixi is installed
    pixi_path = ensure_pixi(log=log)

    # Generate pixi.toml
    pixi_toml = create_pixi_toml(env_config, node_dir, log)

    # Run pixi install
    log("Running pixi install...")
    result = subprocess.run(
        [str(pixi_path), "install"],
        cwd=node_dir,
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        log(f"pixi install failed:")
        log(result.stderr)
        raise RuntimeError(f"pixi install failed: {result.stderr}")

    if result.stdout:
        # Log output, but filter for key info
        for line in result.stdout.strip().split("\n"):
            if line.strip():
                log(f" {line}")

    log("pixi install completed successfully!")
    return True


def get_pixi_python(node_dir: Path) -> Optional[Path]:
    """
    Get the path to the Python interpreter in the pixi environment.

    Args:
        node_dir: Directory containing pixi.toml.

    Returns:
        Path to Python executable in the pixi env, or None if not found.
    """
    # Pixi creates .pixi/envs/default/ in the project directory
    env_dir = node_dir / ".pixi" / "envs" / "default"

    if sys.platform == "win32":
        python_path = env_dir / "python.exe"
    else:
        python_path = env_dir / "bin" / "python"

    if python_path.exists():
        return python_path

    return None


def pixi_run(
    command: List[str],
    node_dir: Path,
    log: Callable[[str], None] = print,
) -> subprocess.CompletedProcess:
    """
    Run a command in the pixi environment.

    Args:
        command: Command and arguments to run.
        node_dir: Directory containing pixi.toml.
        log: Logging callback.

    Returns:
        CompletedProcess result.
    """
    pixi_path = get_pixi_path()
    if not pixi_path:
        raise RuntimeError("Pixi not found")

    full_cmd = [str(pixi_path), "run"] + command
    log(f"Running: pixi run {' '.join(command)}")

    return subprocess.run(
        full_cmd,
        cwd=node_dir,
        capture_output=True,
        text=True,
    )
comfy_env/workers/torch_mp.py
CHANGED
@@ -106,6 +106,190 @@ def _worker_loop(queue_in, queue_out, sys_path_additions=None):
             break
 
 
+class PathBasedModuleFinder:
+    """
+    Meta path finder that handles ComfyUI's path-based module names.
+
+    ComfyUI uses full filesystem paths as module names for custom nodes.
+    This finder intercepts imports of such modules and loads them from disk.
+    """
+
+    def find_spec(self, fullname, path, target=None):
+        import importlib.util
+        import os
+
+        # Only handle path-based module names (starting with /)
+        if not fullname.startswith('/'):
+            return None
+
+        # Parse the module name to find base path and submodule parts
+        parts = fullname.split('.')
+        base_path = parts[0]
+        submodule_parts = parts[1:] if len(parts) > 1 else []
+
+        # Walk through parts to find where path ends and module begins
+        for i, part in enumerate(submodule_parts):
+            test_path = os.path.join(base_path, part)
+            if os.path.exists(test_path):
+                base_path = test_path
+            else:
+                # Remaining parts are module names
+                submodule_parts = submodule_parts[i:]
+                break
+        else:
+            # All parts were path components
+            submodule_parts = []
+
+        # Determine the file to load
+        if submodule_parts:
+            # We're importing a submodule
+            current_path = base_path
+            for part in submodule_parts[:-1]:
+                current_path = os.path.join(current_path, part)
+
+            submod = submodule_parts[-1]
+            submod_file = os.path.join(current_path, submod + '.py')
+            submod_pkg = os.path.join(current_path, submod, '__init__.py')
+
+            if os.path.exists(submod_file):
+                return importlib.util.spec_from_file_location(fullname, submod_file)
+            elif os.path.exists(submod_pkg):
+                return importlib.util.spec_from_file_location(
+                    fullname, submod_pkg,
+                    submodule_search_locations=[os.path.join(current_path, submod)]
+                )
+        else:
+            # Top-level path-based module
+            if os.path.isdir(base_path):
+                init_path = os.path.join(base_path, "__init__.py")
+                if os.path.exists(init_path):
+                    return importlib.util.spec_from_file_location(
+                        fullname, init_path,
+                        submodule_search_locations=[base_path]
+                    )
+            elif os.path.isfile(base_path):
+                return importlib.util.spec_from_file_location(fullname, base_path)
+
+        return None
+
+
+# Global flag to track if we've installed the finder
+_path_finder_installed = False
+
+
+def _ensure_path_finder_installed():
+    """Install the PathBasedModuleFinder if not already installed."""
+    import sys
+    global _path_finder_installed
+    if not _path_finder_installed:
+        sys.meta_path.insert(0, PathBasedModuleFinder())
+        _path_finder_installed = True
+        logger.debug("[comfy_env] Installed PathBasedModuleFinder for path-based module names")
+
+
+def _load_path_based_module(module_name: str):
+    """
+    Load a module that has a filesystem path as its name.
+
+    ComfyUI uses full filesystem paths as module names for custom nodes.
+    This function handles that case by using file-based imports.
+    """
+    import importlib.util
+    import os
+    import sys
+
+    # Check if it's already in sys.modules
+    if module_name in sys.modules:
+        return sys.modules[module_name]
+
+    # Check if module_name contains submodule parts (e.g., "/path/to/pkg.submod.subsubmod")
+    # In this case, we need to load the parent packages first
+    if '.' in module_name:
+        parts = module_name.split('.')
+        # Find where the path ends and module parts begin
+        # The path part won't exist as a directory when combined with module parts
+        base_path = parts[0]
+        submodule_parts = []
+
+        for i, part in enumerate(parts[1:], 1):
+            test_path = os.path.join(base_path, part)
+            if os.path.exists(test_path):
+                base_path = test_path
+            else:
+                # This and remaining parts are module names, not path components
+                submodule_parts = parts[i:]
+                break
+
+        if submodule_parts:
+            # Load parent package first
+            parent_module = _load_path_based_module(base_path)
+
+            # Now load submodules
+            current_module = parent_module
+            current_name = base_path
+            for submod in submodule_parts:
+                current_name = f"{current_name}.{submod}"
+                if current_name in sys.modules:
+                    current_module = sys.modules[current_name]
+                else:
+                    # Try to import as attribute or load from file
+                    if hasattr(current_module, submod):
+                        current_module = getattr(current_module, submod)
+                    else:
+                        # Try to load the submodule file
+                        if hasattr(current_module, '__path__'):
+                            for parent_path in current_module.__path__:
+                                submod_file = os.path.join(parent_path, submod + '.py')
+                                submod_pkg = os.path.join(parent_path, submod, '__init__.py')
+                                if os.path.exists(submod_file):
+                                    spec = importlib.util.spec_from_file_location(current_name, submod_file)
+                                    current_module = importlib.util.module_from_spec(spec)
+                                    current_module.__package__ = f"{base_path}.{'.'.join(submodule_parts[:-1])}" if len(submodule_parts) > 1 else base_path
+                                    sys.modules[current_name] = current_module
+                                    spec.loader.exec_module(current_module)
+                                    break
+                                elif os.path.exists(submod_pkg):
+                                    spec = importlib.util.spec_from_file_location(current_name, submod_pkg,
+                                                                                  submodule_search_locations=[os.path.dirname(submod_pkg)])
+                                    current_module = importlib.util.module_from_spec(spec)
+                                    sys.modules[current_name] = current_module
+                                    spec.loader.exec_module(current_module)
+                                    break
+                        else:
+                            raise ModuleNotFoundError(f"Cannot find submodule {submod} in {current_name}")
+            return current_module
+
+    # Simple path-based module (no submodule parts)
+    if os.path.isdir(module_name):
+        init_path = os.path.join(module_name, "__init__.py")
+        submodule_search_locations = [module_name]
+    else:
+        init_path = module_name
+        submodule_search_locations = None
+
+    if not os.path.exists(init_path):
+        raise ModuleNotFoundError(f"Cannot find module at path: {module_name}")
+
+    spec = importlib.util.spec_from_file_location(
+        module_name,
+        init_path,
+        submodule_search_locations=submodule_search_locations
+    )
+    module = importlib.util.module_from_spec(spec)
+
+    # Set up package attributes for relative imports
+    if os.path.isdir(module_name):
+        module.__path__ = [module_name]
+        module.__package__ = module_name
+    else:
+        module.__package__ = module_name.rsplit('.', 1)[0] if '.' in module_name else ''
+
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)
+
+    return module
+
+
 def _execute_method_call(module_name: str, class_name: str, method_name: str,
                          self_state: dict, kwargs: dict) -> Any:
     """
@@ -114,9 +298,28 @@ def _execute_method_call(module_name: str, class_name: str, method_name: str,
     This function imports the class fresh and calls the original (un-decorated) method.
     """
     import importlib
+    import os
+    import sys
 
     # Import the module
-    module = importlib.import_module(module_name)
+    logger.debug(f"Attempting to import module_name={module_name}")
+
+    # Check if module_name is a filesystem path (ComfyUI uses paths as module names)
+    # This happens because ComfyUI's load_custom_node uses the full path as sys_module_name
+    if module_name.startswith('/') or (os.sep in module_name and not module_name.startswith('.')):
+        # Check if the base path exists to confirm it's a path-based module
+        base_path = module_name.split('.')[0] if '.' in module_name else module_name
+        if os.path.exists(base_path):
+            logger.debug(f"Detected path-based module name, using file-based import")
+            # Install the meta path finder to handle relative imports within the package
+            _ensure_path_finder_installed()
+            module = _load_path_based_module(module_name)
+        else:
+            # Doesn't look like a valid path, try standard import
+            module = importlib.import_module(module_name)
+    else:
+        # Standard module name - use importlib.import_module
+        module = importlib.import_module(module_name)
     cls = getattr(module, class_name)
 
     # Create instance with proper __slots__ handling
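A hedged illustration (paths and class name hypothetical) of what `_execute_method_call` does when ComfyUI hands it a path-like module name, using the helpers added above:

```python
from comfy_env.workers.torch_mp import (
    _ensure_path_finder_installed,
    _load_path_based_module,
)

# Let relative imports inside the node pack resolve through the meta path finder,
# then load the pack itself from its filesystem path.
_ensure_path_finder_installed()
module = _load_path_based_module("/opt/ComfyUI/custom_nodes/my_node_pack")  # hypothetical path
NodeClass = getattr(module, "MyNode")  # hypothetical class name
```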
{comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: comfy-env
-Version: 0.0.19
+Version: 0.0.22
 Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
 Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
 Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
@@ -126,7 +126,7 @@ comfy-env resolve nvdiffrast==0.4.0
 comfy-env doctor
 ```
 
-##
+## Configurations
 
 ### comfy-env.toml
 
{comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/RECORD
CHANGED
@@ -1,13 +1,14 @@
-comfy_env/__init__.py,sha256=
+comfy_env/__init__.py,sha256=u2uTyoysPQyNMcRp5U4VTMJF11FBW6Goqu0DN-BdUuY,3678
 comfy_env/cli.py,sha256=X-GCQMP0mtMcE3ZgkT-VLQ4Gq3UUvcb_Ux_NClEFhgI,15975
-comfy_env/decorator.py,sha256=
+comfy_env/decorator.py,sha256=6JCKwLHaZtOLVDexs_gh_-NtS2ZK0V7nGCPqkyeYEAA,16688
 comfy_env/errors.py,sha256=8hN8NDlo8oBUdapc-eT3ZluigI5VBzfqsSBvQdfWlz4,9943
-comfy_env/install.py,sha256=
+comfy_env/install.py,sha256=CA5O0-ghkTdV67fVyKJy6wt1vL35inBeqWtA52udjHI,24301
+comfy_env/pixi.py,sha256=rzKdwI8ZQhK9Hozhu0-c00YqFOBTKmACHVH0kRcPWqQ,12294
 comfy_env/registry.py,sha256=uFCtGmWYvwGCqObXgzmArX7o5JsFNsHXxayofk3m6no,2569
 comfy_env/resolver.py,sha256=l-AnmCE1puG6CvdpDB-KrsfG_cn_3uO2DryYizUnG_4,12474
 comfy_env/env/__init__.py,sha256=imQdoQEQvrRT-QDtyNpFlkVbm2fBzgACdpQwRPd09fI,1157
-comfy_env/env/config.py,sha256=
-comfy_env/env/config_file.py,sha256
+comfy_env/env/config.py,sha256=5rK7r2uRItMXJnKAn8DmVQoadLo2njHTuaxrWybhppU,7469
+comfy_env/env/config_file.py,sha256=1UdcL1TwKceGaSunCnsHiuPyxpCSq1JpelScUEsCBn8,23669
 comfy_env/env/cuda_gpu_detection.py,sha256=YLuXUdWg6FeKdNyLlQAHPlveg4rTenXJ2VbeAaEi9QE,9755
 comfy_env/env/manager.py,sha256=bbV1MpURNGuBJ1sSWg_2oSU0J-dW-FhBCuHHHQxgrSM,24785
 comfy_env/env/security.py,sha256=dNSitAnfBNVdvxgBBntYw33AJaCs_S1MHb7KJhAVYzM,8171
@@ -29,11 +30,11 @@ comfy_env/workers/__init__.py,sha256=IKZwOvrWOGqBLDUIFAalg4CdqzJ_YnAdxo2Ha7gZTJ0
 comfy_env/workers/base.py,sha256=ZILYXlvGCWuCZXmjKqfG8VeD19ihdYaASdlbasl2BMo,2312
 comfy_env/workers/pool.py,sha256=MtjeOWfvHSCockq8j1gfnxIl-t01GSB79T5N4YB82Lg,6956
 comfy_env/workers/tensor_utils.py,sha256=TCuOAjJymrSbkgfyvcKtQ_KbVWTqSwP9VH_bCaFLLq8,6409
-comfy_env/workers/torch_mp.py,sha256=
+comfy_env/workers/torch_mp.py,sha256=4YSNPn7hALrvMVbkO4RkTeFTcc0lhfLMk5QTWjY4PHw,22134
 comfy_env/workers/venv.py,sha256=_ekHfZPqBIPY08DjqiXm6cTBQH4DrbxRWR3AAv3mit8,31589
 comfy_env/wheel_sources.yml,sha256=nSZ8XB_I5JXQGB7AgC6lHs_IXMd9Kcno10artNL8BKw,7775
-comfy_env-0.0.
-comfy_env-0.0.
-comfy_env-0.0.
-comfy_env-0.0.
-comfy_env-0.0.
+comfy_env-0.0.22.dist-info/METADATA,sha256=1T-aoJcHUO5KXABxqmsqD2_rKvSqy7IZdI_ejMoHw4Y,5400
+comfy_env-0.0.22.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+comfy_env-0.0.22.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
+comfy_env-0.0.22.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
+comfy_env-0.0.22.dist-info/RECORD,,
{comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/WHEEL
File without changes
{comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/entry_points.txt
File without changes
{comfy_env-0.0.19.dist-info → comfy_env-0.0.22.dist-info}/licenses/LICENSE
File without changes