comfy-env 0.1.15__py3-none-any.whl → 0.1.16__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- comfy_env/__init__.py +116 -41
- comfy_env/cli.py +89 -317
- comfy_env/config/__init__.py +18 -6
- comfy_env/config/parser.py +22 -76
- comfy_env/config/types.py +37 -0
- comfy_env/detection/__init__.py +77 -0
- comfy_env/detection/cuda.py +61 -0
- comfy_env/detection/gpu.py +230 -0
- comfy_env/detection/platform.py +70 -0
- comfy_env/detection/runtime.py +103 -0
- comfy_env/environment/__init__.py +53 -0
- comfy_env/environment/cache.py +141 -0
- comfy_env/environment/libomp.py +41 -0
- comfy_env/environment/paths.py +38 -0
- comfy_env/environment/setup.py +88 -0
- comfy_env/install.py +127 -329
- comfy_env/isolation/__init__.py +32 -2
- comfy_env/isolation/tensor_utils.py +83 -0
- comfy_env/isolation/workers/__init__.py +16 -0
- comfy_env/{workers → isolation/workers}/mp.py +1 -1
- comfy_env/{workers → isolation/workers}/subprocess.py +1 -1
- comfy_env/isolation/wrap.py +128 -509
- comfy_env/packages/__init__.py +60 -0
- comfy_env/packages/apt.py +36 -0
- comfy_env/packages/cuda_wheels.py +97 -0
- comfy_env/packages/node_dependencies.py +77 -0
- comfy_env/packages/pixi.py +85 -0
- comfy_env/packages/toml_generator.py +88 -0
- comfy_env-0.1.16.dist-info/METADATA +279 -0
- comfy_env-0.1.16.dist-info/RECORD +36 -0
- comfy_env/cache.py +0 -203
- comfy_env/nodes.py +0 -187
- comfy_env/pixi/__init__.py +0 -48
- comfy_env/pixi/core.py +0 -587
- comfy_env/pixi/cuda_detection.py +0 -303
- comfy_env/pixi/platform/__init__.py +0 -21
- comfy_env/pixi/platform/base.py +0 -96
- comfy_env/pixi/platform/darwin.py +0 -53
- comfy_env/pixi/platform/linux.py +0 -68
- comfy_env/pixi/platform/windows.py +0 -284
- comfy_env/pixi/resolver.py +0 -198
- comfy_env/prestartup.py +0 -208
- comfy_env/workers/__init__.py +0 -38
- comfy_env/workers/tensor_utils.py +0 -188
- comfy_env-0.1.15.dist-info/METADATA +0 -291
- comfy_env-0.1.15.dist-info/RECORD +0 -31
- comfy_env/{workers → isolation/workers}/base.py +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/WHEEL +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/entry_points.txt +0 -0
- {comfy_env-0.1.15.dist-info → comfy_env-0.1.16.dist-info}/licenses/LICENSE +0 -0

comfy_env-0.1.16.dist-info/METADATA
ADDED
@@ -0,0 +1,279 @@
+Metadata-Version: 2.4
+Name: comfy-env
+Version: 0.1.16
+Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
+Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
+Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
+Project-URL: Issues, https://github.com/PozzettiAndrea/comfy-env/issues
+Author: Andrea Pozzetti
+License: MIT
+License-File: LICENSE
+Keywords: comfyui,cuda,environment,isolation,process,venv,wheels
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.10
+Requires-Dist: pip>=21.0
+Requires-Dist: tomli-w>=1.0.0
+Requires-Dist: tomli>=2.0.0
+Requires-Dist: uv>=0.4.0
+Provides-Extra: dev
+Requires-Dist: mypy; extra == 'dev'
+Requires-Dist: pytest; extra == 'dev'
+Requires-Dist: ruff; extra == 'dev'
+Description-Content-Type: text/markdown
+
+# comfy-env
+
+Environment management for ComfyUI custom nodes.
+
+## Quick Start
+
+```bash
+pip install comfy-env
+```
+
+**1. Create `comfy-env.toml` in your node directory:**
+
+```toml
+[cuda]
+packages = ["nvdiffrast", "pytorch3d"]
+
+[pypi-dependencies]
+trimesh = { version = "*", extras = ["easy"] }
+```
+
+**2. In `install.py`:**
+
+```python
+from comfy_env import install
+install()
+```
+
+**3. In `prestartup_script.py`:**
+
+```python
+from comfy_env import setup_env
+setup_env()
+```
+
+That's it. CUDA wheels install without compilation, and the environment is ready.
+
+---
+
+## Configuration
+
+Create `comfy-env.toml` in your node directory:
+
+```toml
+# Python version for isolated environment (optional)
+python = "3.11"
+
+# CUDA packages from cuda-wheels index (no compilation needed)
+[cuda]
+packages = ["nvdiffrast", "pytorch3d", "flash-attn"]
+
+# System packages (Linux only)
+[apt]
+packages = ["libgl1-mesa-glx", "libglu1-mesa"]
+
+# Environment variables
+[env_vars]
+KMP_DUPLICATE_LIB_OK = "TRUE"
+OMP_NUM_THREADS = "1"
+
+# Dependent custom nodes to auto-install
+[node_reqs]
+ComfyUI_essentials = "cubiq/ComfyUI_essentials"
+
+# Conda packages (via pixi)
+[dependencies]
+cgal = "*"
+
+# PyPI packages
+[pypi-dependencies]
+trimesh = { version = "*", extras = ["easy"] }
+numpy = "*"
+```
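
The `[node_reqs]` table above drives dependent-node installation. A rough sketch of what a single entry expands to, based on the 0.1.15 `comfy_env/nodes.py` removed further down in this diff (0.1.16 moves this logic into `comfy_env/packages/node_dependencies.py`, so details may differ):

```python
from pathlib import Path
import subprocess

def normalize_repo_url(repo: str) -> str:
    # "owner/repo" shorthand -> full GitHub URL, same rule as the 0.1.15 module
    if repo.startswith(("http://", "https://")):
        return repo
    return f"https://github.com/{repo}"

# ComfyUI_essentials = "cubiq/ComfyUI_essentials" roughly becomes:
target = Path("custom_nodes") / "ComfyUI_essentials"
subprocess.run(
    ["git", "clone", "--depth", "1",
     normalize_repo_url("cubiq/ComfyUI_essentials"), str(target)],
    check=False,
)
# ...after which its requirements.txt is installed and its install.py is run.
```
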
+
+---
+
+## Process Isolation
+
+For nodes with conflicting dependencies, use isolated execution:
+
+```python
+# In nodes/__init__.py
+from pathlib import Path
+from comfy_env import wrap_isolated_nodes
+
+# Import your isolated nodes
+from .cgal import NODE_CLASS_MAPPINGS as cgal_mappings
+
+# Wrap them for isolated execution
+NODE_CLASS_MAPPINGS = wrap_isolated_nodes(
+    cgal_mappings,
+    Path(__file__).parent / "cgal"  # Directory with comfy-env.toml
+)
+```
+
+Each wrapped node runs in a subprocess with its own Python environment.
+
+---
+
+## CLI Commands
+
+```bash
+# Show detected environment
+comfy-env info
+
+# Install dependencies
+comfy-env install
+
+# Preview without installing
+comfy-env install --dry-run
+
+# Verify packages
+comfy-env doctor
+
+# Install system packages
+comfy-env apt-install
+```
+
+---
+
+## API Reference
+
+### install()
+
+Install dependencies from comfy-env.toml:
+
+```python
+from comfy_env import install
+
+install()                    # Auto-detect config
+install(dry_run=True)        # Preview only
+install(config="path.toml")  # Explicit config
+```
+
+### setup_env()
+
+Set up environment at ComfyUI startup:
+
+```python
+from comfy_env import setup_env
+
+setup_env()  # Auto-detects node directory from caller
+```
+
+Sets library paths, environment variables, and injects site-packages.
+
+### wrap_isolated_nodes()
+
+Wrap nodes for subprocess isolation:
+
+```python
+from comfy_env import wrap_isolated_nodes
+
+wrapped = wrap_isolated_nodes(NODE_CLASS_MAPPINGS, node_dir)
+```
+
+### Detection
+
+```python
+from comfy_env import (
+    detect_cuda_version,  # Returns "12.8", "12.4", or None
+    detect_gpu,           # Returns GPUInfo or None
+    get_gpu_summary,      # Human-readable string
+    RuntimeEnv,           # Combined runtime info
+)
+
+env = RuntimeEnv.detect()
+print(env)  # Python 3.11, CUDA 12.8, PyTorch 2.8.0, GPU: RTX 4090
+```
+
+### Workers
+
+Low-level process isolation:
+
+```python
+from comfy_env import MPWorker, SubprocessWorker
+
+# Same Python version (multiprocessing)
+worker = MPWorker()
+result = worker.call(my_function, arg1, arg2)
+
+# Different Python version (subprocess)
+worker = SubprocessWorker(python="/path/to/python")
+result = worker.call(my_function, arg1, arg2)
+```
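
The `my_function` above is a placeholder. A self-contained sketch of the same call pattern; the note about using a plain module-level callable is an assumption based on ordinary pickling behaviour, not something stated in this README:

```python
from comfy_env import MPWorker

def add(a: int, b: int) -> int:
    # A plain module-level function so the worker process can reconstruct it.
    return a + b

worker = MPWorker()
print(worker.call(add, 2, 3))  # -> 5
```
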
+
+---
+
+## Real Example
+
+See [ComfyUI-GeometryPack](https://github.com/PozzettiAndrea/ComfyUI-GeometryPack) for a production example with:
+
+- Multiple isolated environments (CGAL, Blender, GPU)
+- Per-subdirectory comfy-env.toml
+- Prestartup asset copying
+- Different Python versions (3.11 for Blender API)
+
+---
+
+## Architecture
+
+### Layers
+
+```
+comfy_env/
+├── detection/    # Pure functions - CUDA, GPU, platform detection
+├── config/       # Pure parsing - comfy-env.toml → typed config
+├── environment/  # Side effects - cache, paths, setup
+├── packages/     # Side effects - pixi, cuda-wheels, apt
+├── isolation/    # Side effects - subprocess workers, node wrapping
+└── install.py    # Orchestration
+```
+
+### Why Isolation?
+
+ComfyUI nodes share a single Python environment. This breaks when:
+
+1. **Dependency conflicts**: Node A needs `torch==2.4`, Node B needs `torch==2.8`
+2. **Native library conflicts**: Two packages bundle incompatible libomp
+3. **Python version requirements**: Blender API requires Python 3.11
+
+Solution: Run each node group in its own subprocess with isolated dependencies.
+
+### Why CUDA Wheels?
+
+Installing packages like `nvdiffrast` normally requires:
+- CUDA toolkit
+- C++ compiler
+- 30+ minutes of compilation
+
+CUDA wheels from [cuda-wheels](https://pozzettiandrea.github.io/cuda-wheels/) are pre-built for common configurations:
+
+| GPU | CUDA | PyTorch |
+|-----|------|---------|
+| Blackwell (sm_100+) | 12.8 | 2.8 |
+| Ada/Hopper/Ampere | 12.8 | 2.8 |
+| Turing | 12.8 | 2.8 |
+| Pascal | 12.4 | 2.4 |
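
Read as code, the table is a small lookup. The sketch below only restates the table; the real wheel resolution lives in `comfy_env/packages/cuda_wheels.py` and may use finer-grained rules:

```python
def pick_cuda_torch(gpu_generation: str) -> tuple[str, str]:
    # Returns (CUDA version, PyTorch version) per the support matrix above.
    if gpu_generation == "Pascal":
        return ("12.4", "2.4")
    # Blackwell (sm_100+), Ada/Hopper/Ampere, Turing
    return ("12.8", "2.8")
```
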
+
+### How Environments Work
+
+1. **Central cache**: Environments stored at `~/.comfy-env/envs/`
+2. **Marker files**: `.comfy-env-marker.toml` links node → env
+3. **Orphan cleanup**: Envs deleted when their node is removed
+4. **Hash-based naming**: Config changes create new envs
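
The removed 0.1.15 `comfy_env/cache.py` at the bottom of this diff shows the shape of this mechanism. A sketch of reading a marker file, based on that version (0.1.16 keeps this logic in `comfy_env/environment/cache.py`, where field names may differ):

```python
import tomli  # already a comfy-env dependency
from pathlib import Path

marker = Path("ComfyUI/custom_nodes/my_node/.comfy-env-marker.toml")
if marker.exists():
    env = tomli.loads(marker.read_text())["env"]
    # In 0.1.15 the env name was "<nodename>_<subfolder>_<8-char config hash>",
    # e.g. "my_node__a1b2c3d4", stored under ~/.comfy-env/envs/.
    print(env["name"], env["path"], env["config_hash"])
```
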
+
+---
+
+## License
+
+MIT

comfy_env-0.1.16.dist-info/RECORD
ADDED
@@ -0,0 +1,36 @@
+comfy_env/__init__.py,sha256=uoHq-m1274tOeinWsIpW81MkRQilHx9z8_coJSqH9qc,4641
+comfy_env/cli.py,sha256=abdUBItk8OkzxKBu7cKRPizO5_i5DFflvx7pYpPc7OM,6696
+comfy_env/install.py,sha256=BV8OfY3Rt-BiKFzpDNHfs4mfLYalbsiIMNU-y9CmLDg,10467
+comfy_env/config/__init__.py,sha256=noqhycE0UF4ZntV5NnWcFApppQludQDHvJOtLsQgUIo,542
+comfy_env/config/parser.py,sha256=79C8LIwCO4920MU7CJ08hfMWh-5Ehzr-i5XQna5K81A,1951
+comfy_env/config/types.py,sha256=vFgWeEl_p26OmcoUv0wAOlHe9GBio2isjIWl7kACKFY,1096
+comfy_env/detection/__init__.py,sha256=dH84PlSRJfs8MRJp2gp9_NX8ZzGIDHR8iZXy7_B8Ez4,1671
+comfy_env/detection/cuda.py,sha256=BOaRQOGP2yoaPCO9eqPvWBB5Us_MNo-sSbadQsIjHqM,1708
+comfy_env/detection/gpu.py,sha256=Rf7pgtZXzUbJqcXzZXQi-yK5naeuSP1FiL6SdreeADM,8393
+comfy_env/detection/platform.py,sha256=Xe01dIZm7JT19kIH-j11h7KIBVRaKTLh8u4TzI3uZ6E,2127
+comfy_env/detection/runtime.py,sha256=gDplni7ZPGW7WjNJuqWbtgSwkWz27kBWSFvYbhXun6o,3756
+comfy_env/environment/__init__.py,sha256=WfZnyOvbI0MrDQPYTtOG2kHn0XCSCrqKcOlJcmB29nU,1009
+comfy_env/environment/cache.py,sha256=RGfVW2caMO0Dd1nX2otUQP0xW3pVS7iSOP4vIUAMdEA,4568
+comfy_env/environment/libomp.py,sha256=nzr3kDnRLgcf9CZ_WF4ItWskqEDS2S0geqZS43XoKig,1319
+comfy_env/environment/paths.py,sha256=5TFFAkOZXa8R3cHfVHDEFnwy6_JcHilVBOHJuy-yqR0,1129
+comfy_env/environment/setup.py,sha256=34To-cJX85sZ5W33dxcNosedrrICNUzVzqBcJkq4FLI,3013
+comfy_env/isolation/__init__.py,sha256=XfMLEiBIcEzHG_k2vk9fT9GvFfmOsfbpM26czuxbdRI,800
+comfy_env/isolation/tensor_utils.py,sha256=2_f4jjylqCPaPldD1Jw-es5CyOtuF5I1ROdyEIxsg-U,2951
+comfy_env/isolation/wrap.py,sha256=K7GAkqU_Uxe717eUtPsFv5kcr_Jfbh3x79A-8vbY1nY,8592
+comfy_env/isolation/workers/__init__.py,sha256=Zp6sZSRBcb5Negqgzqs3jPjfO9T1u3nNrQhp6WqTAuc,325
+comfy_env/isolation/workers/base.py,sha256=4ZYTaQ4J0kBHCoO_OfZnsowm4rJCoqinZUaOtgkOPbw,2307
+comfy_env/isolation/workers/mp.py,sha256=ygOgx2iyLN7l5fWkKI4lqzQsDyfAAd9Gb4gTYLp7o1A,34061
+comfy_env/isolation/workers/subprocess.py,sha256=ML6I9IenReagP8iT0Cd2ipet6JPK1gnDbOianOuFwOw,57164
+comfy_env/packages/__init__.py,sha256=6PTwUfUdJDTbIw46dCiA42qk4zUe_gw29xOaklBiMMc,1193
+comfy_env/packages/apt.py,sha256=pxy3A5ZHv3X8ExCVyohODY8Fcy9ji4izIVPfYoxhqT4,1027
+comfy_env/packages/cuda_wheels.py,sha256=G_CnlwNcfeWlEU24aCVBpeqQQ05y8_02dDLBwBFNwII,3980
+comfy_env/packages/node_dependencies.py,sha256=AX_CY6j43tTY5KhyPfU7Wz6zgLAfWF0o0JkTrcNSecg,2966
+comfy_env/packages/pixi.py,sha256=RPu8x5sSOLE1CYAhWMMjoQrbFGGt00fdsbqtRcTz7LQ,3871
+comfy_env/packages/toml_generator.py,sha256=Vhc8F9euHhMTwH1TV6t96-D9Pjrn9jIN4e9WXrCIFE8,3414
+comfy_env/templates/comfy-env-instructions.txt,sha256=ve1RAthW7ouumU9h6DM7mIRX1MS8_Tyonq2U4tcrFu8,1031
+comfy_env/templates/comfy-env.toml,sha256=ROIqi4BlPL1MEdL1VgebfTHpdwPNYGHwWeigI9Kw-1I,4831
+comfy_env-0.1.16.dist-info/METADATA,sha256=SLbnLOFz5AbqS8YT9-TWslRzo4QghO2EwUtQwn2B0OE,6468
+comfy_env-0.1.16.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+comfy_env-0.1.16.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
+comfy_env-0.1.16.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
+comfy_env-0.1.16.dist-info/RECORD,,

comfy_env/cache.py
DELETED
@@ -1,203 +0,0 @@
-"""Central environment cache management."""
-
-import hashlib
-import os
-import shutil
-import sys
-from datetime import datetime
-from pathlib import Path
-from typing import Optional, Tuple, Callable
-
-# Import version
-try:
-    from . import __version__
-except ImportError:
-    __version__ = "0.0.0-dev"
-
-import tomli
-import tomli_w
-
-
-# Constants
-CACHE_DIR = Path.home() / ".comfy-env" / "envs"
-MARKER_FILE = ".comfy-env-marker.toml"
-METADATA_FILE = ".comfy-env-metadata.toml"
-
-
-def get_cache_dir() -> Path:
-    """Get central cache directory, create if needed."""
-    CACHE_DIR.mkdir(parents=True, exist_ok=True)
-    return CACHE_DIR
-
-
-def compute_config_hash(config_path: Path) -> str:
-    """Compute hash of comfy-env.toml content (first 8 chars of SHA256)."""
-    content = config_path.read_bytes()
-    return hashlib.sha256(content).hexdigest()[:8]
-
-
-def sanitize_name(name: str) -> str:
-    """Sanitize a name for use in filesystem paths."""
-    # Lowercase and replace problematic chars
-    name = name.lower()
-    for prefix in ("comfyui-", "comfyui_"):
-        if name.startswith(prefix):
-            name = name[len(prefix):]
-    return name.replace("-", "_").replace(" ", "_")
-
-
-def get_env_name(node_dir: Path, config_path: Path) -> str:
-    """Generate env name: <nodename>_<subfolder>_<hash>."""
-    # Get node name
-    node_name = sanitize_name(node_dir.name)
-
-    # Get subfolder (relative path from node_dir to config parent)
-    config_parent = config_path.parent
-    if config_parent == node_dir:
-        subfolder = ""
-    else:
-        try:
-            rel_path = config_parent.relative_to(node_dir)
-            subfolder = rel_path.as_posix().replace("/", "_")
-        except ValueError:
-            # config_path not under node_dir - use parent folder name
-            subfolder = sanitize_name(config_parent.name)
-
-    # Compute hash
-    config_hash = compute_config_hash(config_path)
-
-    return f"{node_name}_{subfolder}_{config_hash}"
-
-
-def get_central_env_path(node_dir: Path, config_path: Path) -> Path:
-    """Get path to central environment for this config."""
-    env_name = get_env_name(node_dir, config_path)
-    return get_cache_dir() / env_name
-
-
-def write_marker(config_path: Path, env_path: Path) -> None:
-    """Write marker file linking node to central env."""
-    marker_path = config_path.parent / MARKER_FILE
-    marker_data = {
-        "env": {
-            "name": env_path.name,
-            "path": str(env_path),
-            "config_hash": compute_config_hash(config_path),
-            "created": datetime.now().isoformat(),
-            "comfy_env_version": __version__,
-        }
-    }
-    marker_path.write_text(tomli_w.dumps(marker_data))
-
-
-def write_env_metadata(env_path: Path, marker_path: Path) -> None:
-    """Write metadata file for orphan detection."""
-    metadata_path = env_path / METADATA_FILE
-    metadata = {
-        "marker_path": str(marker_path),
-        "created": datetime.now().isoformat(),
-    }
-    metadata_path.write_text(tomli_w.dumps(metadata))
-
-
-def read_marker(marker_path: Path) -> Optional[dict]:
-    """Read marker file, return None if invalid/missing."""
-    if not marker_path.exists():
-        return None
-    try:
-        with open(marker_path, "rb") as f:
-            return tomli.load(f)
-    except Exception:
-        return None
-
-
-def read_env_metadata(env_path: Path) -> Optional[dict]:
-    """Read metadata file from env, return None if invalid/missing."""
-    metadata_path = env_path / METADATA_FILE
-    if not metadata_path.exists():
-        return None
-    try:
-        with open(metadata_path, "rb") as f:
-            return tomli.load(f)
-    except Exception:
-        return None
-
-
-def resolve_env_path(node_dir: Path) -> Tuple[Optional[Path], Optional[Path], Optional[Path]]:
-    """Resolve environment path. Returns (env_path, site_packages, lib_dir)."""
-    # 1. Check marker file -> central cache
-    marker_path = node_dir / MARKER_FILE
-    marker = read_marker(marker_path)
-    if marker and "env" in marker:
-        env_path = Path(marker["env"]["path"])
-        if env_path.exists():
-            return _get_env_paths(env_path)
-
-    # 2. Check _env_<name>
-    node_name = sanitize_name(node_dir.name)
-    env_name = f"_env_{node_name}"
-    local_env = node_dir / env_name
-    if local_env.exists():
-        return _get_env_paths(local_env)
-
-    # 3. Check .pixi/envs/default
-    pixi_env = node_dir / ".pixi" / "envs" / "default"
-    if pixi_env.exists():
-        return _get_env_paths(pixi_env)
-
-    # 4. Check .venv
-    venv_dir = node_dir / ".venv"
-    if venv_dir.exists():
-        return _get_env_paths(venv_dir)
-
-    return None, None, None
-
-
-def _get_env_paths(env_path: Path) -> Tuple[Path, Optional[Path], Optional[Path]]:
-    """Get site-packages and lib paths from an environment."""
-    import glob
-
-    if sys.platform == "win32":
-        site_packages = env_path / "Lib" / "site-packages"
-        lib_dir = env_path / "Library" / "bin"
-    else:
-        # Linux/Mac: lib/python*/site-packages
-        matches = glob.glob(str(env_path / "lib" / "python*" / "site-packages"))
-        site_packages = Path(matches[0]) if matches else None
-        lib_dir = env_path / "lib"
-
-    return env_path, site_packages, lib_dir
-
-
-def cleanup_orphaned_envs(log: Callable[[str], None] = print) -> int:
-    """Remove orphaned environments. Returns count cleaned."""
-    cache_dir = get_cache_dir()
-    if not cache_dir.exists():
-        return 0
-
-    cleaned = 0
-    for env_dir in cache_dir.iterdir():
-        if not env_dir.is_dir():
-            continue
-
-        # Skip if no metadata (might be manually created or old format)
-        metadata = read_env_metadata(env_dir)
-        if not metadata:
-            continue
-
-        # Check if marker file still exists
-        marker_path_str = metadata.get("marker_path", "")
-        if not marker_path_str:
-            continue
-
-        marker_path = Path(marker_path_str)
-        if not marker_path.exists():
-            # Marker gone = node was deleted = orphan
-            log(f"[comfy-env] Cleaning orphaned env: {env_dir.name}")
-            try:
-                shutil.rmtree(env_dir)
-                cleaned += 1
-            except Exception as e:
-                log(f"[comfy-env] Failed to cleanup {env_dir.name}: {e}")
-
-    return cleaned

comfy_env/nodes.py
DELETED
@@ -1,187 +0,0 @@
-"""
-Node dependency installation for comfy-env.
-
-This module handles installation of dependent ComfyUI custom nodes
-specified in the [node_reqs] section of comfy-env.toml.
-
-Example configuration:
-    [node_reqs]
-    ComfyUI_essentials = "cubiq/ComfyUI_essentials"
-    ComfyUI-DepthAnythingV2 = "kijai/ComfyUI-DepthAnythingV2"
-"""
-
-import shutil
-import subprocess
-import sys
-from pathlib import Path
-from typing import TYPE_CHECKING, Callable, List, Set
-
-if TYPE_CHECKING:
-    from .config.parser import NodeReq
-
-
-def normalize_repo_url(repo: str) -> str:
-    """
-    Convert GitHub shorthand to full URL.
-
-    Args:
-        repo: Either 'owner/repo' or full URL like 'https://github.com/owner/repo'
-
-    Returns:
-        Full GitHub URL
-    """
-    if repo.startswith("http://") or repo.startswith("https://"):
-        return repo
-    return f"https://github.com/{repo}"
-
-
-def clone_node(
-    repo: str,
-    name: str,
-    target_dir: Path,
-    log: Callable[[str], None],
-) -> Path:
-    """
-    Clone a node repository to target_dir/name.
-
-    Args:
-        repo: GitHub repo path (e.g., 'owner/repo') or full URL
-        name: Directory name for the cloned repo
-        target_dir: Parent directory (usually custom_nodes/)
-        log: Logging callback
-
-    Returns:
-        Path to the cloned node directory
-
-    Raises:
-        RuntimeError: If git clone fails
-    """
-    node_path = target_dir / name
-    url = normalize_repo_url(repo)
-
-    log(f" Cloning {name} from {url}...")
-    result = subprocess.run(
-        ["git", "clone", "--depth", "1", url, str(node_path)],
-        capture_output=True,
-        text=True,
-    )
-
-    if result.returncode != 0:
-        raise RuntimeError(f"Failed to clone {url}: {result.stderr.strip()}")
-
-    return node_path
-
-
-def install_requirements(
-    node_dir: Path,
-    log: Callable[[str], None],
-) -> None:
-    """
-    Install requirements.txt in a node directory if it exists.
-
-    Args:
-        node_dir: Path to the node directory
-        log: Logging callback
-    """
-    requirements_file = node_dir / "requirements.txt"
-
-    if not requirements_file.exists():
-        return
-
-    log(f" Installing requirements for {node_dir.name}...")
-
-    # Try uv first, fall back to pip if uv not in PATH
-    if shutil.which("uv"):
-        cmd = ["uv", "pip", "install", "-r", str(requirements_file), "--python", sys.executable]
-    else:
-        cmd = [sys.executable, "-m", "pip", "install", "-r", str(requirements_file)]
-
-    result = subprocess.run(cmd, cwd=node_dir, capture_output=True, text=True)
-    if result.returncode != 0:
-        log(f" Warning: requirements.txt install failed for {node_dir.name}: {result.stderr.strip()[:200]}")
-
-
-def run_install_script(
-    node_dir: Path,
-    log: Callable[[str], None],
-) -> None:
-    """
-    Run install.py in a node directory if it exists.
-
-    Args:
-        node_dir: Path to the node directory
-        log: Logging callback
-    """
-    install_script = node_dir / "install.py"
-
-    if install_script.exists():
-        log(f" Running install.py for {node_dir.name}...")
-        result = subprocess.run(
-            [sys.executable, str(install_script)],
-            cwd=node_dir,
-            capture_output=True,
-            text=True,
-        )
-        if result.returncode != 0:
-            log(f" Warning: install.py failed for {node_dir.name}: {result.stderr.strip()[:200]}")
-
-
-def install_node_deps(
-    node_reqs: "List[NodeReq]",
-    custom_nodes_dir: Path,
-    log: Callable[[str], None],
-    visited: Set[str],
-) -> None:
-    """
-    Install node dependencies recursively.
-
-    Args:
-        node_reqs: List of NodeReq objects to install
-        custom_nodes_dir: Path to custom_nodes directory
-        log: Logging callback
-        visited: Set of already-processed node names (for cycle detection)
-    """
-    from .config.parser import discover_config
-
-    for req in node_reqs:
-        # Skip if already visited (cycle detection)
-        if req.name in visited:
-            log(f" {req.name}: already in dependency chain, skipping")
-            continue
-
-        visited.add(req.name)
-
-        node_path = custom_nodes_dir / req.name
-
-        # Skip if already installed (directory exists)
-        if node_path.exists():
-            log(f" {req.name}: already installed, skipping")
-            continue
-
-        try:
-            # Clone the repository
-            clone_node(req.repo, req.name, custom_nodes_dir, log)
-
-            # Install requirements.txt if present
-            install_requirements(node_path, log)
-
-            # Run install.py if present
-            run_install_script(node_path, log)
-
-            # Recursively process nested node_reqs
-            try:
-                nested_config = discover_config(node_path)
-                if nested_config and nested_config.node_reqs:
-                    log(f" {req.name}: found {len(nested_config.node_reqs)} nested dependencies")
-                    install_node_deps(
-                        nested_config.node_reqs,
-                        custom_nodes_dir,
-                        log,
-                        visited,
-                    )
-            except Exception as e:
-                # Don't fail if we can't parse nested config
-                log(f" {req.name}: could not check for nested deps: {e}")
-
-        except Exception as e:
-            log(f" Warning: Failed to install {req.name}: {e}")