comfy-env 0.0.48__py3-none-any.whl → 0.0.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,310 @@
+ """
+ Import stub system for isolated node packs.
+
+ This module provides automatic import stubbing for packages that exist only
+ in the isolated pixi environment, not in the host ComfyUI Python.
+
+ How it works:
+ 1. Read package names from comfy-env.toml
+ 2. Look up their import names from top_level.txt in the pixi environment
+ 3. Register import hooks that provide stub modules for those imports
+ 4. Stubs allow class definitions to parse without the real packages
+ 5. Real packages are used when FUNCTION runs in the isolated worker
+
+ Usage:
+     # In node pack's __init__.py, BEFORE importing nodes:
+     from comfy_env import setup_isolated_imports
+     setup_isolated_imports(__file__)
+
+     from .nodes import NODE_CLASS_MAPPINGS  # Now works!
+ """
+
+ import sys
+ import types
+ from pathlib import Path
+ from typing import Dict, List, Optional, Set
+
+
+ class _StubModule(types.ModuleType):
+     """
+     A stub module that accepts any attribute access or call.
+     """
+
+     def __init__(self, name: str):
+         super().__init__(name)
+         self.__path__ = []  # Make it a package
+         self.__file__ = f"<stub:{name}>"
+         self._stub_name = name
+
+     def __getattr__(self, name: str):
+         if name.startswith('_'):
+             raise AttributeError(name)
+         return _StubObject(f"{self._stub_name}.{name}")
+
+     def __repr__(self):
+         return f"<StubModule '{self._stub_name}'>"
+
+
+ class _StubObject:
+     """
+     A stub object that accepts any operation.
+     """
+
+     def __init__(self, name: str = "stub"):
+         self._stub_name = name
+
+     def __getattr__(self, name: str):
+         if name.startswith('_'):
+             raise AttributeError(name)
+         return _StubObject(f"{self._stub_name}.{name}")
+
+     def __call__(self, *args, **kwargs):
+         return _StubObject(f"{self._stub_name}()")
+
+     def __iter__(self):
+         return iter([])
+
+     def __len__(self):
+         return 0
+
+     def __bool__(self):
+         return False
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, *args):
+         pass
+
+     def __repr__(self):
+         return f"<Stub '{self._stub_name}'>"
+
+     def __add__(self, other): return self
+     def __radd__(self, other): return self
+     def __sub__(self, other): return self
+     def __rsub__(self, other): return self
+     def __mul__(self, other): return self
+     def __rmul__(self, other): return self
+     def __truediv__(self, other): return self
+     def __rtruediv__(self, other): return self
+     def __eq__(self, other): return False
+     def __ne__(self, other): return True
+     def __lt__(self, other): return False
+     def __le__(self, other): return False
+     def __gt__(self, other): return False
+     def __ge__(self, other): return False
+     def __hash__(self): return hash(self._stub_name)
+     def __getitem__(self, key): return _StubObject(f"{self._stub_name}[{key}]")
+     def __setitem__(self, key, value): pass
+     def __contains__(self, item): return False
+
+
+ class _StubFinder:
+     """Import hook finder that provides stub modules for specified packages."""
+
+     def __init__(self, stub_packages: Set[str]):
+         self.stub_packages = stub_packages
+
+     def find_module(self, fullname: str, path=None):
+         top_level = fullname.split('.')[0]
+         if top_level in self.stub_packages:
+             return _StubLoader(self.stub_packages)
+         return None
+
+
+ class _StubLoader:
+     """Import hook loader that creates stub modules."""
+
+     def __init__(self, stub_packages: Set[str]):
+         self.stub_packages = stub_packages
+
+     def load_module(self, fullname: str):
+         if fullname in sys.modules:
+             return sys.modules[fullname]
+
+         module = _StubModule(fullname)
+         module.__loader__ = self
+
+         if '.' in fullname:
+             parent = fullname.rsplit('.', 1)[0]
+             module.__package__ = parent
+             if parent not in sys.modules:
+                 self.load_module(parent)
+         else:
+             module.__package__ = fullname
+
+         sys.modules[fullname] = module
+         return module
+
+
+ def _normalize_package_name(name: str) -> str:
+     """Normalize package name for comparison (PEP 503)."""
+     return name.lower().replace('-', '_').replace('.', '_')
+
+
+ def _get_import_names_from_pixi(node_dir: Path) -> Set[str]:
+     """
+     Get import names by scanning the pixi environment's site-packages.
+
+     Finds all importable packages by looking for:
+     1. Directories with __init__.py (packages)
+     2. .py files (single-file modules)
+     3. .so/.pyd files (extension modules)
+
+     Returns:
+         Set of import names that should be stubbed.
+     """
+     import_names = set()
+
+     # Find the pixi site-packages
+     pixi_lib = node_dir / ".pixi" / "envs" / "default" / "lib"
+
+     if not pixi_lib.exists():
+         return import_names
+
+     # Find the python version directory (e.g., python3.11)
+     python_dirs = list(pixi_lib.glob("python3.*"))
+     if not python_dirs:
+         return import_names
+
+     site_packages = python_dirs[0] / "site-packages"
+     if not site_packages.exists():
+         return import_names
+
+     # Scan for importable modules
+     for item in site_packages.iterdir():
+         name = item.name
+
+         # Skip private/internal items
+         if name.startswith('_') or name.startswith('.'):
+             continue
+
+         # Skip dist-info and egg-info directories
+         if name.endswith('.dist-info') or name.endswith('.egg-info'):
+             continue
+
+         # Skip common non-module items
+         if name in {'bin', 'share', 'include', 'etc'}:
+             continue
+
+         # Package directory (has __init__.py)
+         if item.is_dir():
+             if (item / "__init__.py").exists():
+                 import_names.add(name)
+             continue
+
+         # Single-file module (.py)
+         if name.endswith('.py'):
+             import_names.add(name[:-3])
+             continue
+
+         # Extension module (.so on Linux, .pyd on Windows)
+         if '.cpython-' in name and (name.endswith('.so') or name.endswith('.pyd')):
+             # Extract module name: foo.cpython-311-x86_64-linux-gnu.so -> foo
+             module_name = name.split('.')[0]
+             import_names.add(module_name)
+             continue
+
+     return import_names
+
+
+ def _filter_to_missing(import_names: Set[str]) -> Set[str]:
+     """Filter to only imports not available in host Python."""
+     missing = set()
+
+     for name in import_names:
+         # Skip if already in sys.modules
+         if name in sys.modules:
+             continue
+
+         # Try to import
+         try:
+             __import__(name)
+         except ImportError:
+             missing.add(name)
+         except Exception:
+             # Other errors - don't stub, let real error surface
+             pass
+
+     return missing
+
+
+ # Track whether we've already set up stubs
+ _stub_finder: Optional[_StubFinder] = None
+
+
+ def setup_isolated_imports(init_file: str) -> List[str]:
+     """
+     Set up import stubs for packages in the pixi environment but not in host Python.
+
+     Call this BEFORE importing your nodes module.
+
+     Args:
+         init_file: The __file__ of the calling module (usually __file__ from __init__.py)
+
+     Returns:
+         List of import names that were stubbed.
+
+     Example:
+         from comfy_env import setup_isolated_imports
+         setup_isolated_imports(__file__)
+
+         from .nodes import NODE_CLASS_MAPPINGS  # Now works!
+     """
+     global _stub_finder
+
+     node_dir = Path(init_file).resolve().parent
+
+     # Get all import names from pixi environment
+     pixi_imports = _get_import_names_from_pixi(node_dir)
+
+     if not pixi_imports:
+         print("[comfy-env] No pixi environment found, skipping import stubbing")
+         return []
+
+     # Filter to only those missing in host
+     missing = _filter_to_missing(pixi_imports)
+
+     if not missing:
+         print("[comfy-env] All pixi packages available in host, no stubbing needed")
+         return []
+
+     # Remove old finder if exists
+     if _stub_finder is not None:
+         try:
+             sys.meta_path.remove(_stub_finder)
+         except ValueError:
+             pass
+
+     # Register new finder
+     _stub_finder = _StubFinder(missing)
+     sys.meta_path.insert(0, _stub_finder)
+
+     stubbed = sorted(missing)
+     if len(stubbed) <= 10:
+         print(f"[comfy-env] Stubbed {len(stubbed)} imports: {', '.join(stubbed)}")
+     else:
+         print(f"[comfy-env] Stubbed {len(stubbed)} imports: {', '.join(stubbed[:10])}... and {len(stubbed)-10} more")
+
+     return stubbed
+
+
+ def cleanup_stubs():
+     """Remove the stub import hooks."""
+     global _stub_finder
+
+     if _stub_finder is not None:
+         try:
+             sys.meta_path.remove(_stub_finder)
+         except ValueError:
+             pass
+
+     # Remove stubbed modules from sys.modules
+     to_remove = [
+         name for name in sys.modules
+         if isinstance(sys.modules[name], _StubModule)
+     ]
+     for name in to_remove:
+         del sys.modules[name]
+
+     _stub_finder = None
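
For orientation, here is a minimal sketch of what the stubs above give a node pack at class-definition time. It is not part of the diffed file, and `fake_pkg` / `FusedKernel` are hypothetical names standing in for a package that exists only in the pack's pixi environment:

    loader = _StubLoader({"fake_pkg"})
    loader.load_module("fake_pkg")           # registers a _StubModule in sys.modules

    import fake_pkg                          # satisfied from sys.modules
    handle = fake_pkg.ops.FusedKernel()      # attribute access and calls return _StubObject
    print(bool(handle), len(handle), list(handle))   # False 0 [] - safe defaults while parsing

The real package is only needed later, when the node's FUNCTION runs inside the isolated worker.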
@@ -0,0 +1,103 @@
+ comfy-env Setup Instructions
+ ============================
+
+ comfy-env handles CUDA wheel installation and process isolation for ComfyUI
+ custom nodes.
+
+ QUICK START
+ -----------
+ 1. Install comfy-env:
+        pip install comfy-env
+
+ 2. Initialize config in your node directory:
+        comfy-env init
+
+ 3. Edit comfy-env.toml to add your dependencies
+
+ 4. Add to your __init__.py (at the top, before other imports):
+        from comfy_env import install
+        install()
+
+ 5. Test locally:
+        comfy-env install --dry-run    # Preview what will be installed
+        comfy-env install              # Actually install
+        comfy-env doctor               # Verify all packages work
+
+
+ COMMON USE CASES
+ ----------------
+
+ Case 1: Just need CUDA packages (nvdiffrast, pytorch3d, etc.)
+   - Add packages to [cuda] section
+   - Call install() in your __init__.py
+
+ Case 2: Need process isolation (conflicting dependencies, conda packages)
+   - Define an isolated environment with `isolated = true`
+   - Use enable_isolation(NODE_CLASS_MAPPINGS) in your __init__.py
+   - See PROCESS ISOLATION section below
+
+ Case 3: Need system packages (apt)
+   - Add to [system] linux = ["package1", "package2"]
+
+
+ CLI COMMANDS
+ ------------
+ comfy-env init                 Create comfy-env.toml template
+ comfy-env install              Install dependencies from config
+ comfy-env install --dry-run    Preview without installing
+ comfy-env info                 Show detected environment (Python, CUDA, PyTorch)
+ comfy-env resolve PKG          Show resolved wheel URL for a package
+ comfy-env doctor               Verify installation
+ comfy-env list-packages        Show all packages in built-in registry
+
+
+ PROCESS ISOLATION
+ -----------------
+ For nodes that need isolated dependencies:
+
+ RECOMMENDED: Pack-wide isolation (all nodes in same isolated env)
+
+     from comfy_env import setup_isolated_imports, enable_isolation
+
+     # Setup import stubs BEFORE importing nodes
+     setup_isolated_imports(__file__)
+
+     from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+
+     # Enable isolation for all nodes
+     enable_isolation(NODE_CLASS_MAPPINGS)
+
+ Requires `isolated = true` in comfy-env.toml:
+
+     [mypack]
+     python = "3.11"
+     isolated = true
+
+     [mypack.packages]
+     requirements = ["trimesh", "scipy"]
+
+ ALTERNATIVE: Per-node isolation (for multiple isolated envs)
+
+     from comfy_env import isolated
+
+     @isolated(env="myenv")
+     class MyNode:
+         FUNCTION = "process"
+         def process(self, image):
+             import conflicting_lib
+             return (result,)
+
+ How it works:
+ - Runs FUNCTION methods in a separate Python process
+ - Tensors/numpy arrays passed by value (efficient)
+ - Complex objects (meshes, etc.) passed by reference
+
+
+ TROUBLESHOOTING
+ ---------------
+ - "Package X not found in registry": Add custom wheel URL to [wheel_sources]
+ - "CUDA not detected": Ensure PyTorch with CUDA is installed in ComfyUI
+ - "Worker failed to connect": Check the isolated env was set up correctly
+ - Import errors: Run `comfy-env doctor` to verify packages
+
+ For more help: https://github.com/PozzettiAndrea/comfy-env
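
Putting the quick start together for Case 1 (CUDA packages only), a pack's `__init__.py` can stay this small; the sketch below is illustrative, and `.nodes` plus the two mapping dicts are whatever the pack already defines:

    # __init__.py - resolve the CUDA/pip dependencies declared in comfy-env.toml first
    from comfy_env import install
    install()

    from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS

    __all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]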
@@ -0,0 +1,186 @@
+ # =============================================================================
+ # comfy-env.toml - Environment configuration for ComfyUI custom nodes
+ # Documentation: https://github.com/PozzettiAndrea/comfy-env
+ # =============================================================================
+ #
+ # This file configures dependencies for your ComfyUI custom node.
+ # comfy-env handles two main challenges:
+ #   1. Installing CUDA packages that require compilation (nvdiffrast, pytorch3d, etc.)
+ #   2. Running nodes in isolated environments with their own dependencies
+ #
+ # Quick start:
+ #   pip install comfy-env
+ #   comfy-env init       # Creates this template
+ #   comfy-env install    # Installs dependencies
+ #   comfy-env doctor     # Verifies installation
+
+
+ # =============================================================================
+ # SYSTEM PACKAGES (optional)
+ # =============================================================================
+ # System-level packages installed via apt (Linux) before Python packages.
+ # Useful for packages that need system libraries to compile or run.
+
+ [system]
+ linux = []
+ # Examples:
+ #   linux = ["libgl1", "libopengl0"]   # For OpenGL rendering (nvdiffrast)
+ #   linux = ["python3-dev"]            # For packages that compile C extensions
+ #   linux = ["ffmpeg"]                 # For video processing nodes
+
+
+ # =============================================================================
+ # CUDA PACKAGES
+ # =============================================================================
+ # CUDA packages from the built-in registry. These are pre-compiled wheels
+ # that install without needing a compiler. Run `comfy-env list-packages`
+ # to see all available packages.
+
+ [cuda]
+ # Use exact package names from `comfy-env list-packages` (hyphens vs underscores matter!)
+ # nvdiffrast = "0.4.0"      # NVIDIA differentiable rasterizer
+ # pytorch3d = "0.7.9"       # PyTorch3D - 3D deep learning
+ # gsplat = "1.5.3"          # Gaussian splatting rasterization
+ # torch-scatter = "2.1.2"   # PyG scatter operations
+ # torch-cluster = "1.6.3"   # PyG clustering algorithms
+ # flash-attn = "2.7.4"      # Flash Attention (Linux x86_64 only)
+ # spconv = "2.3.8"          # Sparse convolution library
+ # sageattention = "2.2.0"   # SageAttention - faster than FlashAttention
+ # dpvo_cuda = "0.0.0"       # DPVO CUDA extensions
+
+
+ # =============================================================================
+ # REGULAR PACKAGES
+ # =============================================================================
+ # Standard pip packages (no special CUDA handling needed).
+
+ [packages]
+ requirements = []
+ # Examples:
+ # requirements = [
+ #     "transformers>=4.56",
+ #     "pillow",
+ #     "opencv-python-headless",
+ #     "trimesh",
+ # ]
+
+
+ # =============================================================================
+ # CUSTOM WHEEL SOURCES (optional)
+ # =============================================================================
+ # Override built-in wheel URLs or add packages not in the registry.
+ # Template variables: {version}, {cuda_short}, {torch_mm}, {py_tag}, {platform}
+
+ [wheel_sources]
+ # my-custom-package = "https://my-server.com/my-package-{version}+cu{cuda_short}-{py_tag}-{platform}.whl"
+
+
+ # =============================================================================
+ # NODE DEPENDENCIES (optional)
+ # =============================================================================
+ # Other ComfyUI custom nodes this node depends on.
+
+ [node_reqs]
+ # VideoHelperSuite = "Kosinkadink/ComfyUI-VideoHelperSuite"
+ # ComfyUI-Impact-Pack = "ltdrdata/ComfyUI-Impact-Pack"
+
+
+ # =============================================================================
+ # EXTERNAL TOOLS (optional)
+ # =============================================================================
+ # External applications required by the node.
+
+ [tools]
+ # blender = "4.2"
+
+
+ # #############################################################################
+ #
+ # PROCESS ISOLATION (ADVANCED)
+ #
+ # #############################################################################
+ #
+ # For nodes that need completely isolated dependencies (different Python
+ # version, conda packages, conflicting native libraries), define an isolated
+ # environment with `isolated = true`.
+ #
+ # RECOMMENDED: Pack-wide isolation (all nodes in one environment)
+ # ----------------------------------------------------------------
+ # This is the simplest approach - all your nodes run in the same isolated env.
+ #
+ # Step 1: Define environment in comfy-env.toml (this file)
+ # Step 2: In __init__.py:
+ #
+ #     from comfy_env import setup_isolated_imports, enable_isolation
+ #
+ #     # Setup import stubs BEFORE importing nodes
+ #     setup_isolated_imports(__file__)
+ #
+ #     from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+ #
+ #     # Enable isolation for all nodes
+ #     enable_isolation(NODE_CLASS_MAPPINGS)
+ #
+ # =============================================================================
+
+
+ # -----------------------------------------------------------------------------
+ # Example: Full pack isolation with conda packages (RECOMMENDED)
+ # -----------------------------------------------------------------------------
+ # Uses pixi to create an isolated environment with conda + pip packages.
+
+ # [mypack]
+ # python = "3.11"
+ # isolated = true   # Required for enable_isolation()
+ #
+ # [mypack.conda]
+ # channels = ["conda-forge"]
+ # packages = ["cgal", "openmesh"]
+ #
+ # [mypack.packages]
+ # requirements = ["trimesh[easy]>=4.0", "numpy", "scipy"]
+
+
+ # -----------------------------------------------------------------------------
+ # Example: Multiple isolated environments (per-node control)
+ # -----------------------------------------------------------------------------
+ # Use @isolated(env="envname") decorator when different nodes need different envs.
+ #
+ #     from comfy_env import isolated
+ #
+ #     @isolated(env="env-preprocessing")
+ #     class PreprocessNode: ...
+ #
+ #     @isolated(env="env-inference")
+ #     class InferenceNode: ...
+
+ # [env-preprocessing]
+ # python = "3.11"
+ #
+ # [env-preprocessing.packages]
+ # requirements = ["opencv-python-headless", "pillow"]
+
+ # [env-inference]
+ # python = "3.10"
+ #
+ # [env-inference.cuda]
+ # torch-scatter = "2.1.2"
+
+
+ # -----------------------------------------------------------------------------
+ # Example: Platform-specific packages
+ # -----------------------------------------------------------------------------
+ # Different packages for Windows vs Linux.
+
+ # [crossplatform]
+ # python = "3.11"
+ # isolated = true
+ #
+ # [crossplatform.packages]
+ # requirements = ["numpy", "pillow"]
+ #
+ # [crossplatform.packages.windows]
+ # requirements = ["pywin32"]
+ #
+ # [crossplatform.packages.linux]
+ # requirements = ["python-xlib"]
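
To make the per-node example above concrete, here is a hedged sketch of a node that opts into the `env-inference` environment; the inputs/outputs follow the usual ComfyUI node conventions, and `conflicting_lib` is a placeholder for whatever only exists in that environment:

    from comfy_env import isolated

    @isolated(env="env-inference")
    class InferenceNode:
        CATEGORY = "example"
        RETURN_TYPES = ("IMAGE",)
        FUNCTION = "process"

        @classmethod
        def INPUT_TYPES(cls):
            return {"required": {"image": ("IMAGE",)}}

        def process(self, image):
            import conflicting_lib                # resolved inside the isolated worker
            result = conflicting_lib.run(image)   # placeholder call
            return (result,)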
@@ -107,7 +107,7 @@ packages:
    default_version: "2.2.0"
    description: SageAttention - 2-5x faster than FlashAttention with quantized kernels

- dpvo_cuda:
+ dpvo-cuda:
    wheel_template: "https://github.com/PozzettiAndrea/cuda-wheels/releases/download/dpvo_cuda-latest/dpvo_cuda-{version}%2Bcu{cuda_short}torch{torch_mm}-{py_tag}-{py_tag}-{platform}.whl"
    default_version: "0.0.0"
    description: DPVO CUDA extensions (cuda_corr, cuda_ba, lietorch_backends) - torch 2.4 only
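
For reference, this is roughly how a wheel_template such as the one above is expanded; the substituted values below are only illustrative (the real ones come from the detected Python, CUDA, and PyTorch versions, see `comfy-env info`):

    template = ("https://github.com/PozzettiAndrea/cuda-wheels/releases/download/"
                "dpvo_cuda-latest/dpvo_cuda-{version}%2Bcu{cuda_short}torch{torch_mm}"
                "-{py_tag}-{py_tag}-{platform}.whl")
    url = template.format(version="0.0.0", cuda_short="121", torch_mm="24",
                          py_tag="cp311", platform="linux_x86_64")
    # -> .../dpvo_cuda-0.0.0%2Bcu121torch24-cp311-cp311-linux_x86_64.whl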
@@ -50,8 +50,12 @@ def _worker_loop(queue_in, queue_out, sys_path_additions=None):
      - _SHUTDOWN: Shutdown the worker

      Runs until receiving _SHUTDOWN sentinel.
+
+     Args:
+         queue_in: Input queue for receiving work items
+         queue_out: Output queue for sending results
+         sys_path_additions: Paths to add to sys.path
      """
-     import importlib
      import os
      import sys
      from pathlib import Path
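
The docstring addition above describes a sentinel-driven loop. A simplified sketch of that shape follows (assumptions: `_SHUTDOWN` is a plain sentinel value and every other work item is a callable with its arguments; the real worker also dispatches on the other message types listed in the docstring):

    _SHUTDOWN = "_SHUTDOWN"

    def _worker_loop(queue_in, queue_out, sys_path_additions=None):
        import sys
        for path in (sys_path_additions or []):
            if path not in sys.path:
                sys.path.insert(0, path)
        while True:
            item = queue_in.get()
            if item == _SHUTDOWN:
                break
            func, args, kwargs = item
            try:
                queue_out.put(("ok", func(*args, **kwargs)))
            except Exception as exc:            # report failures instead of killing the worker
                queue_out.put(("error", repr(exc)))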