comfygit-core 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfygit_core/analyzers/custom_node_scanner.py +109 -0
- comfygit_core/analyzers/git_change_parser.py +156 -0
- comfygit_core/analyzers/model_scanner.py +318 -0
- comfygit_core/analyzers/node_classifier.py +58 -0
- comfygit_core/analyzers/node_git_analyzer.py +77 -0
- comfygit_core/analyzers/status_scanner.py +362 -0
- comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
- comfygit_core/caching/__init__.py +16 -0
- comfygit_core/caching/api_cache.py +210 -0
- comfygit_core/caching/base.py +212 -0
- comfygit_core/caching/comfyui_cache.py +100 -0
- comfygit_core/caching/custom_node_cache.py +320 -0
- comfygit_core/caching/workflow_cache.py +797 -0
- comfygit_core/clients/__init__.py +4 -0
- comfygit_core/clients/civitai_client.py +412 -0
- comfygit_core/clients/github_client.py +349 -0
- comfygit_core/clients/registry_client.py +230 -0
- comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
- comfygit_core/configs/comfyui_models.py +62 -0
- comfygit_core/configs/model_config.py +151 -0
- comfygit_core/constants.py +82 -0
- comfygit_core/core/environment.py +1635 -0
- comfygit_core/core/workspace.py +898 -0
- comfygit_core/factories/environment_factory.py +419 -0
- comfygit_core/factories/uv_factory.py +61 -0
- comfygit_core/factories/workspace_factory.py +109 -0
- comfygit_core/infrastructure/sqlite_manager.py +156 -0
- comfygit_core/integrations/__init__.py +7 -0
- comfygit_core/integrations/uv_command.py +318 -0
- comfygit_core/logging/logging_config.py +15 -0
- comfygit_core/managers/environment_git_orchestrator.py +316 -0
- comfygit_core/managers/environment_model_manager.py +296 -0
- comfygit_core/managers/export_import_manager.py +116 -0
- comfygit_core/managers/git_manager.py +667 -0
- comfygit_core/managers/model_download_manager.py +252 -0
- comfygit_core/managers/model_symlink_manager.py +166 -0
- comfygit_core/managers/node_manager.py +1378 -0
- comfygit_core/managers/pyproject_manager.py +1321 -0
- comfygit_core/managers/user_content_symlink_manager.py +436 -0
- comfygit_core/managers/uv_project_manager.py +569 -0
- comfygit_core/managers/workflow_manager.py +1944 -0
- comfygit_core/models/civitai.py +432 -0
- comfygit_core/models/commit.py +18 -0
- comfygit_core/models/environment.py +293 -0
- comfygit_core/models/exceptions.py +378 -0
- comfygit_core/models/manifest.py +132 -0
- comfygit_core/models/node_mapping.py +201 -0
- comfygit_core/models/protocols.py +248 -0
- comfygit_core/models/registry.py +63 -0
- comfygit_core/models/shared.py +356 -0
- comfygit_core/models/sync.py +42 -0
- comfygit_core/models/system.py +204 -0
- comfygit_core/models/workflow.py +914 -0
- comfygit_core/models/workspace_config.py +71 -0
- comfygit_core/py.typed +0 -0
- comfygit_core/repositories/migrate_paths.py +49 -0
- comfygit_core/repositories/model_repository.py +958 -0
- comfygit_core/repositories/node_mappings_repository.py +246 -0
- comfygit_core/repositories/workflow_repository.py +57 -0
- comfygit_core/repositories/workspace_config_repository.py +121 -0
- comfygit_core/resolvers/global_node_resolver.py +459 -0
- comfygit_core/resolvers/model_resolver.py +250 -0
- comfygit_core/services/import_analyzer.py +218 -0
- comfygit_core/services/model_downloader.py +422 -0
- comfygit_core/services/node_lookup_service.py +251 -0
- comfygit_core/services/registry_data_manager.py +161 -0
- comfygit_core/strategies/__init__.py +4 -0
- comfygit_core/strategies/auto.py +72 -0
- comfygit_core/strategies/confirmation.py +69 -0
- comfygit_core/utils/comfyui_ops.py +125 -0
- comfygit_core/utils/common.py +164 -0
- comfygit_core/utils/conflict_parser.py +232 -0
- comfygit_core/utils/dependency_parser.py +231 -0
- comfygit_core/utils/download.py +216 -0
- comfygit_core/utils/environment_cleanup.py +111 -0
- comfygit_core/utils/filesystem.py +178 -0
- comfygit_core/utils/git.py +1184 -0
- comfygit_core/utils/input_signature.py +145 -0
- comfygit_core/utils/model_categories.py +52 -0
- comfygit_core/utils/pytorch.py +71 -0
- comfygit_core/utils/requirements.py +211 -0
- comfygit_core/utils/retry.py +242 -0
- comfygit_core/utils/symlink_utils.py +119 -0
- comfygit_core/utils/system_detector.py +258 -0
- comfygit_core/utils/uuid.py +28 -0
- comfygit_core/utils/uv_error_handler.py +158 -0
- comfygit_core/utils/version.py +73 -0
- comfygit_core/utils/workflow_hash.py +90 -0
- comfygit_core/validation/resolution_tester.py +297 -0
- comfygit_core-0.2.0.dist-info/METADATA +939 -0
- comfygit_core-0.2.0.dist-info/RECORD +93 -0
- comfygit_core-0.2.0.dist-info/WHEEL +4 -0
- comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
|
@@ -0,0 +1,1635 @@
|
|
|
1
|
+
"""Simplified Environment - owns everything about a single ComfyUI environment."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import shutil
|
|
5
|
+
import subprocess
|
|
6
|
+
from functools import cached_property
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import TYPE_CHECKING
|
|
9
|
+
|
|
10
|
+
from ..analyzers.status_scanner import StatusScanner
|
|
11
|
+
from ..factories.uv_factory import create_uv_for_environment
|
|
12
|
+
from ..logging.logging_config import get_logger
|
|
13
|
+
from ..managers.environment_git_orchestrator import EnvironmentGitOrchestrator
|
|
14
|
+
from ..managers.environment_model_manager import EnvironmentModelManager
|
|
15
|
+
from ..managers.git_manager import GitManager
|
|
16
|
+
from ..managers.model_symlink_manager import ModelSymlinkManager
|
|
17
|
+
from ..managers.node_manager import NodeManager
|
|
18
|
+
from ..managers.pyproject_manager import PyprojectManager
|
|
19
|
+
from ..managers.user_content_symlink_manager import UserContentSymlinkManager
|
|
20
|
+
from ..managers.uv_project_manager import UVProjectManager
|
|
21
|
+
from ..managers.workflow_manager import WorkflowManager
|
|
22
|
+
from ..models.environment import EnvironmentStatus
|
|
23
|
+
from ..models.shared import (
|
|
24
|
+
ModelSourceResult,
|
|
25
|
+
ModelSourceStatus,
|
|
26
|
+
NodeInfo,
|
|
27
|
+
NodeRemovalResult,
|
|
28
|
+
UpdateResult,
|
|
29
|
+
)
|
|
30
|
+
from ..models.sync import SyncResult
|
|
31
|
+
from ..strategies.confirmation import ConfirmationStrategy
|
|
32
|
+
from ..utils.common import run_command
|
|
33
|
+
from ..utils.pytorch import extract_pip_show_package_version
|
|
34
|
+
from ..validation.resolution_tester import ResolutionTester
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from comfygit_core.core.workspace import Workspace
|
|
38
|
+
from comfygit_core.models.protocols import (
|
|
39
|
+
ExportCallbacks,
|
|
40
|
+
ImportCallbacks,
|
|
41
|
+
ModelResolutionStrategy,
|
|
42
|
+
NodeResolutionStrategy,
|
|
43
|
+
RollbackStrategy,
|
|
44
|
+
SyncCallbacks,
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
from ..caching.workflow_cache import WorkflowCacheRepository
|
|
48
|
+
from ..models.workflow import (
|
|
49
|
+
BatchDownloadCallbacks,
|
|
50
|
+
DetailedWorkflowStatus,
|
|
51
|
+
NodeInstallCallbacks,
|
|
52
|
+
ResolutionResult,
|
|
53
|
+
WorkflowSyncStatus,
|
|
54
|
+
)
|
|
55
|
+
from ..services.node_lookup_service import NodeLookupService
|
|
56
|
+
|
|
57
|
+
logger = get_logger(__name__)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class Environment:
|
|
61
|
+
"""A ComfyUI environment - manages its own state through pyproject.toml."""
|
|
62
|
+
|
|
63
|
+
def __init__(
    self,
    name: str,
    path: Path,
    workspace: Workspace,
    torch_backend: str | None = None,
):
    """Initialize an environment rooted at *path* inside *workspace*.

    Args:
        name: Environment name (also used for per-environment user content).
        path: Root directory of the environment (contains .cec/, ComfyUI/, .venv/).
        workspace: Owning workspace providing shared paths and services.
        torch_backend: Optional torch backend passed through to the UV manager.
    """
    self.name = name
    self.path = path
    self.workspace = workspace
    self.torch_backend = torch_backend

    # Workspace-level paths (shared across all environments in the workspace)
    self.workspace_paths = workspace.paths
    self.global_models_path = workspace.workspace_config_manager.get_models_directory()

    # Workspace-level services (borrowed from the workspace, not owned here)
    self.model_repository = workspace.model_repository
    self.node_mapping_repository = workspace.node_mapping_repository
    self.workspace_config_manager = workspace.workspace_config_manager
    self.model_downloader = workspace.model_downloader

    # Core paths owned by this environment
    self.cec_path = path / ".cec"
    self.pyproject_path = self.cec_path / "pyproject.toml"
    self.comfyui_path = path / "ComfyUI"
    self.custom_nodes_path = self.comfyui_path / "custom_nodes"
    self.venv_path = path / ".venv"
    self.models_path = self.comfyui_path / "models"
|
|
92
|
+
|
|
93
|
+
## Cached properties ##
|
|
94
|
+
#
|
|
95
|
+
# Orchestrators coordinate git and model operations with environment state:
|
|
96
|
+
# - git_orchestrator: Wraps git operations with node reconciliation + package sync + workflow restore
|
|
97
|
+
# - model_manager: Coordinates model operations across pyproject, repository, and downloader
|
|
98
|
+
#
|
|
99
|
+
# This pattern keeps environment.py thin by delegating complex multi-step operations.
|
|
100
|
+
|
|
101
|
+
@cached_property
def uv_manager(self) -> UVProjectManager:
    """UV project manager scoped to this environment (built once, then cached)."""
    workspace_root = self.workspace_paths.root
    return create_uv_for_environment(
        workspace_root,
        cec_path=self.cec_path,
        venv_path=self.venv_path,
        torch_backend=self.torch_backend,
    )
|
|
109
|
+
|
|
110
|
+
@cached_property
def pyproject(self) -> PyprojectManager:
    """Manager for this environment's .cec/pyproject.toml (cached)."""
    manager = PyprojectManager(self.pyproject_path)
    return manager
|
|
113
|
+
|
|
114
|
+
@cached_property
def node_lookup(self) -> NodeLookupService:
    """Node lookup service backed by the workspace cache and repositories."""
    from ..services.node_lookup_service import NodeLookupService

    service = NodeLookupService(
        cache_path=self.workspace_paths.cache,
        node_mappings_repository=self.node_mapping_repository,
        workspace_config_repository=self.workspace_config_manager,
    )
    return service
|
|
122
|
+
|
|
123
|
+
@cached_property
def resolution_tester(self) -> ResolutionTester:
    """Dependency-resolution tester rooted at the workspace (cached)."""
    root = self.workspace_paths.root
    return ResolutionTester(root)
|
|
126
|
+
|
|
127
|
+
@cached_property
def node_manager(self) -> NodeManager:
    """Custom-node manager wired to this environment's services (cached)."""
    manager = NodeManager(
        self.pyproject,
        self.uv_manager,
        self.node_lookup,
        self.resolution_tester,
        self.custom_nodes_path,
        self.node_mapping_repository,
    )
    return manager
|
|
137
|
+
|
|
138
|
+
@cached_property
def model_symlink_manager(self) -> ModelSymlinkManager:
    """Manager for the models-directory symlink into the shared model store."""
    return ModelSymlinkManager(self.comfyui_path, self.global_models_path)
|
|
144
|
+
|
|
145
|
+
@cached_property
def user_content_manager(self) -> UserContentSymlinkManager:
    """Manager for input/output symlinks into workspace-level storage."""
    manager = UserContentSymlinkManager(
        self.comfyui_path,
        self.name,
        self.workspace_paths.input,
        self.workspace_paths.output,
    )
    return manager
|
|
154
|
+
|
|
155
|
+
@cached_property
def workflow_cache(self) -> WorkflowCacheRepository:
    """Workflow cache repository stored under the workspace cache directory."""
    from ..caching.workflow_cache import WorkflowCacheRepository

    db_path = self.workspace_paths.cache / "workflows.db"
    repository = WorkflowCacheRepository(
        db_path,
        pyproject_manager=self.pyproject,
        model_repository=self.model_repository,
        workspace_config_manager=self.workspace_config_manager,
    )
    return repository
|
|
166
|
+
|
|
167
|
+
@cached_property
def workflow_manager(self) -> WorkflowManager:
    """Workflow manager coordinating ComfyUI, .cec/, and workspace services."""
    manager = WorkflowManager(
        self.comfyui_path,
        self.cec_path,
        self.pyproject,
        self.model_repository,
        self.node_mapping_repository,
        self.model_downloader,
        self.workflow_cache,
        self.name,
    )
    return manager
|
|
179
|
+
|
|
180
|
+
@cached_property
def git_manager(self) -> GitManager:
    """Git manager for the .cec/ repository (cached)."""
    repo_path = self.cec_path
    return GitManager(repo_path)
|
|
183
|
+
|
|
184
|
+
@cached_property
def git_orchestrator(self) -> EnvironmentGitOrchestrator:
    """Environment-aware git orchestrator (git ops plus node/package/workflow sync)."""
    orchestrator = EnvironmentGitOrchestrator(
        git_manager=self.git_manager,
        node_manager=self.node_manager,
        pyproject_manager=self.pyproject,
        uv_manager=self.uv_manager,
        workflow_manager=self.workflow_manager,
    )
    return orchestrator
|
|
194
|
+
|
|
195
|
+
@cached_property
def model_manager(self) -> EnvironmentModelManager:
    """Model manager coordinating pyproject, repository, and downloader."""
    manager = EnvironmentModelManager(
        pyproject=self.pyproject,
        model_repository=self.model_repository,
        model_downloader=self.model_downloader,
    )
    return manager
|
|
203
|
+
|
|
204
|
+
## Helper methods ##
|
|
205
|
+
|
|
206
|
+
## Public methods ##
|
|
207
|
+
|
|
208
|
+
# =====================================================
|
|
209
|
+
# Environment Management
|
|
210
|
+
# =====================================================
|
|
211
|
+
|
|
212
|
+
def status(self) -> EnvironmentStatus:
    """Collect sync, git, workflow, and missing-model status into one report."""
    # Filesystem/venv comparison against the declared project state.
    comparison = StatusScanner(
        comfyui_path=self.comfyui_path,
        venv_path=self.venv_path,
        uv=self.uv_manager,
        pyproject=self.pyproject,
    ).get_full_comparison()

    # Each subsystem provides its complete status.
    git_status = self.git_manager.get_status(self.pyproject)
    workflow_status = self.workflow_manager.get_workflow_status()
    missing_models = self.model_manager.detect_missing_models()

    # Assemble the final status object.
    return EnvironmentStatus.create(
        comparison=comparison,
        git_status=git_status,
        workflow_status=workflow_status,
        missing_models=missing_models,
    )
|
|
237
|
+
|
|
238
|
+
def sync(
    self,
    dry_run: bool = False,
    model_strategy: str = "skip",
    model_callbacks: BatchDownloadCallbacks | None = None,
    node_callbacks: NodeInstallCallbacks | None = None,
    remove_extra_nodes: bool = True,
    sync_callbacks: SyncCallbacks | None = None,
    verbose: bool = False
) -> SyncResult:
    """Apply changes: sync packages, nodes, workflows, and models with environment.

    Runs each phase in order, accumulating errors into a shared SyncResult.
    Only package or node sync failures mark the whole sync as failed; the
    later phases (workflow restore, model downloads, symlinks, migration)
    are best-effort and append errors without flipping ``result.success``.

    Args:
        dry_run: If True, don't actually apply changes
        model_strategy: Model download strategy - "all", "required", or "skip" (default: skip)
        model_callbacks: Optional callbacks for model download progress
        node_callbacks: Optional callbacks for node installation progress
        remove_extra_nodes: If True, remove extra nodes. If False, only warn (default: True)
        sync_callbacks: Optional callbacks for dependency-sync progress
        verbose: If True, show uv output in real-time during dependency installation

    Returns:
        SyncResult with details of what was synced

    Raises:
        UVCommandError: If sync fails
    """
    result = SyncResult()
    logger.info("Syncing environment...")

    self._sync_packages(result, dry_run, sync_callbacks, verbose)
    self._sync_custom_nodes(result, dry_run, remove_extra_nodes, node_callbacks)

    # Restore workflows from .cec/ to ComfyUI (for git pull workflow).
    if not dry_run:
        self._restore_workflows(result)

    # Handle missing models.
    if not dry_run and model_strategy != "skip":
        self._download_missing_models(result, model_strategy, model_callbacks)

    self._ensure_model_symlink(result)
    self._migrate_legacy_user_content(result)
    self._ensure_user_content_symlinks(result)

    # Mark environment as complete after successful sync (repair operation).
    # This ensures environments that lost .complete (e.g. from a manual
    # git pull) become visible again.
    if result.success and not dry_run:
        from ..utils.environment_cleanup import mark_environment_complete
        mark_environment_complete(self.cec_path)
        logger.debug("Marked environment as complete")

    if result.success:
        logger.info("Successfully synced environment")
    else:
        logger.warning(f"Sync completed with {len(result.errors)} errors")

    return result

def _sync_packages(
    self,
    result: SyncResult,
    dry_run: bool,
    sync_callbacks: SyncCallbacks | None,
    verbose: bool,
) -> None:
    """Sync python packages with UV (progressive group installation); fatal on failure."""
    try:
        sync_result = self.uv_manager.sync_dependencies_progressive(
            dry_run=dry_run,
            callbacks=sync_callbacks,
            verbose=verbose
        )
        result.packages_synced = sync_result["packages_synced"]
        result.dependency_groups_installed.extend(sync_result["dependency_groups_installed"])
        result.dependency_groups_failed.extend(sync_result["dependency_groups_failed"])
    except Exception as e:
        # Progressive sync handles optional groups gracefully;
        # only base or required groups cause this exception.
        logger.error(f"Package sync failed: {e}")
        result.errors.append(f"Package sync failed: {e}")
        result.success = False

def _sync_custom_nodes(
    self,
    result: SyncResult,
    dry_run: bool,
    remove_extra_nodes: bool,
    node_callbacks: NodeInstallCallbacks | None,
) -> None:
    """Sync declared custom nodes to the filesystem; fatal on failure."""
    try:
        # remove_extra defaults to True for aggressive repair behavior,
        # but extras are never removed during a dry run.
        self.node_manager.sync_nodes_to_filesystem(
            remove_extra=remove_extra_nodes and not dry_run,
            callbacks=node_callbacks
        )
    except Exception as e:
        logger.error(f"Node sync failed: {e}")
        result.errors.append(f"Node sync failed: {e}")
        result.success = False

def _restore_workflows(self, result: SyncResult) -> None:
    """Restore workflows from .cec/ into ComfyUI (non-fatal on failure)."""
    try:
        self.workflow_manager.restore_all_from_cec()
        logger.info("Restored workflows from .cec/")
    except Exception as e:
        logger.warning(f"Failed to restore workflows: {e}")
        result.errors.append(f"Workflow restore failed: {e}")

def _download_missing_models(
    self,
    result: SyncResult,
    model_strategy: str,
    model_callbacks: BatchDownloadCallbacks | None,
) -> None:
    """Resolve workflows and download missing models (non-fatal on failure)."""
    try:
        # Reuse existing import machinery to find workflows that need models.
        workflows_with_intents = self.model_manager.prepare_import_with_model_strategy(
            strategy=model_strategy
        )
        if not workflows_with_intents:
            return

        logger.info(f"Downloading models for {len(workflows_with_intents)} workflow(s)")

        # Resolve each workflow (triggers downloads).
        from ..strategies.auto import AutoModelStrategy, AutoNodeStrategy

        for workflow_name in workflows_with_intents:
            try:
                logger.debug(f"Resolving workflow: {workflow_name}")

                # Resolve workflow (analyzes and prepares downloads).
                resolution_result = self.resolve_workflow(
                    name=workflow_name,
                    model_strategy=AutoModelStrategy(),
                    node_strategy=AutoNodeStrategy(),
                    download_callbacks=model_callbacks
                )

                # Track downloads from actual download results (not stale
                # ResolvedModel objects). Download results are populated by
                # _execute_pending_downloads() during resolve_workflow().
                for dr in resolution_result.download_results:
                    if dr.success:
                        result.models_downloaded.append(dr.filename)
                    else:
                        result.models_failed.append((dr.filename, dr.error or "Download failed"))
            except Exception as e:
                logger.error(f"Failed to resolve {workflow_name}: {e}", exc_info=True)
                result.errors.append(f"Failed to resolve {workflow_name}: {e}")
    except Exception as e:
        logger.warning(f"Model download failed: {e}", exc_info=True)
        result.errors.append(f"Model download failed: {e}")

def _ensure_model_symlink(self, result: SyncResult) -> None:
    """Ensure the shared-models symlink exists (non-fatal on failure)."""
    try:
        self.model_symlink_manager.create_symlink()
        result.model_paths_configured = True
    except Exception as e:
        # Continue anyway - symlink might already exist from environment creation.
        logger.warning(f"Failed to ensure model symlink: {e}")
        result.errors.append(f"Model symlink configuration failed: {e}")

def _migrate_legacy_user_content(self, result: SyncResult) -> None:
    """One-time migration of real input/output dirs to workspace-level storage."""
    # Pre-symlink environments have real directories (with content) where
    # symlinks now belong; detect that and move the data up to the workspace.
    needs_migration = False
    if self.comfyui_path.exists():
        from ..utils.symlink_utils import is_link

        input_path = self.comfyui_path / "input"
        output_path = self.comfyui_path / "output"

        if input_path.exists() and not is_link(input_path):
            needs_migration = True
        if output_path.exists() and not is_link(output_path):
            needs_migration = True

    if not needs_migration:
        return

    logger.info("Detected pre-symlink environment, migrating user data...")
    try:
        migration_stats = self.user_content_manager.migrate_existing_data()
        total_moved = (
            migration_stats["input_files_moved"] +
            migration_stats["output_files_moved"]
        )
        if total_moved > 0:
            logger.info(
                f"Migration complete: {total_moved} files moved to workspace-level storage"
            )
    except Exception as e:
        # Don't fail sync - user can migrate manually.
        logger.error(f"Migration failed: {e}")
        result.errors.append(f"User data migration failed: {e}")

def _ensure_user_content_symlinks(self, result: SyncResult) -> None:
    """Ensure input/output symlinks exist (non-fatal on failure)."""
    try:
        self.user_content_manager.create_directories()
        self.user_content_manager.create_symlinks()
        logger.debug("User content symlinks configured")
    except Exception as e:
        # Continue anyway - symlinks might already exist.
        logger.warning(f"Failed to ensure user content symlinks: {e}")
        result.errors.append(f"User content symlink configuration failed: {e}")
|
|
414
|
+
|
|
415
|
+
def pull_and_repair(
    self,
    remote: str = "origin",
    branch: str | None = None,
    model_strategy: str = "all",
    model_callbacks: BatchDownloadCallbacks | None = None,
    node_callbacks: NodeInstallCallbacks | None = None,
) -> dict:
    """Pull from remote and auto-repair environment (atomic operation).

    If sync fails, git changes are rolled back automatically.
    This ensures the environment is never left in a half-pulled state.

    Args:
        remote: Remote name (default: origin)
        branch: Branch to pull (default: current)
        model_strategy: Model download strategy ("all", "required", "skip")
        model_callbacks: Optional callbacks for model download progress
        node_callbacks: Optional callbacks for node installation progress

    Returns:
        Dict with pull results and sync_result

    Raises:
        CDEnvironmentError: If uncommitted changes exist or sync fails
        ValueError: If merge conflicts
        OSError: If pull or repair fails
    """
    from ..models.exceptions import CDEnvironmentError
    from ..utils.git import git_reset_hard, git_rev_parse

    # Refuse to pull over local edits; a merge could clobber them.
    if self.git_manager.has_uncommitted_changes():
        raise CDEnvironmentError(
            "Cannot pull with uncommitted changes.\n"
            " • Commit: comfygit commit -m 'message'\n"
            " • Discard: comfygit reset --hard"
        )

    # Capture pre-pull state for atomic rollback.
    pre_pull_commit = git_rev_parse(self.cec_path, "HEAD")
    if not pre_pull_commit:
        # Without a known-good commit there is nothing safe to roll back to.
        raise CDEnvironmentError(
            "Cannot determine current commit state.\n"
            "The .cec repository may be corrupted. Try:\n"
            " • Check git status: cd .cec && git status\n"
            " • Repair repository: cd .cec && git fsck"
        )

    try:
        # Pull (fetch + merge)
        logger.info("Pulling from remote...")
        pull_result = self.git_manager.pull(remote, branch)

        # Auto-repair (restores workflows, installs nodes, downloads models)
        logger.info("Syncing environment after pull...")
        sync_result = self.sync(
            model_strategy=model_strategy,
            model_callbacks=model_callbacks,
            node_callbacks=node_callbacks
        )

        # Soft sync failure (errors collected, no exception): roll back the
        # pull so the git state matches the last working environment state.
        if not sync_result.success:
            logger.error("Sync failed - rolling back git changes")
            git_reset_hard(self.cec_path, pre_pull_commit)
            raise CDEnvironmentError(
                "Sync failed after pull. Git changes rolled back.\n"
                f"Errors: {', '.join(sync_result.errors)}"
            )

        # Return both pull result and sync result for CLI to display
        return {
            **pull_result,
            'sync_result': sync_result
        }

    except Exception as e:
        # Any failure during sync - rollback git changes
        # (merge conflicts raise before this point, so don't rollback those)
        # NOTE(review): this relies on the exception message containing the
        # literal text "Merge conflict" — presumably raised by git_manager.pull;
        # confirm that wording stays in sync with the pull implementation.
        if "Merge conflict" not in str(e):
            logger.error(f"Pull failed: {e} - rolling back git changes")
            git_reset_hard(self.cec_path, pre_pull_commit)
        raise
|
|
499
|
+
|
|
500
|
+
def push_commits(self, remote: str = "origin", branch: str | None = None, force: bool = False) -> str:
    """Push committed changes in .cec/ to the remote.

    Args:
        remote: Remote name (default: origin)
        branch: Branch to push (default: current)
        force: Use --force-with-lease for force push (default: False)

    Returns:
        Push output

    Raises:
        CDEnvironmentError: If uncommitted changes exist
        ValueError: If no remote or detached HEAD
        OSError: If push fails
    """
    from ..models.exceptions import CDEnvironmentError

    # Push only cares about the git state in .cec/, not whether workflows
    # are synced to ComfyUI. Workflow issues were already validated at
    # commit time, so no re-check is needed here.
    if self.git_manager.has_uncommitted_changes():
        raise CDEnvironmentError(
            "Cannot push with uncommitted changes.\n"
            " Run: comfygit commit -m 'message' first"
        )

    logger.info("Pushing commits to remote...")
    return self.git_manager.push(remote, branch, force=force)
|
|
533
|
+
|
|
534
|
+
def checkout(
    self,
    ref: str,
    strategy: RollbackStrategy | None = None,
    force: bool = False
) -> None:
    """Check out *ref* without auto-committing (delegates to the orchestrator).

    Args:
        ref: Git reference (commit hash, branch, tag)
        strategy: Optional strategy for confirming destructive checkout
        force: If True, discard uncommitted changes without confirmation

    Raises:
        ValueError: If ref doesn't exist
        CDEnvironmentError: If uncommitted changes exist and no strategy/force
    """
    orchestrator = self.git_orchestrator
    orchestrator.checkout(ref, strategy, force)
|
|
552
|
+
|
|
553
|
+
def reset(
    self,
    ref: str | None = None,
    mode: str = "hard",
    strategy: RollbackStrategy | None = None,
    force: bool = False
) -> None:
    """Reset HEAD to *ref* using git reset semantics (delegates to the orchestrator).

    Args:
        ref: Git reference to reset to (None = HEAD)
        mode: Reset mode (hard/mixed/soft)
        strategy: Optional strategy for confirming destructive reset
        force: If True, skip confirmation

    Raises:
        ValueError: If ref doesn't exist or invalid mode
        CDEnvironmentError: If uncommitted changes exist (hard mode only)
    """
    orchestrator = self.git_orchestrator
    orchestrator.reset(ref, mode, strategy, force)
|
|
573
|
+
|
|
574
|
+
def create_branch(self, name: str, start_point: str = "HEAD") -> None:
    """Create branch *name* at *start_point* (default: HEAD).

    Args:
        name: Branch name
        start_point: Commit to branch from (default: HEAD)
    """
    orchestrator = self.git_orchestrator
    orchestrator.create_branch(name, start_point)
|
|
582
|
+
|
|
583
|
+
def delete_branch(self, name: str, force: bool = False) -> None:
    """Delete branch *name*; with force=True, delete even if unmerged.

    Args:
        name: Branch name
        force: Force delete even if unmerged
    """
    orchestrator = self.git_orchestrator
    orchestrator.delete_branch(name, force)
|
|
591
|
+
|
|
592
|
+
def switch_branch(self, branch: str, create: bool = False) -> None:
    """Switch to *branch* (optionally creating it) and resync the environment.

    Args:
        branch: Branch name
        create: Create branch if it doesn't exist

    Raises:
        CDEnvironmentError: If uncommitted workflow changes would be overwritten
    """
    orchestrator = self.git_orchestrator
    orchestrator.switch_branch(branch, create)
|
|
603
|
+
|
|
604
|
+
def list_branches(self) -> list[tuple[str, bool]]:
    """Return (branch_name, is_current) tuples for every local branch."""
    branches = self.git_manager.list_branches()
    return branches
|
|
611
|
+
|
|
612
|
+
def get_current_branch(self) -> str | None:
    """Return the current branch name, or None when HEAD is detached."""
    current = self.git_manager.get_current_branch()
    return current
|
|
619
|
+
|
|
620
|
+
def merge_branch(self, branch: str, message: str | None = None) -> None:
    """Merge *branch* into the current branch and resync the environment.

    Args:
        branch: Branch to merge
        message: Custom merge commit message
    """
    orchestrator = self.git_orchestrator
    orchestrator.merge_branch(branch, message)
|
|
628
|
+
|
|
629
|
+
def revert_commit(self, commit: str) -> None:
    """Create a new commit that undoes *commit*.

    Args:
        commit: Commit hash to revert
    """
    orchestrator = self.git_orchestrator
    orchestrator.revert_commit(commit)
|
|
636
|
+
|
|
637
|
+
def get_commit_history(self, limit: int = 10) -> list[dict]:
    """Return up to *limit* commits for this environment.

    Args:
        limit: Maximum number of commits to return

    Returns:
        List of commit dicts with keys: hash, message, date, date_relative
    """
    history = self.git_manager.get_version_history(limit)
    return history
|
|
647
|
+
|
|
648
|
+
def sync_model_paths(self) -> dict:
    """Ensure the shared model symlink is configured for this environment.

    Asks the symlink manager to create the link from this environment's
    models path to the workspace-wide model store.

    Returns:
        Status dictionary with keys 'status', 'target', and 'link'.
        (Never returns None - the previous ``dict | None`` annotation was
        misleading; on failure the exception propagates instead.)

    Raises:
        Exception: Whatever the symlink manager raises on failure, after
            logging it with a traceback.
    """
    logger.info(f"Configuring model symlink for environment '{self.name}'")
    try:
        self.model_symlink_manager.create_symlink()
    except Exception:
        # logger.exception captures the active traceback; re-raise so the
        # caller can decide how to handle the failure.
        logger.exception("Failed to configure model symlink")
        raise
    return {
        "status": "linked",
        "target": str(self.global_models_path),
        "link": str(self.models_path)
    }
|
|
665
|
+
|
|
666
|
+
# TODO wrap subprocess completed process instance
|
|
667
|
+
def run(self, args: list[str] | None = None) -> subprocess.CompletedProcess:
    """Launch ComfyUI inside this environment.

    Args:
        args: Extra command-line arguments forwarded to ComfyUI's main.py.

    Returns:
        CompletedProcess describing the finished run.
    """
    extra = args if args else []
    cmd = [str(self.uv_manager.python_executable), "main.py", *extra]

    logger.info(f"Starting ComfyUI with: {' '.join(cmd)}")
    # Output streams straight to the terminal; no timeout since ComfyUI is long-running.
    return run_command(cmd, cwd=self.comfyui_path, capture_output=False, timeout=None)
|
|
681
|
+
|
|
682
|
+
# =====================================================
|
|
683
|
+
# Node Management
|
|
684
|
+
# =====================================================
|
|
685
|
+
|
|
686
|
+
def list_nodes(self) -> list[NodeInfo]:
    """Return every custom node currently installed in this environment.

    Returns:
        NodeInfo objects, one per installed custom node.
    """
    existing = self.pyproject.nodes.get_existing()
    return list(existing.values())
|
|
694
|
+
|
|
695
|
+
def add_node(
    self,
    identifier: str,
    is_development: bool = False,
    no_test: bool = False,
    force: bool = False,
    confirmation_strategy: ConfirmationStrategy | None = None
) -> NodeInfo:
    """Install a custom node into this environment.

    Args:
        identifier: Registry ID or GitHub URL, optionally suffixed @version.
        is_development: Track the node as a development node.
        no_test: Skip dependency-resolution testing.
        force: Replace existing nodes with the same name.
        confirmation_strategy: How replacement prompts are answered.

    Returns:
        NodeInfo describing the installed node.

    Raises:
        CDNodeNotFoundError: The node could not be located.
        CDNodeConflictError: The node's dependencies conflict.
        CDEnvironmentError: A node with the same name already exists.
    """
    return self.node_manager.add_node(
        identifier, is_development, no_test, force, confirmation_strategy
    )
|
|
718
|
+
|
|
719
|
+
def install_nodes_with_progress(
    self,
    node_ids: list[str],
    callbacks: NodeInstallCallbacks | None = None
) -> tuple[int, list[tuple[str, str]]]:
    """Install a batch of nodes, reporting progress through callbacks.

    Per-node failures are collected rather than raised, so one bad node
    does not abort the rest of the batch.

    Args:
        node_ids: Identifiers of the nodes to install.
        callbacks: Optional hooks fired at batch start/end and per node.

    Returns:
        (success_count, failed) where failed holds (node_id, error_message)
        tuples for every node that could not be installed.
    """
    total = len(node_ids)
    if callbacks and callbacks.on_batch_start:
        callbacks.on_batch_start(total)

    failures: list[tuple[str, str]] = []
    installed = 0

    for position, node_id in enumerate(node_ids, start=1):
        if callbacks and callbacks.on_node_start:
            callbacks.on_node_start(node_id, position, total)

        try:
            self.add_node(node_id)
        except Exception as exc:
            failures.append((node_id, str(exc)))
            if callbacks and callbacks.on_node_complete:
                callbacks.on_node_complete(node_id, False, str(exc))
        else:
            installed += 1
            if callbacks and callbacks.on_node_complete:
                callbacks.on_node_complete(node_id, True, None)

    if callbacks and callbacks.on_batch_complete:
        callbacks.on_batch_complete(installed, total)

    return installed, failures
|
|
761
|
+
|
|
762
|
+
def remove_node(self, identifier: str) -> NodeRemovalResult:
    """Uninstall a custom node from this environment.

    Args:
        identifier: Node identifier or name.

    Returns:
        NodeRemovalResult: Details about the removal.

    Raises:
        CDNodeNotFoundError: If the node is not installed.
    """
    result = self.node_manager.remove_node(identifier)
    return result
|
|
772
|
+
|
|
773
|
+
def remove_nodes_with_progress(
    self,
    node_ids: list[str],
    callbacks: NodeInstallCallbacks | None = None
) -> tuple[int, list[tuple[str, str]]]:
    """Remove a batch of nodes, reporting progress through callbacks.

    Per-node failures are collected rather than raised, so one failed
    removal does not abort the rest of the batch.

    Args:
        node_ids: Identifiers of the nodes to remove.
        callbacks: Optional hooks fired at batch start/end and per node.

    Returns:
        (success_count, failed) where failed holds (node_id, error_message)
        tuples for every node that could not be removed.
    """
    total = len(node_ids)
    if callbacks and callbacks.on_batch_start:
        callbacks.on_batch_start(total)

    failures: list[tuple[str, str]] = []
    removed = 0

    for position, node_id in enumerate(node_ids, start=1):
        if callbacks and callbacks.on_node_start:
            callbacks.on_node_start(node_id, position, total)

        try:
            self.remove_node(node_id)
        except Exception as exc:
            failures.append((node_id, str(exc)))
            if callbacks and callbacks.on_node_complete:
                callbacks.on_node_complete(node_id, False, str(exc))
        else:
            removed += 1
            if callbacks and callbacks.on_node_complete:
                callbacks.on_node_complete(node_id, True, None)

    if callbacks and callbacks.on_batch_complete:
        callbacks.on_batch_complete(removed, total)

    return removed, failures
|
|
815
|
+
|
|
816
|
+
def update_node(
    self,
    identifier: str,
    confirmation_strategy: ConfirmationStrategy | None = None,
    no_test: bool = False
) -> UpdateResult:
    """Bring a node up to date according to its source type.

    Development nodes re-scan requirements.txt, registry nodes move to the
    latest published version, and git nodes advance to the latest commit.

    Args:
        identifier: Node identifier or name.
        confirmation_strategy: How update prompts are answered
            (None = auto-confirm).
        no_test: Skip resolution testing.

    Raises:
        CDNodeNotFoundError: If the node is not installed.
        CDEnvironmentError: If the node cannot be updated.
    """
    return self.node_manager.update_node(identifier, confirmation_strategy, no_test)
|
|
838
|
+
|
|
839
|
+
def check_development_node_drift(self) -> dict[str, tuple[set[str], set[str]]]:
    """Detect requirements drift in development nodes.

    Returns:
        Mapping of node_name -> (added_deps, removed_deps).
    """
    drift = self.node_manager.check_development_node_drift()
    return drift
|
|
846
|
+
|
|
847
|
+
# =====================================================
|
|
848
|
+
# Workflow Management
|
|
849
|
+
# =====================================================
|
|
850
|
+
|
|
851
|
+
def list_workflows(self) -> WorkflowSyncStatus:
    """Summarize workflows by their sync state.

    Returns:
        Sync status grouping workflow names into 'new', 'modified',
        'deleted', and 'synced'.
    """
    status = self.workflow_manager.get_workflow_sync_status()
    return status
|
|
858
|
+
|
|
859
|
+
def resolve_workflow(
    self,
    name: str,
    node_strategy: NodeResolutionStrategy | None = None,
    model_strategy: ModelResolutionStrategy | None = None,
    fix: bool = True,
    download_callbacks: BatchDownloadCallbacks | None = None
) -> ResolutionResult:
    """Resolve workflow dependencies - orchestrates analysis and resolution.

    Flow: analyze + resolve (cached), reconcile auto-resolutions into
    pyproject.toml, optionally fix remaining issues with the supplied
    strategies, then execute any pending model downloads and patch the
    workflow JSON with the downloaded models' resolved paths.

    Args:
        name: Workflow name to resolve
        node_strategy: Strategy for resolving missing nodes
        model_strategy: Strategy for resolving ambiguous/missing models
        fix: Attempt to fix unresolved issues with strategies
        download_callbacks: Optional callbacks for batch download progress (CLI provides)

    Returns:
        ResolutionResult with changes made

    Raises:
        FileNotFoundError: If workflow not found
    """
    # Analyze and resolve workflow (both cached for performance)
    _, result = self.workflow_manager.analyze_and_resolve_workflow(name)

    # Apply auto-resolutions (reconcile with pyproject.toml)
    self.workflow_manager.apply_resolution(result)

    # Check if there are any unresolved issues
    if result.has_issues and fix:
        # Fix issues with strategies (progressive writes: models AND nodes saved immediately)
        result = self.workflow_manager.fix_resolution(
            result,
            node_strategy,
            model_strategy
        )

    # Execute pending downloads if any download intents exist
    if result.has_download_intents:
        result.download_results = self.workflow_manager.execute_pending_downloads(result, download_callbacks)

        # After successful downloads, update workflow JSON with resolved paths
        # Re-resolve to get fresh model data (cached, so minimal cost)
        if result.download_results and any(dr.success for dr in result.download_results):
            # NOTE(review): the fresh analysis is only used to update the
            # workflow's model paths; the caller still receives the earlier
            # `result` (with download_results attached), not `fresh_result`.
            _, fresh_result = self.workflow_manager.analyze_and_resolve_workflow(name)
            self.workflow_manager.update_workflow_model_paths(fresh_result)

    return result
|
|
908
|
+
|
|
909
|
+
def get_uninstalled_nodes(self, workflow_name: str | None = None) -> list[str]:
    """Get node package IDs referenced in workflows but not installed.

    Compares nodes referenced in workflow sections against installed nodes
    to identify which nodes need installation.

    Args:
        workflow_name: Restrict the check to a single workflow. When None,
            every workflow with resolutions is considered. (Previously
            undocumented.)

    Returns:
        List of node package IDs that are referenced but not installed.
        Empty list if all workflow nodes are already installed, or if
        workflow_name was given but that workflow does not exist.

    Example:
        >>> env.resolve_workflow("my_workflow")
        >>> missing = env.get_uninstalled_nodes()
        >>> # ['rgthree-comfy', 'comfyui-depthanythingv2', ...]
    """
    # Select the workflow sections to inspect.
    if workflow_name:
        workflow = self.pyproject.workflows.get_workflow(workflow_name)
        if not workflow:
            logger.warning(f"Workflow '{workflow_name}' not found")
            return []
        workflows = {workflow_name: workflow}
    else:
        workflows = self.pyproject.workflows.get_all_with_resolutions()

    # Union of every node ID any inspected workflow references.
    workflow_node_ids: set[str] = set()
    for workflow_data in workflows.values():
        workflow_node_ids.update(workflow_data.get('nodes', []))
    logger.debug(f"Workflow node references: {workflow_node_ids}")

    # IDs of nodes already installed in this environment.
    installed_node_ids = set(self.pyproject.nodes.get_existing().keys())
    logger.debug(f"Installed nodes: {installed_node_ids}")

    # Referenced by a workflow but not installed.
    uninstalled_ids = list(workflow_node_ids - installed_node_ids)
    logger.debug(f"Uninstalled nodes: {uninstalled_ids}")

    return uninstalled_ids
|
|
951
|
+
|
|
952
|
+
def get_unused_nodes(self, exclude: list[str] | None = None) -> list[NodeInfo]:
    """Find installed nodes that no workflow requires.

    Runs the same auto-resolution flow as the status command so that every
    node a workflow actually needs - including ones coming from
    custom_node_map - is accounted for before anything is flagged unused.

    Args:
        exclude: Package IDs that must never be reported as unused.

    Returns:
        NodeInfo objects for unused nodes that can be safely removed.

    Example:
        >>> unused = env.get_unused_nodes()
        >>> # [NodeInfo(registry_id='old-node'), ...]
        >>> unused = env.get_unused_nodes(exclude=['keep-this-node'])
    """
    # Workflow status triggers auto-resolution (cached).
    workflow_status = self.workflow_manager.get_workflow_status()

    # Every non-optional package some workflow resolved to.
    needed = {
        resolved.package_id
        for analysis in workflow_status.analyzed_workflows
        for resolved in analysis.resolution.nodes_resolved
        if resolved.package_id and not resolved.is_optional
    }
    logger.debug(f"Packages needed by workflows: {needed}")

    installed = self.pyproject.nodes.get_existing()
    installed_ids = set(installed.keys())
    logger.debug(f"Installed nodes: {installed_ids}")

    # Unused = installed minus needed, minus explicit exclusions.
    unused_ids = installed_ids - needed
    if exclude:
        unused_ids -= set(exclude)
        logger.debug(f"After exclusions: {unused_ids}")

    return [installed[node_id] for node_id in unused_ids]
|
|
997
|
+
|
|
998
|
+
def prune_unused_nodes(
    self,
    exclude: list[str] | None = None,
    callbacks: NodeInstallCallbacks | None = None
) -> tuple[int, list[tuple[str, str]]]:
    """Remove every installed node that no workflow needs.

    Args:
        exclude: Package IDs to keep even when unused.
        callbacks: Optional progress hooks for the batch removal.

    Returns:
        (success_count, failed_removals) from the batch removal;
        (0, []) when nothing is unused.
    """
    candidates = self.get_unused_nodes(exclude=exclude)
    if not candidates:
        return (0, [])

    # Prefer the registry ID; fall back to the node's name.
    identifiers = [entry.registry_id or entry.name for entry in candidates]
    return self.remove_nodes_with_progress(identifiers, callbacks)
|
|
1020
|
+
|
|
1021
|
+
def has_committable_changes(self) -> bool:
    """Report whether a commit would record anything.

    Considers both workflow file sync state (new/modified/deleted
    workflows) and uncommitted git changes such as pyproject.toml or
    uv.lock edits.

    Returns:
        True when either source has pending changes.
    """
    workflow_changes = self.workflow_manager.get_workflow_status().sync_status.has_changes
    git_changes = self.git_manager.has_uncommitted_changes()
    return workflow_changes or git_changes
|
|
1038
|
+
|
|
1039
|
+
def commit(self, message: str | None = None) -> None:
|
|
1040
|
+
"""Commit changes to git repository.
|
|
1041
|
+
|
|
1042
|
+
Args:
|
|
1043
|
+
message: Optional commit message
|
|
1044
|
+
|
|
1045
|
+
Raises:
|
|
1046
|
+
OSError: If git commands fail
|
|
1047
|
+
"""
|
|
1048
|
+
return self.git_manager.commit_all(message)
|
|
1049
|
+
|
|
1050
|
+
def execute_commit(
    self,
    workflow_status: DetailedWorkflowStatus | None = None,
    message: str | None = None,
    allow_issues: bool = False,
) -> None:
    """Execute commit using cached or provided analysis.

    Validates that there is something to commit and that workflows are
    resolved, then batches all pyproject.toml mutations (one load, one
    save), copies workflows into .cec, and finally commits via git.

    Args:
        workflow_status: Pre-computed workflow analysis to reuse; when
            None a fresh status is fetched from the workflow manager.
        message: Optional commit message
        allow_issues: Allow committing even with unresolved issues
    """
    # Use provided analysis or prepare a new one
    if not workflow_status:
        workflow_status = self.workflow_manager.get_workflow_status()

    # Check if there are any changes to commit (workflows OR git)
    has_workflow_changes = workflow_status.sync_status.has_changes
    has_git_changes = self.git_manager.has_uncommitted_changes()

    if not has_workflow_changes and not has_git_changes:
        # Nothing to do; log and bail out without touching the repo.
        logger.error("No changes to commit")
        return

    # Check if changes are safe to commit (no unresolved issues)
    if not workflow_status.is_commit_safe and not allow_issues:
        logger.error("Cannot commit with unresolved issues. Use --allow-issues to force.")
        return

    # Apply auto-resolutions to pyproject.toml for workflows with changes
    # BATCHED MODE: Load config once, pass through all operations, save once
    logger.info("Committing all changes...")
    config = self.pyproject.load()

    for wf_analysis in workflow_status.analyzed_workflows:
        if wf_analysis.sync_state in ("new", "modified"):
            # Apply resolution results to pyproject (in-memory mutations)
            self.workflow_manager.apply_resolution(wf_analysis.resolution, config=config)

    # Clean up deleted workflows from pyproject.toml
    if workflow_status.sync_status.deleted:
        logger.info("Cleaning up deleted workflows from pyproject.toml...")
        removed_count = self.pyproject.workflows.remove_workflows(
            workflow_status.sync_status.deleted,
            config=config
        )
        logger.debug(f"Removed {removed_count} workflow section(s)")

    # Clean up orphaned models (must run AFTER workflow sections are removed)
    self.pyproject.models.cleanup_orphans(config=config)

    # Save all changes at once
    self.pyproject.save(config)

    logger.info("Copying workflows from ComfyUI to .cec...")
    copy_results = self.workflow_manager.copy_all_workflows()
    # "deleted" entries mark removals, not copies, so they are not counted.
    copied_count = len([r for r in copy_results.values() if r and r != "deleted"])
    logger.debug(f"Copied {copied_count} workflow(s)")

    self.commit(message)
|
|
1110
|
+
|
|
1111
|
+
# =====================================================
|
|
1112
|
+
# Model Source Management
|
|
1113
|
+
# =====================================================
|
|
1114
|
+
|
|
1115
|
+
def add_model_source(self, identifier: str, url: str) -> ModelSourceResult:
    """Attach a download URL to a tracked model.

    Args:
        identifier: Model hash or filename.
        url: Download URL for the model.

    Returns:
        ModelSourceResult with success status and model details.
    """
    outcome = self.model_manager.add_model_source(identifier, url)
    return outcome
|
|
1126
|
+
|
|
1127
|
+
def get_models_without_sources(self) -> list[ModelSourceStatus]:
    """List tracked models that lack any download source.

    Returns:
        ModelSourceStatus entries, including local availability info.
    """
    missing = self.model_manager.get_models_without_sources()
    return missing
|
|
1134
|
+
|
|
1135
|
+
# =====================================================
|
|
1136
|
+
# Constraint Management
|
|
1137
|
+
# =====================================================
|
|
1138
|
+
|
|
1139
|
+
def add_constraint(self, package: str) -> None:
    """Record *package* as a uv constraint dependency."""
    self.pyproject.uv_config.add_constraint(package)
|
|
1142
|
+
|
|
1143
|
+
def remove_constraint(self, package: str) -> bool:
    """Drop *package* from the uv constraint dependencies.

    Returns:
        The uv config's result (presumably whether the constraint was
        present and removed - confirm against PyprojectUVConfig).
    """
    removed = self.pyproject.uv_config.remove_constraint(package)
    return removed
|
|
1146
|
+
|
|
1147
|
+
def list_constraints(self) -> list[str]:
    """Return the environment's uv constraint dependencies."""
    return self.pyproject.uv_config.get_constraints()
|
|
1150
|
+
|
|
1151
|
+
# ===== Python Dependency Management =====
|
|
1152
|
+
|
|
1153
|
+
def add_dependencies(
    self,
    packages: list[str] | None = None,
    requirements_file: Path | None = None,
    upgrade: bool = False,
    group: str | None = None,
    dev: bool = False,
    editable: bool = False,
    bounds: str | None = None
) -> str:
    """Add Python dependencies via ``uv add``.

    Packages land in [project.dependencies] (or the chosen group) and are
    installed into the environment.

    Args:
        packages: Package specs, e.g. ['requests>=2.0.0', 'pillow'].
        requirements_file: requirements.txt whose entries should be added.
        upgrade: Upgrade packages that are already present.
        group: Dependency group name (e.g. 'optional-cuda').
        dev: Add to dev dependencies.
        editable: Install as editable (for local development).
        bounds: Version specifier style ('lower', 'major', 'minor', 'exact').

    Returns:
        UV command output.

    Raises:
        ValueError: Neither packages nor requirements_file was given.
        UVCommandError: The uv add invocation failed.
    """
    # At least one input source is required (empty lists count as missing).
    if not (packages or requirements_file):
        raise ValueError("Either packages or requirements_file must be provided")

    return self.uv_manager.add_dependency(
        packages=packages,
        requirements_file=requirements_file,
        upgrade=upgrade,
        group=group,
        dev=dev,
        editable=editable,
        bounds=bounds
    )
|
|
1195
|
+
|
|
1196
|
+
def remove_dependencies(self, packages: list[str]) -> dict:
    """Remove Python dependencies via ``uv remove``.

    Packages are dropped from [project.dependencies] and uninstalled;
    names that are not currently dependencies are skipped rather than
    treated as errors.

    Args:
        packages: Package names to remove.

    Returns:
        Dict with 'removed' (packages removed) and 'skipped' (packages
        that were not in the dependency list).

    Raises:
        UVCommandError: If uv remove fails for existing packages.
    """
    return self.uv_manager.remove_dependency(packages=packages)
|
|
1212
|
+
|
|
1213
|
+
def list_dependencies(self, all: bool = False) -> dict[str, list[str]]:
    """List project dependencies, optionally with every group.

    NOTE: the parameter name shadows the builtin ``all()``; it is kept
    unchanged for backward compatibility with keyword callers.

    Args:
        all: Include all dependency groups in addition to the base list.

    Returns:
        Mapping of group name to dependency specs. Base dependencies are
        always present under the "dependencies" key and appear first.
    """
    project_table = self.pyproject.load().get('project', {})
    listing = {"dependencies": project_table.get('dependencies', [])}

    if all:
        listing.update(self.pyproject.dependencies.get_groups())

    return listing
|
|
1233
|
+
|
|
1234
|
+
# =====================================================
|
|
1235
|
+
# Export/Import
|
|
1236
|
+
# =====================================================
|
|
1237
|
+
|
|
1238
|
+
def export_environment(
    self,
    output_path: Path,
    callbacks: ExportCallbacks | None = None
) -> Path:
    """Export environment as .tar.gz bundle.

    Validates the environment is fully committed and resolved, warns (via
    callback) about models that have no download source, then delegates
    bundle creation to the ExportImportManager.

    Args:
        output_path: Path for output tarball
        callbacks: Optional callbacks for warnings/progress

    Returns:
        Path to created tarball

    Raises:
        CDExportError: If environment has uncommitted changes or unresolved issues
    """
    # Imported locally to avoid import cycles at module load time.
    from ..managers.export_import_manager import ExportImportManager
    from ..models.exceptions import CDExportError, ExportErrorContext

    # Validation: Get workflow status first for comprehensive checks
    status = self.workflow_manager.get_workflow_status()

    # Check for uncommitted workflow changes (new, modified, or deleted)
    if status.sync_status.has_changes:
        context = ExportErrorContext(
            uncommitted_workflows=(
                status.sync_status.new +
                status.sync_status.modified +
                status.sync_status.deleted
            )
        )
        raise CDExportError(
            "Cannot export with uncommitted workflow changes",
            context=context
        )

    # Validation: Check for uncommitted git changes in .cec/
    if self.git_manager.has_uncommitted_changes():
        context = ExportErrorContext(uncommitted_git_changes=True)
        raise CDExportError(
            "Cannot export with uncommitted git changes",
            context=context
        )

    # Validation: Check all workflows are resolved
    if not status.is_commit_safe:
        context = ExportErrorContext(has_unresolved_issues=True)
        raise CDExportError(
            "Cannot export - workflows have unresolved issues",
            context=context
        )

    # Check for models without sources and collect workflow usage
    from ..models.shared import ModelWithoutSourceInfo

    models_without_sources: list[ModelWithoutSourceInfo] = []
    # Only models lacking any source are candidates for the warning.
    models_by_hash = {m.hash: m for m in self.pyproject.models.get_all() if not m.sources}

    if models_by_hash:
        # Map models to workflows that use them
        all_workflows = self.pyproject.workflows.get_all_with_resolutions()
        for workflow_name in all_workflows.keys():
            workflow_models = self.pyproject.workflows.get_workflow_models(workflow_name)
            for wf_model in workflow_models:
                if wf_model.hash and wf_model.hash in models_by_hash:
                    # Find or create entry for this model
                    # (linear scan; the source-less model list is expected to be small)
                    existing = next((m for m in models_without_sources if m.hash == wf_model.hash), None)
                    if existing:
                        existing.workflows.append(workflow_name)
                    else:
                        model_data = models_by_hash[wf_model.hash]
                        models_without_sources.append(
                            ModelWithoutSourceInfo(
                                filename=model_data.filename,
                                hash=wf_model.hash,
                                workflows=[workflow_name]
                            )
                        )

        # Notify callback with structured data
        # NOTE(review): models without sources that no workflow references
        # are not reported here - only workflow-linked ones are collected.
        if callbacks:
            callbacks.on_models_without_sources(models_without_sources)

    # Create export
    manager = ExportImportManager(self.cec_path, self.comfyui_path)
    return manager.create_export(output_path, self.pyproject)
|
|
1325
|
+
|
|
1326
|
+
def finalize_import(
|
|
1327
|
+
self,
|
|
1328
|
+
model_strategy: str = "all",
|
|
1329
|
+
callbacks: ImportCallbacks | None = None
|
|
1330
|
+
) -> None:
|
|
1331
|
+
"""Complete import setup after .cec extraction.
|
|
1332
|
+
|
|
1333
|
+
Assumes .cec directory is already populated (from tarball or git).
|
|
1334
|
+
|
|
1335
|
+
Phases:
|
|
1336
|
+
1. Clone/restore ComfyUI from cache and configure PyTorch
|
|
1337
|
+
2. Initialize git repository
|
|
1338
|
+
3. Copy workflows to ComfyUI user directory
|
|
1339
|
+
4. Sync dependencies, custom nodes, and workflows (via sync())
|
|
1340
|
+
5. Prepare and resolve models based on strategy
|
|
1341
|
+
|
|
1342
|
+
Args:
|
|
1343
|
+
model_strategy: "all", "required", or "skip"
|
|
1344
|
+
callbacks: Optional progress callbacks
|
|
1345
|
+
|
|
1346
|
+
Raises:
|
|
1347
|
+
ValueError: If ComfyUI already exists or .cec not properly initialized
|
|
1348
|
+
"""
|
|
1349
|
+
from ..caching.comfyui_cache import ComfyUICacheManager, ComfyUISpec
|
|
1350
|
+
from ..utils.comfyui_ops import clone_comfyui
|
|
1351
|
+
from ..utils.git import git_rev_parse
|
|
1352
|
+
|
|
1353
|
+
logger.info(f"Finalizing import for environment: {self.name}")
|
|
1354
|
+
|
|
1355
|
+
# Verify environment state
|
|
1356
|
+
if self.comfyui_path.exists():
|
|
1357
|
+
raise ValueError("Environment already has ComfyUI - cannot finalize import")
|
|
1358
|
+
|
|
1359
|
+
# Phase 1: Clone or restore ComfyUI from cache
|
|
1360
|
+
comfyui_cache = ComfyUICacheManager(cache_base_path=self.workspace_paths.cache)
|
|
1361
|
+
|
|
1362
|
+
# Read ComfyUI version from pyproject.toml
|
|
1363
|
+
comfyui_version = None
|
|
1364
|
+
comfyui_version_type = None
|
|
1365
|
+
try:
|
|
1366
|
+
pyproject_data = self.pyproject.load()
|
|
1367
|
+
comfygit_config = pyproject_data.get("tool", {}).get("comfygit", {})
|
|
1368
|
+
comfyui_version = comfygit_config.get("comfyui_version")
|
|
1369
|
+
comfyui_version_type = comfygit_config.get("comfyui_version_type")
|
|
1370
|
+
except Exception as e:
|
|
1371
|
+
logger.warning(f"Could not read comfyui_version from pyproject.toml: {e}")
|
|
1372
|
+
|
|
1373
|
+
if comfyui_version:
|
|
1374
|
+
version_desc = f"{comfyui_version_type} {comfyui_version}" if comfyui_version_type else comfyui_version
|
|
1375
|
+
logger.debug(f"Using comfyui_version from pyproject: {version_desc}")
|
|
1376
|
+
|
|
1377
|
+
# Auto-detect version type if not specified
|
|
1378
|
+
if not comfyui_version_type and comfyui_version:
|
|
1379
|
+
if comfyui_version.startswith('v'):
|
|
1380
|
+
comfyui_version_type = "release"
|
|
1381
|
+
elif comfyui_version in ("main", "master"):
|
|
1382
|
+
comfyui_version_type = "branch"
|
|
1383
|
+
else:
|
|
1384
|
+
comfyui_version_type = "commit"
|
|
1385
|
+
logger.debug(f"Auto-detected version type: {comfyui_version_type}")
|
|
1386
|
+
|
|
1387
|
+
# Create version spec
|
|
1388
|
+
spec = ComfyUISpec(
|
|
1389
|
+
version=comfyui_version or "main",
|
|
1390
|
+
version_type=comfyui_version_type or "branch",
|
|
1391
|
+
commit_sha=None
|
|
1392
|
+
)
|
|
1393
|
+
|
|
1394
|
+
# Check cache first
|
|
1395
|
+
cached_path = comfyui_cache.get_cached_comfyui(spec)
|
|
1396
|
+
|
|
1397
|
+
if cached_path:
|
|
1398
|
+
if callbacks:
|
|
1399
|
+
callbacks.on_phase("restore_comfyui", f"Restoring ComfyUI {spec.version} from cache...")
|
|
1400
|
+
logger.info(f"Restoring ComfyUI {spec.version} from cache")
|
|
1401
|
+
shutil.copytree(cached_path, self.comfyui_path)
|
|
1402
|
+
else:
|
|
1403
|
+
if callbacks:
|
|
1404
|
+
callbacks.on_phase("clone_comfyui", f"Cloning ComfyUI {spec.version}...")
|
|
1405
|
+
logger.info(f"Cloning ComfyUI {spec.version}")
|
|
1406
|
+
clone_comfyui(self.comfyui_path, comfyui_version)
|
|
1407
|
+
|
|
1408
|
+
# Cache the fresh clone
|
|
1409
|
+
commit_sha = git_rev_parse(self.comfyui_path, "HEAD")
|
|
1410
|
+
if commit_sha:
|
|
1411
|
+
spec.commit_sha = commit_sha
|
|
1412
|
+
comfyui_cache.cache_comfyui(spec, self.comfyui_path)
|
|
1413
|
+
logger.info(f"Cached ComfyUI {spec.version} ({commit_sha[:7]})")
|
|
1414
|
+
else:
|
|
1415
|
+
logger.warning(f"Could not determine commit SHA for ComfyUI {spec.version}")
|
|
1416
|
+
|
|
1417
|
+
# Remove ComfyUI's default models directory (will be replaced with symlink)
|
|
1418
|
+
models_dir = self.comfyui_path / "models"
|
|
1419
|
+
if models_dir.exists() and not models_dir.is_symlink():
|
|
1420
|
+
shutil.rmtree(models_dir)
|
|
1421
|
+
|
|
1422
|
+
# Remove ComfyUI's default input/output directories (will be replaced with symlinks)
|
|
1423
|
+
from ..utils.symlink_utils import is_link
|
|
1424
|
+
|
|
1425
|
+
input_dir = self.comfyui_path / "input"
|
|
1426
|
+
if input_dir.exists() and not is_link(input_dir):
|
|
1427
|
+
shutil.rmtree(input_dir)
|
|
1428
|
+
logger.debug("Removed ComfyUI's default input directory during import")
|
|
1429
|
+
|
|
1430
|
+
output_dir = self.comfyui_path / "output"
|
|
1431
|
+
if output_dir.exists() and not is_link(output_dir):
|
|
1432
|
+
shutil.rmtree(output_dir)
|
|
1433
|
+
logger.debug("Removed ComfyUI's default output directory during import")
|
|
1434
|
+
|
|
1435
|
+
# Phase 1.5: Create venv and optionally install PyTorch with specific backend
|
|
1436
|
+
# Read Python version from .python-version file
|
|
1437
|
+
python_version_file = self.cec_path / ".python-version"
|
|
1438
|
+
python_version = python_version_file.read_text(encoding='utf-8').strip() if python_version_file.exists() else None
|
|
1439
|
+
|
|
1440
|
+
if self.torch_backend:
|
|
1441
|
+
if callbacks:
|
|
1442
|
+
callbacks.on_phase("configure_pytorch", f"Configuring PyTorch backend: {self.torch_backend}")
|
|
1443
|
+
|
|
1444
|
+
# Strip imported PyTorch config BEFORE venv creation to avoid platform conflicts
|
|
1445
|
+
from ..constants import PYTORCH_CORE_PACKAGES
|
|
1446
|
+
|
|
1447
|
+
logger.info("Stripping imported PyTorch configuration...")
|
|
1448
|
+
config = self.pyproject.load()
|
|
1449
|
+
if "tool" in config and "uv" in config["tool"]:
|
|
1450
|
+
# Remove PyTorch indexes
|
|
1451
|
+
indexes = config["tool"]["uv"].get("index", [])
|
|
1452
|
+
if isinstance(indexes, list):
|
|
1453
|
+
config["tool"]["uv"]["index"] = [
|
|
1454
|
+
idx for idx in indexes
|
|
1455
|
+
if not any(p in idx.get("name", "").lower() for p in ["pytorch-", "torch-"])
|
|
1456
|
+
]
|
|
1457
|
+
|
|
1458
|
+
# Remove PyTorch sources
|
|
1459
|
+
sources = config.get("tool", {}).get("uv", {}).get("sources", {})
|
|
1460
|
+
for pkg in PYTORCH_CORE_PACKAGES:
|
|
1461
|
+
sources.pop(pkg, None)
|
|
1462
|
+
|
|
1463
|
+
self.pyproject.save(config)
|
|
1464
|
+
|
|
1465
|
+
# Remove PyTorch constraints
|
|
1466
|
+
for pkg in PYTORCH_CORE_PACKAGES:
|
|
1467
|
+
self.pyproject.uv_config.remove_constraint(pkg)
|
|
1468
|
+
|
|
1469
|
+
logger.info(f"Creating venv with Python {python_version}")
|
|
1470
|
+
self.uv_manager.create_venv(self.venv_path, python_version=python_version, seed=True)
|
|
1471
|
+
|
|
1472
|
+
logger.info(f"Installing PyTorch with backend: {self.torch_backend}")
|
|
1473
|
+
self.uv_manager.install_packages(
|
|
1474
|
+
packages=["torch", "torchvision", "torchaudio"],
|
|
1475
|
+
python=self.uv_manager.python_executable,
|
|
1476
|
+
torch_backend=self.torch_backend,
|
|
1477
|
+
verbose=True
|
|
1478
|
+
)
|
|
1479
|
+
|
|
1480
|
+
# Detect installed backend and configure pyproject
|
|
1481
|
+
from ..utils.pytorch import extract_backend_from_version, get_pytorch_index_url
|
|
1482
|
+
|
|
1483
|
+
first_version = extract_pip_show_package_version(
|
|
1484
|
+
self.uv_manager.show_package("torch", self.uv_manager.python_executable)
|
|
1485
|
+
)
|
|
1486
|
+
|
|
1487
|
+
if first_version:
|
|
1488
|
+
backend = extract_backend_from_version(first_version)
|
|
1489
|
+
logger.info(f"Detected PyTorch backend from installed version: {backend}")
|
|
1490
|
+
|
|
1491
|
+
if backend:
|
|
1492
|
+
# Add new index for detected backend
|
|
1493
|
+
index_name = f"pytorch-{backend}"
|
|
1494
|
+
self.pyproject.uv_config.add_index(
|
|
1495
|
+
name=index_name,
|
|
1496
|
+
url=get_pytorch_index_url(backend),
|
|
1497
|
+
explicit=True
|
|
1498
|
+
)
|
|
1499
|
+
|
|
1500
|
+
# Add sources pointing to new index
|
|
1501
|
+
for pkg in PYTORCH_CORE_PACKAGES:
|
|
1502
|
+
self.pyproject.uv_config.add_source(pkg, {"index": index_name})
|
|
1503
|
+
|
|
1504
|
+
logger.info(f"Configured PyTorch index: {index_name}")
|
|
1505
|
+
|
|
1506
|
+
# Add constraints for installed versions
|
|
1507
|
+
for pkg in PYTORCH_CORE_PACKAGES:
|
|
1508
|
+
version = extract_pip_show_package_version(
|
|
1509
|
+
self.uv_manager.show_package(pkg, self.uv_manager.python_executable)
|
|
1510
|
+
)
|
|
1511
|
+
if version:
|
|
1512
|
+
self.pyproject.uv_config.add_constraint(f"{pkg}=={version}")
|
|
1513
|
+
logger.info(f"Added constraint: {pkg}=={version}")
|
|
1514
|
+
|
|
1515
|
+
# Phase 2: Setup git repository
|
|
1516
|
+
# For git imports: .git already exists with remote, just ensure gitignore
|
|
1517
|
+
# For tarball imports: .git doesn't exist, initialize fresh repo
|
|
1518
|
+
git_existed = (self.cec_path / ".git").exists()
|
|
1519
|
+
|
|
1520
|
+
if callbacks:
|
|
1521
|
+
phase_msg = "Ensuring git configuration..." if git_existed else "Initializing git repository..."
|
|
1522
|
+
callbacks.on_phase("init_git", phase_msg)
|
|
1523
|
+
|
|
1524
|
+
if git_existed:
|
|
1525
|
+
# Git import case: preserve existing repo, just ensure gitignore
|
|
1526
|
+
logger.info("Git repository already exists (imported from git), preserving remote and history")
|
|
1527
|
+
self.git_manager._create_gitignore()
|
|
1528
|
+
self.git_manager.ensure_git_identity()
|
|
1529
|
+
else:
|
|
1530
|
+
# Tarball import case: initialize fresh repo
|
|
1531
|
+
logger.info("Initializing new git repository")
|
|
1532
|
+
self.git_manager.initialize_environment_repo("Imported environment")
|
|
1533
|
+
|
|
1534
|
+
# Phase 3: Copy workflows
|
|
1535
|
+
if callbacks:
|
|
1536
|
+
callbacks.on_phase("copy_workflows", "Setting up workflows...")
|
|
1537
|
+
|
|
1538
|
+
workflows_src = self.cec_path / "workflows"
|
|
1539
|
+
workflows_dst = self.comfyui_path / "user" / "default" / "workflows"
|
|
1540
|
+
workflows_dst.mkdir(parents=True, exist_ok=True)
|
|
1541
|
+
|
|
1542
|
+
if workflows_src.exists():
|
|
1543
|
+
for workflow_file in workflows_src.glob("*.json"):
|
|
1544
|
+
shutil.copy2(workflow_file, workflows_dst / workflow_file.name)
|
|
1545
|
+
if callbacks:
|
|
1546
|
+
callbacks.on_workflow_copied(workflow_file.name)
|
|
1547
|
+
|
|
1548
|
+
# Phase 4: Sync dependencies, custom nodes, and workflows
|
|
1549
|
+
# This single sync() call handles all dependency installation, node syncing, and workflow restoration
|
|
1550
|
+
if callbacks:
|
|
1551
|
+
callbacks.on_phase("sync_environment", "Syncing dependencies and custom nodes...")
|
|
1552
|
+
|
|
1553
|
+
try:
|
|
1554
|
+
# During import, don't remove ComfyUI builtins (fresh clone has example files)
|
|
1555
|
+
# Enable verbose to show real-time uv output during dependency installation
|
|
1556
|
+
sync_result = self.sync(remove_extra_nodes=False, sync_callbacks=callbacks, verbose=True)
|
|
1557
|
+
if sync_result.success and sync_result.nodes_installed and callbacks:
|
|
1558
|
+
for node_name in sync_result.nodes_installed:
|
|
1559
|
+
callbacks.on_node_installed(node_name)
|
|
1560
|
+
elif not sync_result.success and callbacks:
|
|
1561
|
+
for error in sync_result.errors:
|
|
1562
|
+
callbacks.on_error(f"Node sync: {error}")
|
|
1563
|
+
except Exception as e:
|
|
1564
|
+
if callbacks:
|
|
1565
|
+
callbacks.on_error(f"Node sync failed: {e}")
|
|
1566
|
+
|
|
1567
|
+
# Phase 5: Prepare and resolve models
|
|
1568
|
+
if callbacks:
|
|
1569
|
+
callbacks.on_phase("resolve_models", f"Resolving workflows ({model_strategy} strategy)...")
|
|
1570
|
+
|
|
1571
|
+
# Always prepare models to copy sources from global table, even for "skip"
|
|
1572
|
+
# This ensures download intents are preserved for later resolution
|
|
1573
|
+
workflows_with_intents = self.model_manager.prepare_import_with_model_strategy(model_strategy)
|
|
1574
|
+
|
|
1575
|
+
# Only auto-resolve if not "skip" strategy
|
|
1576
|
+
workflows_to_resolve = [] if model_strategy == "skip" else workflows_with_intents
|
|
1577
|
+
|
|
1578
|
+
# Resolve workflows with download intents
|
|
1579
|
+
from ..models.workflow import BatchDownloadCallbacks
|
|
1580
|
+
from ..strategies.auto import AutoModelStrategy, AutoNodeStrategy
|
|
1581
|
+
|
|
1582
|
+
download_failures = []
|
|
1583
|
+
|
|
1584
|
+
# Create download callbacks adapter if import callbacks provided
|
|
1585
|
+
download_callbacks = None
|
|
1586
|
+
if callbacks:
|
|
1587
|
+
download_callbacks = BatchDownloadCallbacks(
|
|
1588
|
+
on_batch_start=callbacks.on_download_batch_start,
|
|
1589
|
+
on_file_start=callbacks.on_download_file_start,
|
|
1590
|
+
on_file_progress=callbacks.on_download_file_progress,
|
|
1591
|
+
on_file_complete=callbacks.on_download_file_complete,
|
|
1592
|
+
on_batch_complete=callbacks.on_download_batch_complete
|
|
1593
|
+
)
|
|
1594
|
+
|
|
1595
|
+
for workflow_name in workflows_to_resolve:
|
|
1596
|
+
try:
|
|
1597
|
+
result = self.resolve_workflow(
|
|
1598
|
+
name=workflow_name,
|
|
1599
|
+
model_strategy=AutoModelStrategy(),
|
|
1600
|
+
node_strategy=AutoNodeStrategy(),
|
|
1601
|
+
download_callbacks=download_callbacks
|
|
1602
|
+
)
|
|
1603
|
+
|
|
1604
|
+
# Track successful vs failed downloads from actual download results
|
|
1605
|
+
successful_downloads = sum(1 for dr in result.download_results if dr.success)
|
|
1606
|
+
failed_downloads = [
|
|
1607
|
+
(workflow_name, dr.filename)
|
|
1608
|
+
for dr in result.download_results
|
|
1609
|
+
if not dr.success
|
|
1610
|
+
]
|
|
1611
|
+
|
|
1612
|
+
download_failures.extend(failed_downloads)
|
|
1613
|
+
|
|
1614
|
+
if callbacks:
|
|
1615
|
+
callbacks.on_workflow_resolved(workflow_name, successful_downloads)
|
|
1616
|
+
|
|
1617
|
+
except Exception as e:
|
|
1618
|
+
if callbacks:
|
|
1619
|
+
callbacks.on_error(f"Failed to resolve {workflow_name}: {e}")
|
|
1620
|
+
|
|
1621
|
+
# Report download failures
|
|
1622
|
+
if download_failures and callbacks:
|
|
1623
|
+
callbacks.on_download_failures(download_failures)
|
|
1624
|
+
|
|
1625
|
+
# Mark environment as fully initialized
|
|
1626
|
+
from ..utils.environment_cleanup import mark_environment_complete
|
|
1627
|
+
mark_environment_complete(self.cec_path)
|
|
1628
|
+
|
|
1629
|
+
# Phase 7: Commit all changes from import process
|
|
1630
|
+
# This captures: workflows copied, nodes synced, models resolved, pyproject updates
|
|
1631
|
+
if self.git_manager.has_uncommitted_changes():
|
|
1632
|
+
self.git_manager.commit_with_identity("Imported environment", add_all=True)
|
|
1633
|
+
logger.info("Committed import changes")
|
|
1634
|
+
|
|
1635
|
+
logger.info("Import finalization completed successfully")
|