comfygit_core-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,1944 @@
+"""Auto workflow tracking - all workflows in ComfyUI are automatically managed."""
+
+from __future__ import annotations
+
+import json
+import shutil
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from comfygit_core.models.shared import ModelWithLocation
+from comfygit_core.repositories.node_mappings_repository import NodeMappingsRepository
+from comfygit_core.resolvers.global_node_resolver import GlobalNodeResolver
+
+from ..analyzers.workflow_dependency_parser import WorkflowDependencyParser
+from ..logging.logging_config import get_logger
+from ..models.protocols import ModelResolutionStrategy, NodeResolutionStrategy
+from ..models.workflow import (
+    DetailedWorkflowStatus,
+    ModelResolutionContext,
+    NodeResolutionContext,
+    ResolutionResult,
+    ResolvedModel,
+    ScoredMatch,
+    Workflow,
+    WorkflowAnalysisStatus,
+    WorkflowNode,
+    WorkflowNodeWidgetRef,
+    WorkflowSyncStatus,
+)
+from ..repositories.workflow_repository import WorkflowRepository
+from ..resolvers.model_resolver import ModelResolver
+from ..services.model_downloader import ModelDownloader
+from ..utils.git import is_git_url
+from ..utils.workflow_hash import normalize_workflow
+
+if TYPE_CHECKING:
+    from ..caching.workflow_cache import WorkflowCacheRepository
+    from ..models.workflow import ResolvedNodePackage, WorkflowDependencies
+    from ..repositories.model_repository import ModelRepository
+    from .pyproject_manager import PyprojectManager
+
+logger = get_logger(__name__)
+
+CATEGORY_CRITICALITY_DEFAULTS = {
+    "checkpoints": "flexible",
+    "vae": "flexible",
+    "text_encoders": "flexible",
+    "loras": "flexible",
+    "controlnet": "required",
+    "clip_vision": "required",
+    "style_models": "flexible",
+    "embeddings": "flexible",
+    "upscale_models": "flexible",
+}
+
+
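The `_get_default_criticality` helper that consumes this table is defined later in the file, outside the hunk shown here. A minimal sketch of the lookup it presumably performs; the "required" fallback for categories missing from the table is an assumption, not taken from this diff:

    # Sketch only - the packaged helper is not visible in this hunk,
    # and the "required" fallback for unknown categories is assumed.
    def get_default_criticality(category: str) -> str:
        return CATEGORY_CRITICALITY_DEFAULTS.get(category, "required")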
+class WorkflowManager:
+    """Manages all workflows automatically - no explicit tracking needed."""
+
+    def __init__(
+        self,
+        comfyui_path: Path,
+        cec_path: Path,
+        pyproject: PyprojectManager,
+        model_repository: ModelRepository,
+        node_mapping_repository: NodeMappingsRepository,
+        model_downloader: ModelDownloader,
+        workflow_cache: WorkflowCacheRepository,
+        environment_name: str,
+    ):
+        self.comfyui_path = comfyui_path
+        self.cec_path = cec_path
+        self.pyproject = pyproject
+        self.model_repository = model_repository
+        self.node_mapping_repository = node_mapping_repository
+        self.workflow_cache = workflow_cache
+        self.environment_name = environment_name
+
+        self.comfyui_workflows = comfyui_path / "user" / "default" / "workflows"
+        self.cec_workflows = cec_path / "workflows"
+
+        # Ensure directories exist
+        self.comfyui_workflows.mkdir(parents=True, exist_ok=True)
+        self.cec_workflows.mkdir(parents=True, exist_ok=True)
+
+        # Create repository and inject into resolver
+        self.global_node_resolver = GlobalNodeResolver(self.node_mapping_repository)
+        self.model_resolver = ModelResolver(model_repository=self.model_repository)
+
+        # Use injected model downloader from workspace
+        self.downloader = model_downloader
+
+    def _normalize_package_id(self, package_id: str) -> str:
+        """Normalize GitHub URLs to registry IDs if they exist in the registry.
+
+        This prevents duplicate entries when users manually enter GitHub URLs
+        for packages that exist in the registry.
+
+        Args:
+            package_id: Package ID (registry ID or GitHub URL)
+
+        Returns:
+            Normalized package ID (registry ID if URL matches, otherwise unchanged)
+        """
+        # Check if it's a GitHub URL
+        if is_git_url(package_id):
+            # Try to resolve to registry package
+            if registry_pkg := self.global_node_resolver.resolve_github_url(package_id):
+                return registry_pkg.id
+
+        # Return as-is if not a GitHub URL or not in registry
+        return package_id
+
+
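A quick sketch of the normalization behavior described above, with hypothetical package IDs and URLs (none are taken from this diff):

    # Hypothetical example - IDs and URLs are invented for illustration.
    manager._normalize_package_id("https://github.com/example/ComfyUI-SomePack")
    # -> "somepack"        (URL resolves to a registry package)
    manager._normalize_package_id("https://github.com/me/private-pack")
    # -> URL unchanged     (not in the registry)
    manager._normalize_package_id("somepack")
    # -> "somepack"        (already a registry ID; is_git_url() is False)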
+    def _write_single_model_resolution(
+        self,
+        workflow_name: str,
+        resolved: ResolvedModel,
+    ) -> None:
+        """Write a single model resolution immediately (progressive mode).
+
+        Builds ManifestWorkflowModel from resolved model and writes to both:
+        1. Global models table (if resolved)
+        2. Workflow models list (unified)
+
+        Supports download intents (status=unresolved, sources=[URL], relative_path=path).
+
+        Args:
+            workflow_name: Workflow being resolved
+            resolved: ResolvedModel with reference + resolved model + flags
+        """
+        from comfygit_core.models.manifest import ManifestModel, ManifestWorkflowModel
+
+        model_ref = resolved.reference
+        model = resolved.resolved_model
+
+        # Determine category and criticality
+        category = self._get_category_for_node_ref(model_ref)
+
+        # Override criticality if marked optional
+        if resolved.is_optional:
+            criticality = "optional"
+        else:
+            criticality = self._get_default_criticality(category)
+
+        # NEW: Handle download intent case
+        if resolved.match_type == "download_intent":
+            manifest_model = ManifestWorkflowModel(
+                filename=model_ref.widget_value,
+                category=category,
+                criticality=criticality,
+                status="unresolved",  # No hash yet
+                nodes=[model_ref],
+                sources=[resolved.model_source] if resolved.model_source else [],  # URL
+                relative_path=str(resolved.target_path) if resolved.target_path else None,  # Target path
+            )
+            self.pyproject.workflows.add_workflow_model(workflow_name, manifest_model)
+
+            # Invalidate cache so download intent is detected on next resolution
+            self.workflow_cache.invalidate(
+                env_name=self.environment_name,
+                workflow_name=workflow_name,
+            )
+
+            return
+
+        # Build manifest model
+        if model is None:
+            # Model without hash - always unresolved (even if optional).
+            # Optional means "workflow works without it", not "resolved".
+            manifest_model = ManifestWorkflowModel(
+                filename=model_ref.widget_value,
+                category=category,
+                criticality=criticality,
+                status="unresolved",
+                nodes=[model_ref],
+                sources=[],
+            )
+        else:
+            # Resolved model - fetch sources from repository
+            sources = []
+            if model.hash:
+                sources_from_repo = self.model_repository.get_sources(model.hash)
+                sources = [s['url'] for s in sources_from_repo]
+
+            manifest_model = ManifestWorkflowModel(
+                hash=model.hash,
+                filename=model.filename,
+                category=category,
+                criticality=criticality,
+                status="resolved",
+                nodes=[model_ref],
+                sources=sources,
+            )
+
+            # Add to global table with sources
+            global_model = ManifestModel(
+                hash=model.hash,
+                filename=model.filename,
+                size=model.file_size,
+                relative_path=model.relative_path,
+                category=category,
+                sources=sources,
+            )
+            self.pyproject.models.add_model(global_model)
+
+        # Progressive write to workflow
+        self.pyproject.workflows.add_workflow_model(workflow_name, manifest_model)
+
+        # NOTE: Workflow JSON path update moved to batch operation at end of fix_resolution().
+        # Progressive JSON updates fail when cache has stale node IDs (node lookup mismatch);
+        # batch update is more efficient and ensures consistent node IDs within same parse session.
+
+    def _write_model_resolution_grouped(
+        self,
+        workflow_name: str,
+        resolved: ResolvedModel,
+        all_refs: list[WorkflowNodeWidgetRef],
+    ) -> None:
+        """Write model resolution for multiple node references (deduplicated).
+
+        This is the deduplication-aware version of _write_single_model_resolution().
+        When the same model appears in multiple nodes, all refs are written together
+        in a single ManifestWorkflowModel entry.
+
+        Args:
+            workflow_name: Workflow being resolved
+            resolved: ResolvedModel with resolution result
+            all_refs: ALL node references for this model (deduplicated group)
+        """
+        from comfygit_core.models.manifest import ManifestModel, ManifestWorkflowModel
+
+        # Use primary ref for category determination
+        primary_ref = resolved.reference
+        model = resolved.resolved_model
+
+        # Determine category and criticality
+        category = self._get_category_for_node_ref(primary_ref)
+
+        # Override criticality if marked optional
+        if resolved.is_optional:
+            criticality = "optional"
+        else:
+            criticality = self._get_default_criticality(category)
+
+        # Handle download intent case
+        if resolved.match_type == "download_intent":
+            manifest_model = ManifestWorkflowModel(
+                filename=primary_ref.widget_value,
+                category=category,
+                criticality=criticality,
+                status="unresolved",
+                nodes=all_refs,  # ALL REFS!
+                sources=[resolved.model_source] if resolved.model_source else [],
+                relative_path=str(resolved.target_path) if resolved.target_path else None,
+            )
+            self.pyproject.workflows.add_workflow_model(workflow_name, manifest_model)
+
+            # Invalidate cache
+            self.workflow_cache.invalidate(
+                env_name=self.environment_name,
+                workflow_name=workflow_name,
+            )
+            return
+
+        # Build manifest model
+        if model is None:
+            # Model without hash - unresolved
+            manifest_model = ManifestWorkflowModel(
+                filename=primary_ref.widget_value,
+                category=category,
+                criticality=criticality,
+                status="unresolved",
+                nodes=all_refs,  # ALL REFS!
+                sources=[],
+            )
+        else:
+            # Resolved model - fetch sources from repository
+            sources = []
+            if model.hash:
+                sources_from_repo = self.model_repository.get_sources(model.hash)
+                sources = [s['url'] for s in sources_from_repo]
+
+            manifest_model = ManifestWorkflowModel(
+                hash=model.hash,
+                filename=model.filename,
+                category=category,
+                criticality=criticality,
+                status="resolved",
+                nodes=all_refs,  # ALL REFS!
+                sources=sources,
+            )
+
+            # Add to global table with sources
+            global_model = ManifestModel(
+                hash=model.hash,
+                filename=model.filename,
+                size=model.file_size,
+                relative_path=model.relative_path,
+                category=category,
+                sources=sources,
+            )
+            self.pyproject.models.add_model(global_model)
+
+        # Progressive write to workflow
+        self.pyproject.workflows.add_workflow_model(workflow_name, manifest_model)
+
+        # Log grouped write
+        if len(all_refs) > 1:
+            node_ids = ", ".join(f"#{ref.node_id}" for ref in all_refs)
+            logger.debug(f"Wrote grouped model resolution for nodes: {node_ids}")
+
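As an illustration of the grouped write: a model referenced by two loader nodes produces one manifest entry whose nodes list carries both references. All concrete values below are invented:

    # Hypothetical grouped entry (all values invented for illustration):
    ManifestWorkflowModel(
        hash="abc123...",
        filename="sd15.safetensors",
        category="checkpoints",
        criticality="flexible",
        status="resolved",
        nodes=[ref_node_4, ref_node_12],  # both loaders share one entry
        sources=["https://example.com/models/sd15.safetensors"],
    )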
+    def _update_single_workflow_node_path(
+        self,
+        workflow_name: str,
+        model_ref: WorkflowNodeWidgetRef,
+        model: ModelWithLocation,
+    ) -> None:
+        """Update a single node's widget value in workflow JSON.
+
+        Args:
+            workflow_name: Workflow name
+            model_ref: Node widget reference
+            model: Resolved model with path
+        """
+        workflow_path = self.comfyui_workflows / f"{workflow_name}.json"
+        if not workflow_path.exists():
+            return
+
+        workflow = WorkflowRepository.load(workflow_path)
+
+        if model_ref.node_id in workflow.nodes:
+            node = workflow.nodes[model_ref.node_id]
+            if model_ref.widget_index < len(node.widgets_values):
+                display_path = self._strip_base_directory_for_node(
+                    model_ref.node_type,
+                    model.relative_path,
+                )
+                node.widgets_values[model_ref.widget_index] = display_path
+                WorkflowRepository.save(workflow, workflow_path)
+
+                # Invalidate cache since workflow content changed
+                self.workflow_cache.invalidate(
+                    env_name=self.environment_name,
+                    workflow_name=workflow_name,
+                )
+
+                logger.debug(f"Updated workflow JSON node {model_ref.node_id}")
+
+    def _write_single_node_resolution(
+        self,
+        workflow_name: str,
+        node_package_id: str,
+    ) -> None:
+        """Write a single node resolution immediately (progressive mode).
+
+        Updates the workflow.nodes section in pyproject.toml for ONE node.
+        This enables Ctrl+C safety and auto-resume.
+
+        Args:
+            workflow_name: Workflow being resolved
+            node_package_id: Package ID to add to workflow.nodes
+        """
+        # Get existing workflow node packages from pyproject
+        workflows_config = self.pyproject.workflows.get_all_with_resolutions()
+        workflow_config = workflows_config.get(workflow_name, {})
+        existing_nodes = set(workflow_config.get('nodes', []))
+
+        # Add new package (set handles deduplication)
+        existing_nodes.add(node_package_id)
+
+        # Write back to pyproject
+        self.pyproject.workflows.set_node_packs(workflow_name, existing_nodes)
+        logger.debug(f"Added {node_package_id} to workflow '{workflow_name}' nodes")
+
+    def get_workflow_path(self, name: str) -> Path:
+        """Return the path of a workflow in the ComfyUI directory.
+
+        Args:
+            name: Workflow name
+
+        Returns:
+            Path to the workflow file if it exists
+
+        Raises:
+            FileNotFoundError: If the workflow is not found in the ComfyUI directory
+        """
+        workflow_path = self.comfyui_workflows / f"{name}.json"
+        if workflow_path.exists():
+            return workflow_path
+        else:
+            raise FileNotFoundError(f"Workflow '{name}' not found in ComfyUI directory")
+
+    def get_workflow_sync_status(self) -> WorkflowSyncStatus:
+        """Get file-level sync status between ComfyUI and .cec.
+
+        Returns:
+            WorkflowSyncStatus with categorized workflow lists
+        """
+        # Get all workflows from ComfyUI
+        comfyui_workflows = set()
+        if self.comfyui_workflows.exists():
+            for workflow_file in self.comfyui_workflows.glob("*.json"):
+                comfyui_workflows.add(workflow_file.stem)
+
+        # Get all workflows from .cec
+        cec_workflows = set()
+        if self.cec_workflows.exists():
+            for workflow_file in self.cec_workflows.glob("*.json"):
+                cec_workflows.add(workflow_file.stem)
+
+        # Categorize workflows
+        new_workflows = []
+        modified_workflows = []
+        deleted_workflows = []
+        synced_workflows = []
+
+        # Check each ComfyUI workflow
+        for name in comfyui_workflows:
+            if name not in cec_workflows:
+                new_workflows.append(name)
+            else:
+                # Compare contents to detect modifications
+                if self._workflows_differ(name):
+                    modified_workflows.append(name)
+                else:
+                    synced_workflows.append(name)
+
+        # Check for deleted workflows (in .cec but not ComfyUI)
+        for name in cec_workflows:
+            if name not in comfyui_workflows:
+                deleted_workflows.append(name)
+
+        return WorkflowSyncStatus(
+            new=sorted(new_workflows),
+            modified=sorted(modified_workflows),
+            deleted=sorted(deleted_workflows),
+            synced=sorted(synced_workflows),
+        )
+
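An illustrative return value with hypothetical workflow names, one per bucket:

    # Hypothetical example of a returned status:
    WorkflowSyncStatus(
        new=["flux-dev"],        # in ComfyUI, not yet in .cec
        modified=["sdxl-base"],  # present in both, contents differ
        deleted=["old-test"],    # in .cec, removed from ComfyUI
        synced=["portrait"],     # identical after normalization
    )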
+    def _workflows_differ(self, name: str) -> bool:
+        """Check if a workflow differs between ComfyUI and .cec.
+
+        Args:
+            name: Workflow name
+
+        Returns:
+            True if the workflows differ or the .cec copy doesn't exist
+        """
+        # TODO: This will fail if workflow is in a subdirectory in ComfyUI
+        comfyui_file = self.comfyui_workflows / f"{name}.json"
+        cec_file = self.cec_workflows / f"{name}.json"
+
+        if not cec_file.exists():
+            return True
+
+        if not comfyui_file.exists():
+            return False
+
+        try:
+            # Compare file contents, ignoring volatile metadata fields
+            with open(comfyui_file, encoding='utf-8') as f:
+                comfyui_content = json.load(f)
+            with open(cec_file, encoding='utf-8') as f:
+                cec_content = json.load(f)
+
+            # Normalize by removing volatile fields that change between saves
+            comfyui_normalized = normalize_workflow(comfyui_content)
+            cec_normalized = normalize_workflow(cec_content)
+
+            return comfyui_normalized != cec_normalized
+        except (OSError, json.JSONDecodeError) as e:
+            logger.warning(f"Error comparing workflows '{name}': {e}")
+            return True
+
+    def copy_all_workflows(self) -> dict[str, Path | str | None]:
+        """Copy ALL workflows from ComfyUI to .cec for commit.
+
+        Returns:
+            Dictionary mapping each workflow name to its destination Path,
+            None if the copy failed, or "deleted" if the workflow was removed
+            from .cec because it no longer exists in ComfyUI.
+        """
+        results = {}
+
+        if not self.comfyui_workflows.exists():
+            logger.info("No ComfyUI workflows directory found")
+            return results
+
+        # Copy every workflow from ComfyUI to .cec
+        for workflow_file in self.comfyui_workflows.glob("*.json"):
+            name = workflow_file.stem
+            source = self.comfyui_workflows / f"{name}.json"
+            dest = self.cec_workflows / f"{name}.json"
+
+            # Check if workflow was actually modified (not just UI changes)
+            was_modified = self._workflows_differ(name)
+
+            try:
+                shutil.copy2(source, dest)
+                results[name] = dest
+                logger.debug(f"Copied workflow '{name}' to .cec")
+
+                # Invalidate cache for truly modified workflows
+                if was_modified:
+                    self.workflow_cache.invalidate(
+                        env_name=self.environment_name,
+                        workflow_name=name,
+                    )
+                    logger.debug(f"Invalidated cache for modified workflow '{name}'")
+
+            except Exception as e:
+                results[name] = None
+                logger.error(f"Failed to copy workflow '{name}': {e}")
+
+        # Remove workflows from .cec that no longer exist in ComfyUI
+        if self.cec_workflows.exists():
+            comfyui_names = {f.stem for f in self.comfyui_workflows.glob("*.json")}
+            for cec_file in self.cec_workflows.glob("*.json"):
+                name = cec_file.stem
+                if name not in comfyui_names:
+                    try:
+                        cec_file.unlink()
+                        results[name] = "deleted"
+
+                        # Invalidate cache for deleted workflows
+                        self.workflow_cache.invalidate(
+                            env_name=self.environment_name,
+                            workflow_name=name,
+                        )
+                        logger.debug(
+                            f"Deleted workflow '{name}' from .cec (no longer in ComfyUI)"
+                        )
+                    except Exception as e:
+                        logger.error(f"Failed to delete workflow '{name}': {e}")
+
+        return results
+
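An example of the mixed return values, with hypothetical names and paths:

    # Hypothetical result dictionary:
    {
        "portrait": Path(".cec/workflows/portrait.json"),  # copied successfully
        "broken": None,                                     # copy failed
        "old-test": "deleted",                              # removed from .cec
    }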
+    def restore_from_cec(self, name: str) -> bool:
+        """Restore a workflow from .cec to the ComfyUI directory.
+
+        Args:
+            name: Workflow name
+
+        Returns:
+            True if successful, False if the workflow was not found
+        """
+        source = self.cec_workflows / f"{name}.json"
+        dest = self.comfyui_workflows / f"{name}.json"
+
+        if not source.exists():
+            return False
+
+        try:
+            shutil.copy2(source, dest)
+            logger.info(f"Restored workflow '{name}' to ComfyUI")
+            return True
+        except Exception as e:
+            logger.error(f"Failed to restore workflow '{name}': {e}")
+            return False
+
+    def restore_all_from_cec(self, preserve_uncommitted: bool = False) -> dict[str, str]:
+        """Restore all workflows from .cec to ComfyUI.
+
+        Args:
+            preserve_uncommitted: If True, don't delete workflows not in .cec.
+                This enables git-like behavior where uncommitted changes are
+                preserved during branch switches. If False, force ComfyUI to
+                match .cec exactly (current behavior for rollback operations).
+
+        Returns:
+            Dictionary of workflow names to restore status
+        """
+        results = {}
+
+        # Phase 1: Restore workflows that exist in .cec
+        if self.cec_workflows.exists():
+            # Copy every workflow from .cec to ComfyUI
+            for workflow_file in self.cec_workflows.glob("*.json"):
+                name = workflow_file.stem
+                if self.restore_from_cec(name):
+                    results[name] = "restored"
+                else:
+                    results[name] = "failed"
+
+        # Phase 2: Cleanup (ALWAYS run, even if .cec/workflows/ doesn't exist!)
+        # This ensures git semantics: switching to a branch without workflows deletes them
+        if not preserve_uncommitted and self.comfyui_workflows.exists():
+            # Determine what workflows SHOULD exist
+            if self.cec_workflows.exists():
+                cec_names = {f.stem for f in self.cec_workflows.glob("*.json")}
+            else:
+                # No .cec/workflows/ directory = no workflows should exist.
+                # This happens when switching to branches that never had workflows committed.
+                cec_names = set()
+
+            # Remove workflows that shouldn't exist
+            for comfyui_file in self.comfyui_workflows.glob("*.json"):
+                name = comfyui_file.stem
+                if name not in cec_names:
+                    try:
+                        comfyui_file.unlink()
+                        results[name] = "removed"
+                        logger.debug(
+                            f"Removed workflow '{name}' from ComfyUI (not in .cec)"
+                        )
+                    except Exception as e:
+                        logger.error(f"Failed to remove workflow '{name}': {e}")
+
+        return results
+
+    def analyze_single_workflow_status(
+        self,
+        name: str,
+        sync_state: str,
+        workflows_config: dict | None = None,
+        installed_nodes: set[str] | None = None,
+    ) -> WorkflowAnalysisStatus:
+        """Analyze a single workflow for dependencies and resolution status.
+
+        This is read-only - no side effects, no copying, just analysis.
+
+        Args:
+            name: Workflow name
+            sync_state: Sync state ("new", "modified", "deleted", "synced")
+            workflows_config: Pre-loaded workflows config (avoids re-reading pyproject)
+            installed_nodes: Pre-loaded set of installed node IDs (avoids re-reading pyproject)
+
+        Returns:
+            WorkflowAnalysisStatus with complete dependency and resolution info
+        """
+        # Phase 1 & 2: Analyze and resolve (both cached!)
+        dependencies, resolution = self.analyze_and_resolve_workflow(name)
+
+        # Phase 3: Calculate uninstalled nodes (for CLI display).
+        # Load pyproject data if not provided
+        if workflows_config is None:
+            workflows_config = self.pyproject.workflows.get_all_with_resolutions()
+        if installed_nodes is None:
+            installed_nodes = set(self.pyproject.nodes.get_existing().keys())
+
+        # Check if workflow has an entry in pyproject.toml
+        workflow_config = workflows_config.get(name, {})
+        pyproject_nodes = set(workflow_config.get('nodes', []))
+
+        # For NEW workflows not yet in pyproject, use the resolution result.
+        # For workflows already in pyproject (modified, synced, or new from git), use pyproject.
+        if sync_state == "new" and not pyproject_nodes:
+            # Use resolved nodes from current analysis (not yet committed)
+            workflow_needs = set(r.package_id for r in resolution.nodes_resolved if r.package_id)
+        else:
+            # Use pyproject for all other cases
+            workflow_needs = pyproject_nodes
+
+        # Calculate uninstalled = needed - installed
+        uninstalled_nodes = list(workflow_needs - installed_nodes)
+
+        return WorkflowAnalysisStatus(
+            name=name,
+            sync_state=sync_state,
+            dependencies=dependencies,
+            resolution=resolution,
+            uninstalled_nodes=uninstalled_nodes,
+        )
+
+    def get_workflow_status(self) -> DetailedWorkflowStatus:
+        """Get detailed workflow status with full dependency analysis.
+
+        Analyzes ALL workflows in the ComfyUI directory, checking dependencies
+        and resolution status. This is read-only - no copying to .cec.
+
+        Returns:
+            DetailedWorkflowStatus with sync status and analysis for each workflow
+        """
+        # Step 1: Get file sync status (fast)
+        sync_status = self.get_workflow_sync_status()
+
+        # Step 2: Pre-load pyproject data once for all workflows
+        workflows_config = self.pyproject.workflows.get_all_with_resolutions()
+        installed_nodes = set(self.pyproject.nodes.get_existing().keys())
+
+        # Step 3: Analyze all workflows (reusing pyproject data)
+        all_workflow_names = (
+            sync_status.new +
+            sync_status.modified +
+            sync_status.synced
+        )
+
+        analyzed: list[WorkflowAnalysisStatus] = []
+
+        for name in all_workflow_names:
+            # Determine sync state
+            if name in sync_status.new:
+                state = "new"
+            elif name in sync_status.modified:
+                state = "modified"
+            else:
+                state = "synced"
+
+            try:
+                analysis = self.analyze_single_workflow_status(
+                    name,
+                    state,
+                    workflows_config=workflows_config,
+                    installed_nodes=installed_nodes,
+                )
+                analyzed.append(analysis)
+            except Exception as e:
+                logger.error(f"Failed to analyze workflow {name}: {e}")
+                # Continue with other workflows
+
+        return DetailedWorkflowStatus(
+            sync_status=sync_status,
+            analyzed_workflows=analyzed,
+        )
+
+    def analyze_workflow(self, name: str) -> WorkflowDependencies:
+        """Analyze a single workflow for dependencies - with caching.
+
+        NOTE: For best performance, use analyze_and_resolve_workflow(), which
+        caches BOTH analysis and resolution.
+
+        Args:
+            name: Workflow name
+
+        Returns:
+            WorkflowDependencies
+
+        Raises:
+            FileNotFoundError: If the workflow is not found
+        """
+        workflow_path = self.get_workflow_path(name)
+
+        # Check cache first
+        cached = self.workflow_cache.get(
+            env_name=self.environment_name,
+            workflow_name=name,
+            workflow_path=workflow_path,
+            pyproject_path=self.pyproject.path,
+        )
+
+        if cached is not None:
+            logger.debug(f"Cache HIT for workflow '{name}'")
+            return cached.dependencies
+
+        logger.debug(f"Cache MISS for workflow '{name}' - running full analysis")
+
+        # Cache miss - run full analysis
+        parser = WorkflowDependencyParser(workflow_path)
+        deps = parser.analyze_dependencies()
+
+        # Store in cache (no resolution yet)
+        self.workflow_cache.set(
+            env_name=self.environment_name,
+            workflow_name=name,
+            workflow_path=workflow_path,
+            dependencies=deps,
+            resolution=None,
+            pyproject_path=self.pyproject.path,
+        )
+
+        return deps
+
+    def analyze_and_resolve_workflow(self, name: str) -> tuple[WorkflowDependencies, ResolutionResult]:
+        """Analyze and resolve a workflow with full caching.
+
+        This is the preferred method for performance - it caches BOTH analysis and resolution.
+
+        Args:
+            name: Workflow name
+
+        Returns:
+            Tuple of (dependencies, resolution)
+
+        Raises:
+            FileNotFoundError: If the workflow is not found
+        """
+        workflow_path = self.get_workflow_path(name)
+
+        # Check cache
+        cached = self.workflow_cache.get(
+            env_name=self.environment_name,
+            workflow_name=name,
+            workflow_path=workflow_path,
+            pyproject_path=self.pyproject.path,
+        )
+
+        if cached and not cached.needs_reresolution and cached.resolution:
+            # Full cache hit - both analysis and resolution valid
+            logger.debug(f"Cache HIT (full) for workflow '{name}'")
+            return (cached.dependencies, cached.resolution)
+
+        if cached and cached.needs_reresolution:
+            # Partial hit - workflow content valid but resolution stale
+            logger.debug(f"Cache PARTIAL HIT for workflow '{name}' - re-resolving")
+            dependencies = cached.dependencies
+        else:
+            # Full miss - analyze workflow
+            logger.debug(f"Cache MISS for workflow '{name}' - full analysis + resolution")
+            parser = WorkflowDependencyParser(workflow_path)
+            dependencies = parser.analyze_dependencies()
+
+        # Resolve (either from cache miss or stale resolution)
+        resolution = self.resolve_workflow(dependencies)
+
+        # Cache both analysis and resolution
+        self.workflow_cache.set(
+            env_name=self.environment_name,
+            workflow_name=name,
+            workflow_path=workflow_path,
+            dependencies=dependencies,
+            resolution=resolution,
+            pyproject_path=self.pyproject.path,
+        )
+
+        return (dependencies, resolution)
+
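A sketch of a typical call site, assuming `manager` is a constructed WorkflowManager and "portrait" an existing workflow:

    # Sketch of a call site (names assumed for illustration):
    deps, resolution = manager.analyze_and_resolve_workflow("portrait")
    # A second call with unchanged workflow JSON and pyproject.toml is a full
    # cache hit; editing either file downgrades it to a partial hit or a miss.
    print(len(resolution.models_unresolved), "models still unresolved")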
+    def resolve_workflow(self, analysis: WorkflowDependencies) -> ResolutionResult:
+        """Attempt automatic resolution of workflow dependencies.
+
+        Takes the provided analysis and tries to resolve:
+        - Missing nodes → node packages from registry/GitHub using GlobalNodeResolver
+        - Model references → actual model files in the index
+
+        Returns a ResolutionResult showing what was resolved and what remains ambiguous.
+        Does NOT modify pyproject.toml - that happens in fix_workflow().
+
+        Args:
+            analysis: Workflow dependencies from analyze_workflow()
+
+        Returns:
+            ResolutionResult with resolved and unresolved dependencies
+        """
+        nodes_resolved: list[ResolvedNodePackage] = []
+        nodes_unresolved: list[WorkflowNode] = []
+        nodes_ambiguous: list[list[ResolvedNodePackage]] = []
+
+        models_resolved: list[ResolvedModel] = []
+        models_unresolved: list[WorkflowNodeWidgetRef] = []
+        models_ambiguous: list[list[ResolvedModel]] = []
+
+        workflow_name = analysis.workflow_name
+
+        # Load workflow JSON for path comparison
+        try:
+            workflow_path = self.get_workflow_path(workflow_name)
+            workflow = WorkflowRepository.load(workflow_path)
+        except FileNotFoundError:
+            workflow = None
+            logger.warning(f"Could not load workflow '{workflow_name}' for path sync check")
+
+        # Build node resolution context with per-workflow custom_node_map
+        node_context = NodeResolutionContext(
+            installed_packages=self.pyproject.nodes.get_existing(),
+            custom_mappings=self.pyproject.workflows.get_custom_node_map(workflow_name),
+            workflow_name=workflow_name,
+            auto_select_ambiguous=True,  # TODO: Make configurable
+        )
+
+        # Deduplicate node types (same type appears multiple times in workflow).
+        # Prefer nodes with properties when deduplicating.
+        unique_nodes: dict[str, WorkflowNode] = {}
+        for node in analysis.non_builtin_nodes:
+            if node.type not in unique_nodes:
+                unique_nodes[node.type] = node
+            else:
+                # Prefer node with properties over one without
+                if node.properties.get('cnr_id') and not unique_nodes[node.type].properties.get('cnr_id'):
+                    # TODO: Log if the same node type already exists with a different cnr_id
+                    unique_nodes[node.type] = node
+
+        logger.debug(f"Resolving {len(unique_nodes)} unique node types from {len(analysis.non_builtin_nodes)} total non-builtin nodes")
+
+        # Resolve each unique node type with context
+        for node_type, node in unique_nodes.items():
+            logger.debug(f"Trying to resolve node: {node}")
+            resolved_packages = self.global_node_resolver.resolve_single_node_with_context(node, node_context)
+
+            if resolved_packages is None:
+                # Not resolved - trigger strategy
+                logger.debug(f"Node not found: {node}")
+                nodes_unresolved.append(node)
+            elif len(resolved_packages) == 1:
+                # Single match - cleanly resolved
+                logger.debug(f"Resolved node: {resolved_packages[0]}")
+                nodes_resolved.append(resolved_packages[0])
+            else:
+                # Multiple matches from registry (ambiguous)
+                nodes_ambiguous.append(resolved_packages)
+
+        # Build context with full ManifestWorkflowModel objects.
+        # This enables download intent detection and other advanced resolution logic.
+        previous_resolutions = {}
+        workflow_models = self.pyproject.workflows.get_workflow_models(workflow_name)
+
+        for manifest_model in workflow_models:
+            # Store the full ManifestWorkflowModel object for each node reference.
+            # This provides access to hash, sources, status, relative_path, etc.
+            for ref in manifest_model.nodes:
+                previous_resolutions[ref] = manifest_model
+
+        # Get global models table for download intent creation
+        global_models_dict = {}
+        try:
+            all_global_models = self.pyproject.models.get_all()
+            for model in all_global_models:
+                global_models_dict[model.hash] = model
+        except Exception as e:
+            logger.warning(f"Failed to load global models table: {e}")
+
+        model_context = ModelResolutionContext(
+            workflow_name=workflow_name,
+            previous_resolutions=previous_resolutions,
+            global_models=global_models_dict,
+            auto_select_ambiguous=True,  # TODO: Make configurable
+        )
+
+        # Deduplicate model refs by (widget_value, node_type) before resolving.
+        # This ensures status reporting shows accurate counts (not inflated by duplicates).
+        model_groups: dict[tuple[str, str], list[WorkflowNodeWidgetRef]] = {}
+        for model_ref in analysis.found_models:
+            key = (model_ref.widget_value, model_ref.node_type)
+            if key not in model_groups:
+                model_groups[key] = []
+            model_groups[key].append(model_ref)
+
+        # Resolve each unique model group (one resolution per unique model)
+        for (widget_value, node_type), refs_in_group in model_groups.items():
+            # Use first ref as representative for resolution
+            primary_ref = refs_in_group[0]
+
+            result = self.model_resolver.resolve_model(primary_ref, model_context)
+
+            if result is None:
+                # Model not found at all - add primary ref only (deduplicated)
+                logger.debug(f"Failed to resolve model: {primary_ref}")
+                models_unresolved.append(primary_ref)
+            elif len(result) == 1:
+                # Clean resolution (exact match or from pyproject cache)
+                resolved_model = result[0]
+
+                # Check if path needs syncing (only for builtin nodes with resolved models)
+                if workflow and resolved_model.resolved_model:
+                    resolved_model.needs_path_sync = self._check_path_needs_sync(
+                        resolved_model,
+                        workflow,
+                    )
+
+                logger.debug(f"Resolved model: {resolved_model}")
+                models_resolved.append(resolved_model)
+            elif len(result) > 1:
+                # Ambiguous - multiple matches (use primary ref)
+                logger.debug(f"Ambiguous model: {result}")
+                models_ambiguous.append(result)
+            else:
+                # No resolution possible - add primary ref only (deduplicated)
+                logger.debug(f"Failed to resolve model: {primary_ref}, result: {result}")
+                models_unresolved.append(primary_ref)
+
+        return ResolutionResult(
+            workflow_name=workflow_name,
+            nodes_resolved=nodes_resolved,
+            nodes_unresolved=nodes_unresolved,
+            nodes_ambiguous=nodes_ambiguous,
+            models_resolved=models_resolved,
+            models_unresolved=models_unresolved,
+            models_ambiguous=models_ambiguous,
+        )
+
+    def fix_resolution(
+        self,
+        resolution: ResolutionResult,
+        node_strategy: NodeResolutionStrategy | None = None,
+        model_strategy: ModelResolutionStrategy | None = None,
+    ) -> ResolutionResult:
+        """Fix remaining issues using strategies with progressive writes.
+
+        Takes the ResolutionResult from resolve_workflow() and uses strategies to
+        resolve ambiguities. ALL user choices are written immediately (progressive mode):
+        - Each model resolution writes to pyproject + workflow JSON
+        - Each node mapping writes to the per-workflow custom_node_map
+        - Ctrl+C preserves partial progress
+
+        Args:
+            resolution: Result from resolve_workflow()
+            node_strategy: Strategy for handling unresolved/ambiguous nodes
+            model_strategy: Strategy for handling ambiguous/missing models
+
+        Returns:
+            Updated ResolutionResult with fixes applied
+        """
+        workflow_name = resolution.workflow_name
+
+        # Start with what was already resolved
+        nodes_to_add = list(resolution.nodes_resolved)
+        models_to_add = list(resolution.models_resolved)
+
+        remaining_nodes_ambiguous: list[list[ResolvedNodePackage]] = []
+        remaining_nodes_unresolved: list[WorkflowNode] = []
+        remaining_models_ambiguous: list[list[ResolvedModel]] = []
+        remaining_models_unresolved: list[WorkflowNodeWidgetRef] = []
+
+        # ========== NODE RESOLUTION (UNIFIED) ==========
+
+        if not node_strategy:
+            # No strategy - keep everything as unresolved
+            remaining_nodes_ambiguous = list(resolution.nodes_ambiguous)
+            remaining_nodes_unresolved = list(resolution.nodes_unresolved)
+        else:
+            # Build context with search function
+            node_context = NodeResolutionContext(
+                installed_packages=self.pyproject.nodes.get_existing(),
+                custom_mappings=self.pyproject.workflows.get_custom_node_map(workflow_name),
+                workflow_name=workflow_name,
+                search_fn=self.global_node_resolver.search_packages,
+                auto_select_ambiguous=True,  # TODO: Make configurable
+            )
+
+            # Unified loop: handle both ambiguous and unresolved nodes
+            all_unresolved_nodes: list[tuple[str, list[ResolvedNodePackage]]] = []
+
+            # Ambiguous nodes (have candidates)
+            for packages in resolution.nodes_ambiguous:
+                if packages:
+                    node_type = packages[0].node_type
+                    all_unresolved_nodes.append((node_type, packages))
+
+            # Missing nodes (no candidates)
+            for node in resolution.nodes_unresolved:
+                all_unresolved_nodes.append((node.type, []))
+
+            # Resolve each node
+            for node_type, candidates in all_unresolved_nodes:
+                try:
+                    selected = node_strategy.resolve_unknown_node(node_type, candidates, node_context)
+
+                    if selected is None:
+                        # User skipped - remains unresolved
+                        if candidates:
+                            remaining_nodes_ambiguous.append(candidates)
+                        else:
+                            # Create WorkflowNode for unresolved tracking
+                            remaining_nodes_unresolved.append(WorkflowNode(id="", type=node_type))
+                        logger.debug(f"Skipped: {node_type}")
+                        continue
+
+                    # Handle optional nodes
+                    if selected.match_type == 'optional':
+                        # PROGRESSIVE: Save optional node mapping
+                        if workflow_name:
+                            self.pyproject.workflows.set_custom_node_mapping(
+                                workflow_name, node_type, None
+                            )
+                        logger.info(f"Marked node '{node_type}' as optional")
+                        continue
+
+                    # Handle resolved nodes
+                    nodes_to_add.append(selected)
+                    node_id = selected.package_data.id if selected.package_data else selected.package_id
+
+                    if not node_id:
+                        logger.warning(f"No package ID for resolved node '{node_type}'")
+                        continue
+
+                    normalized_id = self._normalize_package_id(node_id)
+
+                    # PROGRESSIVE: Save user-confirmed node mapping
+                    user_intervention_types = ("user_confirmed", "manual", "heuristic")
+                    if selected.match_type in user_intervention_types and workflow_name:
+                        self.pyproject.workflows.set_custom_node_mapping(
+                            workflow_name, node_type, normalized_id
+                        )
+                        logger.info(f"Saved custom_node_map: {node_type} -> {normalized_id}")
+
+                    # PROGRESSIVE: Write to workflow.nodes immediately
+                    if workflow_name:
+                        self._write_single_node_resolution(workflow_name, normalized_id)
+
+                    logger.info(f"Resolved node: {node_type} -> {normalized_id}")
+
+                except Exception as e:
+                    logger.error(f"Failed to resolve {node_type}: {e}")
+                    if candidates:
+                        remaining_nodes_ambiguous.append(candidates)
+                    else:
+                        remaining_nodes_unresolved.append(WorkflowNode(id="", type=node_type))
+
+        # ========== MODEL RESOLUTION (NEW UNIFIED FLOW) ==========
+
+        if not model_strategy:
+            # No strategy - keep everything as unresolved
+            remaining_models_ambiguous = list(resolution.models_ambiguous)
+            remaining_models_unresolved = list(resolution.models_unresolved)
+        else:
+            # Get global models table for download intent creation
+            global_models_dict = {}
+            try:
+                all_global_models = self.pyproject.models.get_all()
+                for model in all_global_models:
+                    global_models_dict[model.hash] = model
+            except Exception as e:
+                logger.warning(f"Failed to load global models table: {e}")
+
+            # Build context with search function and downloader
+            model_context = ModelResolutionContext(
+                workflow_name=workflow_name,
+                global_models=global_models_dict,
+                search_fn=self.search_models,
+                downloader=self.downloader,
+                auto_select_ambiguous=True,  # TODO: Make configurable
+            )
+
+            # Unified loop: handle both ambiguous and unresolved models
+            all_unresolved_models: list[tuple[WorkflowNodeWidgetRef, list[ResolvedModel]]] = []
+
+            # Ambiguous models (have candidates)
+            for resolved_model_list in resolution.models_ambiguous:
+                if resolved_model_list:
+                    model_ref = resolved_model_list[0].reference
+                    all_unresolved_models.append((model_ref, resolved_model_list))
+
+            # Missing models (no candidates)
+            for model_ref in resolution.models_unresolved:
+                all_unresolved_models.append((model_ref, []))
+
+            # DEDUPLICATION: Group by (widget_value, node_type)
+            model_groups: dict[tuple[str, str], list[tuple[WorkflowNodeWidgetRef, list[ResolvedModel]]]] = {}
+
+            for model_ref, candidates in all_unresolved_models:
+                # Group key: (widget_value, node_type).
+                # This ensures the same model in the same loader type gets resolved once.
+                key = (model_ref.widget_value, model_ref.node_type)
+                if key not in model_groups:
+                    model_groups[key] = []
+                model_groups[key].append((model_ref, candidates))
+
+            # Resolve each group (one prompt per unique model)
+            for (widget_value, node_type), group in model_groups.items():
+                # Extract all refs and candidates
+                all_refs_in_group = [ref for ref, _ in group]
+                primary_ref, primary_candidates = group[0]
+
+                # Log deduplication for debugging
+                if len(all_refs_in_group) > 1:
+                    node_ids = ", ".join(f"#{ref.node_id}" for ref in all_refs_in_group)
+                    logger.info(f"Deduplicating model '{widget_value}' found in nodes: {node_ids}")
+
+                try:
+                    # Prompt user once for this model
+                    resolved = model_strategy.resolve_model(primary_ref, primary_candidates, model_context)
+
+                    if resolved is None:
+                        # User skipped - remains unresolved for ALL refs
+                        for ref in all_refs_in_group:
+                            remaining_models_unresolved.append(ref)
+                        logger.debug(f"Skipped: {widget_value}")
+                        continue
+
+                    # PROGRESSIVE: Write with ALL refs at once
+                    if workflow_name:
+                        self._write_model_resolution_grouped(workflow_name, resolved, all_refs_in_group)
+
+                    # Add to results for ALL refs (needed for update_workflow_model_paths)
+                    for ref in all_refs_in_group:
+                        # Create a ResolvedModel for each ref pointing to the same resolved model
+                        ref_resolved = ResolvedModel(
+                            workflow=workflow_name,
+                            reference=ref,
+                            resolved_model=resolved.resolved_model,
+                            model_source=resolved.model_source,
+                            is_optional=resolved.is_optional,
+                            match_type=resolved.match_type,
+                            match_confidence=resolved.match_confidence,
+                            target_path=resolved.target_path,
+                            needs_path_sync=resolved.needs_path_sync,
+                        )
+                        models_to_add.append(ref_resolved)
+
+                    # Log result
+                    if resolved.is_optional:
+                        logger.info(f"Marked as optional: {widget_value}")
+                    elif resolved.resolved_model:
+                        logger.info(f"Resolved: {widget_value} → {resolved.resolved_model.filename}")
+                    else:
+                        logger.info(f"Marked as optional (unresolved): {widget_value}")
+
+                except Exception as e:
+                    logger.error(f"Failed to resolve {widget_value}: {e}")
+                    for ref in all_refs_in_group:
+                        remaining_models_unresolved.append(ref)
+
+        # Build updated result
+        result = ResolutionResult(
+            workflow_name=workflow_name,
+            nodes_resolved=nodes_to_add,
+            nodes_unresolved=remaining_nodes_unresolved,
+            nodes_ambiguous=remaining_nodes_ambiguous,
+            models_resolved=models_to_add,
+            models_unresolved=remaining_models_unresolved,
+            models_ambiguous=remaining_models_ambiguous,
+        )
+
+        # Batch update workflow JSON with all resolved model paths.
+        # This ensures all model paths are synced after interactive resolution.
+        # Uses consistent node IDs from the same parse session (no cache mismatch issues).
+        self.update_workflow_model_paths(result)
+
+        return result
+
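A sketch of the intended interactive flow. The strategy objects would come from comfygit_core.strategies (listed in the file table above), but their constructors are not visible in this diff, so they are elided:

    # Sketch (strategy construction elided - not shown in this diff):
    deps = manager.analyze_workflow("portrait")
    resolution = manager.resolve_workflow(deps)
    fixed = manager.fix_resolution(
        resolution,
        node_strategy=...,   # e.g. a confirmation-based strategy
        model_strategy=...,  # e.g. an auto strategy
    )
    # Because writes are progressive, interrupting with Ctrl+C loses at
    # most the item currently being prompted for.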
1209
+ def apply_resolution(
1210
+ self,
1211
+ resolution: ResolutionResult,
1212
+ config: dict | None = None
1213
+ ) -> None:
1214
+ """Apply resolutions with smart defaults and reconciliation.
1215
+
1216
+ Auto-applies sensible criticality defaults, etc.
1217
+
1218
+ Args:
1219
+ resolution: Result with auto-resolved dependencies from resolve_workflow()
1220
+ config: Optional in-memory config for batched writes. If None, loads and saves immediately.
1221
+ """
1222
+ from comfygit_core.models.manifest import ManifestModel, ManifestWorkflowModel
1223
+
1224
+ is_batch = config is not None
1225
+ if not is_batch:
1226
+ config = self.pyproject.load()
1227
+
1228
+ workflow_name = resolution.workflow_name
1229
+
1230
+ # Phase 1: Reconcile nodes (unchanged)
1231
+ target_node_pack_ids = set()
1232
+ target_node_types = set()
1233
+
1234
+ for pkg in resolution.nodes_resolved:
1235
+ if pkg.is_optional:
1236
+ target_node_types.add(pkg.node_type)
1237
+ elif pkg.package_id is not None:
1238
+ normalized_id = self._normalize_package_id(pkg.package_id)
1239
+ target_node_pack_ids.add(normalized_id)
1240
+ target_node_types.add(pkg.node_type)
1241
+
1242
+ for node in resolution.nodes_unresolved:
1243
+ target_node_types.add(node.type)
1244
+ for packages in resolution.nodes_ambiguous:
1245
+ if packages:
1246
+ target_node_types.add(packages[0].node_type)
1247
+
1248
+ if target_node_pack_ids:
1249
+ self.pyproject.workflows.set_node_packs(workflow_name, target_node_pack_ids, config=config)
1250
+ else:
1251
+ self.pyproject.workflows.set_node_packs(workflow_name, None, config=config)
1252
+
1253
+ # Reconcile custom_node_map
1254
+ existing_custom_map = self.pyproject.workflows.get_custom_node_map(workflow_name, config=config)
1255
+ for node_type in list(existing_custom_map.keys()):
1256
+ if node_type not in target_node_types:
1257
+ self.pyproject.workflows.remove_custom_node_mapping(workflow_name, node_type, config=config)
1258
+
1259
+ # Phase 2: Build ManifestWorkflowModel entries with smart defaults
1260
+ manifest_models: list[ManifestWorkflowModel] = []
1261
+
1262
+ # Group resolved models by hash
1263
+ hash_to_refs: dict[str, list[WorkflowNodeWidgetRef]] = {}
1264
+ for resolved in resolution.models_resolved:
1265
+ if resolved.resolved_model:
1266
+ model_hash = resolved.resolved_model.hash
1267
+ if model_hash not in hash_to_refs:
1268
+ hash_to_refs[model_hash] = []
1269
+ hash_to_refs[model_hash].append(resolved.reference)
1270
+ elif resolved.match_type == "download_intent":
1271
+ # Download intent from previous session - preserve it in manifest
1272
+ category = self._get_category_for_node_ref(resolved.reference)
1273
+ manifest_model = ManifestWorkflowModel(
1274
+ filename=resolved.reference.widget_value,
1275
+ category=category,
1276
+ criticality="flexible",
1277
+ status="unresolved",
1278
+ nodes=[resolved.reference],
1279
+ sources=[resolved.model_source] if resolved.model_source else [],
1280
+ relative_path=str(resolved.target_path) if resolved.target_path else None
1281
+ )
1282
+ manifest_models.append(manifest_model)
1283
+ elif resolved.is_optional:
1284
+ # Type C: Optional unresolved (user marked as optional, no model data)
1285
+ category = self._get_category_for_node_ref(resolved.reference)
1286
+ manifest_model = ManifestWorkflowModel(
1287
+ filename=resolved.reference.widget_value,
1288
+ category=category,
1289
+ criticality="optional",
1290
+ status="unresolved",
1291
+ nodes=[resolved.reference],
1292
+ sources=[]
1293
+ )
1294
+ manifest_models.append(manifest_model)
+
+        # Create manifest entries for resolved models
+        for model_hash, refs in hash_to_refs.items():
+            # Get model from first resolved entry
+            model = next(
+                (r.resolved_model for r in resolution.models_resolved if r.resolved_model and r.resolved_model.hash == model_hash),
+                None
+            )
+            if not model:
+                continue
+
+            # Determine criticality with smart defaults
+            criticality = self._get_default_criticality(model.category)
+
+            # Fetch sources from repository to enrich global table
+            sources_from_repo = self.model_repository.get_sources(model.hash)
+            sources = [s['url'] for s in sources_from_repo]
+
+            # Workflow model: lightweight reference (no sources - hash is the key)
+            manifest_model = ManifestWorkflowModel(
+                hash=model.hash,
+                filename=model.filename,
+                category=model.category,
+                criticality=criticality,
+                status="resolved",
+                nodes=refs,
+                sources=[]  # Empty - sources stored in global table only
+            )
+            manifest_models.append(manifest_model)
+
+            # Global table: enrich with sources from SQLite
+            global_model = ManifestModel(
+                hash=model.hash,
+                filename=model.filename,
+                size=model.file_size,
+                relative_path=model.relative_path,
+                category=model.category,
+                sources=sources  # From SQLite - authoritative source
+            )
+            self.pyproject.models.add_model(global_model, config=config)
+
+        # Load existing workflow models to preserve download intents from previous sessions
+        existing_workflow_models = self.pyproject.workflows.get_workflow_models(workflow_name, config=config)
+        existing_by_filename = {m.filename: m for m in existing_workflow_models}
+
+        # Add unresolved models
+        for ref in resolution.models_unresolved:
+            category = self._get_category_for_node_ref(ref)
+            criticality = self._get_default_criticality(category)
+
+            # Check if this model already has a download intent from a previous session
+            existing = existing_by_filename.get(ref.widget_value)
+            sources = []
+            relative_path = None
+            if existing and existing.status == "unresolved" and existing.sources:
+                # Preserve download intent from previous session
+                sources = existing.sources
+                relative_path = existing.relative_path
+                logger.debug(f"Preserving download intent for '{ref.widget_value}': sources={sources}, path={relative_path}")
+
+            manifest_model = ManifestWorkflowModel(
+                filename=ref.widget_value,
+                category=category,
+                criticality=criticality,
+                status="unresolved",
+                nodes=[ref],
+                sources=sources,
+                relative_path=relative_path
+            )
+            manifest_models.append(manifest_model)
+
+        # Write all models to workflow
+        self.pyproject.workflows.set_workflow_models(workflow_name, manifest_models, config=config)
+
+        # Clean up deleted workflows from pyproject.toml
+        # This handles both:
+        # 1. Committed workflows that were deleted (in .cec, in pyproject, not in ComfyUI)
+        # 2. Resolved-but-not-committed workflows (in pyproject, not in .cec, not in ComfyUI)
+        # Read from in-memory config instead of loading from disk
+        workflows_in_pyproject = set(config.get('tool', {}).get('comfygit', {}).get('workflows', {}).keys())
+        workflows_in_comfyui = set()
+        if self.comfyui_workflows.exists():
+            for workflow_file in self.comfyui_workflows.glob("*.json"):
+                workflows_in_comfyui.add(workflow_file.stem)
+
+        workflows_to_remove = workflows_in_pyproject - workflows_in_comfyui
+        if workflows_to_remove:
+            removed_count = self.pyproject.workflows.remove_workflows(list(workflows_to_remove), config=config)
+            if removed_count > 0:
+                logger.info(f"Cleaned up {removed_count} deleted workflow(s) from pyproject.toml")
+
+        # Clean up orphaned models (must run AFTER workflow sections are removed)
+        self.pyproject.models.cleanup_orphans(config=config)
+
+        # Save if not in batch mode
+        if not is_batch:
+            self.pyproject.save(config)
+
+        # Phase 3: Update workflow JSON with resolved paths
+        self.update_workflow_model_paths(resolution)
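# A sketch of the two places the loops above write to, shown as the
# pyproject.toml layout they would produce. Only the [tool.comfygit.workflows]
# key is confirmed (by the cleanup code above); the global models-table name
# and all concrete values here are illustrative assumptions.
#
#   [tool.comfygit.workflows.my_workflow]
#   models = [
#       # workflow entry: lightweight, keyed by hash, sources left empty
#       { hash = "abc123", filename = "model.ckpt", category = "checkpoints",
#         criticality = "required", status = "resolved", sources = [] },
#   ]
#
#   [tool.comfygit.models.abc123]          # hypothetical global-table path
#   filename = "model.ckpt"
#   relative_path = "checkpoints/model.ckpt"
#   size = 4265380512
#   sources = ["https://example.com/model.ckpt"]   # authoritative, from SQLite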
+
+    def update_workflow_model_paths(
+        self,
+        resolution: ResolutionResult
+    ) -> None:
+        """Update workflow JSON files with resolved and stripped model paths.
+
+        IMPORTANT: Only updates paths for BUILTIN ComfyUI nodes. Custom nodes are
+        skipped to preserve their original widget values and avoid breaking validation.
+
+        This strips the base directory prefix (e.g., 'checkpoints/') from model paths
+        because ComfyUI builtin node loaders automatically prepend their base directories.
+
+        See: docs/knowledge/comfyui-node-loader-base-directories.md for detailed explanation.
+
+        Args:
+            resolution: Resolution result with ref→model mapping
+
+        Raises:
+            FileNotFoundError: If the workflow file is not found
+        """
+        workflow_name = resolution.workflow_name
+
+        # Load workflow from ComfyUI directory
+        workflow_path = self.get_workflow_path(workflow_name)
+
+        workflow = WorkflowRepository.load(workflow_path)
+
+        updated_count = 0
+        skipped_count = 0
+
+        # Update each resolved model's path in the workflow
+        for resolved in resolution.models_resolved:
+            ref = resolved.reference
+            model = resolved.resolved_model
+
+            # Skip if model is None (Type 1 optional unresolved)
+            if model is None:
+                continue
+
+            node_id = ref.node_id
+            widget_idx = ref.widget_index
+
+            # Skip custom nodes - they have undefined path behavior
+            if not self.model_resolver.model_config.is_model_loader_node(ref.node_type):
+                logger.debug(
+                    f"Skipping path update for custom node '{ref.node_type}' "
+                    f"(node_id={node_id}, widget={widget_idx}). "
+                    f"Custom nodes manage their own model paths."
+                )
+                skipped_count += 1
+                continue
+
+            # Update the node's widget value with resolved path
+            if node_id in workflow.nodes:
+                node = workflow.nodes[node_id]
+                if widget_idx < len(node.widgets_values):
+                    old_path = node.widgets_values[widget_idx]
+                    # Strip base directory prefix for ComfyUI BUILTIN node loaders
+                    # e.g., "checkpoints/sd15/model.ckpt" → "sd15/model.ckpt"
+                    display_path = self._strip_base_directory_for_node(ref.node_type, model.relative_path)
+                    node.widgets_values[widget_idx] = display_path
+                    logger.debug(f"Updated node {node_id} widget {widget_idx}: {old_path} → {display_path}")
+                    updated_count += 1
+
+        # Only save if we actually updated something
+        if updated_count > 0:
+            WorkflowRepository.save(workflow, workflow_path)
+
+            # Invalidate cache since workflow content changed
+            self.workflow_cache.invalidate(
+                env_name=self.environment_name,
+                workflow_name=workflow_name
+            )
+
+            logger.info(
+                f"Updated workflow JSON: {workflow_path} "
+                f"({updated_count} builtin nodes updated, {skipped_count} custom nodes preserved)"
+            )
+        else:
+            logger.debug(f"No path updates needed for workflow '{workflow_name}'")
+
+        # Note: We intentionally do NOT update .cec here
+        # The .cec copy represents "committed state" and should only be updated during commit
+        # This ensures workflow status correctly shows as "new" or "modified" until committed
+
+    def _get_default_criticality(self, category: str) -> str:
+        """Determine smart default criticality based on model category.
+
+        Args:
+            category: Model category (checkpoints, loras, etc.)
+
+        Returns:
+            Criticality level: "required", "flexible", or "optional"
+        """
+        return CATEGORY_CRITICALITY_DEFAULTS.get(category, "required")
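# A plausible shape for CATEGORY_CRITICALITY_DEFAULTS, which is defined
# elsewhere in the package; the specific entries below are illustrative
# assumptions, not the shipped values. Unknown categories fall back to
# "required" via the .get() default above.
CATEGORY_CRITICALITY_DEFAULTS_SKETCH: dict[str, str] = {
    "checkpoints": "required",     # the workflow cannot run without its checkpoint
    "loras": "flexible",           # a near-match LoRA may be an acceptable substitute
    "upscale_models": "optional",  # output still renders without an upscaler
}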
+
+    def _get_category_for_node_ref(self, node_ref: WorkflowNodeWidgetRef) -> str:
+        """Get the model category for a workflow node widget reference.
+
+        Args:
+            node_ref: Widget reference carrying the ComfyUI node type and widget value
+
+        Returns:
+            Model category string
+        """
+        # First see if the node type is explicitly mapped to a category.
+        node_type = node_ref.node_type
+        directories = self.model_resolver.model_config.get_directories_for_node(node_type)
+        if directories:
+            logger.debug(f"Found directory mapping for node type '{node_type}': {directories}")
+            return directories[0]  # Use first directory as category
+
+        # Otherwise, derive the category from the widget value's path:
+        from ..utils.model_categories import get_model_category
+        category = get_model_category(node_ref.widget_value)
+        logger.debug(f"Derived category from widget value '{node_ref.widget_value}': {category}")
+        return category
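# A sketch of the two lookup paths, using a local stand-in dataclass so the
# example is self-contained; WorkflowNodeWidgetRef's real constructor may
# differ (the fields here are inferred from how the method uses it):
from dataclasses import dataclass

@dataclass
class RefSketch:
    node_id: str
    node_type: str
    widget_index: int
    widget_value: str

builtin_ref = RefSketch("4", "LoraLoader", 0, "style.safetensors")
custom_ref = RefSketch("7", "MyCustomLoader", 0, "loras/style.safetensors")
# builtin_ref: the node mapping resolves "LoraLoader" → ["loras"], so the
#   category is "loras" without inspecting the widget value.
# custom_ref: no node mapping exists, so get_model_category() would have to
#   derive "loras" from the widget path prefix (assumed behavior).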
+
+    def _check_path_needs_sync(
+        self,
+        resolved: ResolvedModel,
+        workflow: Workflow
+    ) -> bool:
+        """Check if a resolved model's path differs from workflow JSON.
+
+        Args:
+            resolved: ResolvedModel with reference and resolved_model
+            workflow: Loaded workflow JSON
+
+        Returns:
+            True if workflow path differs from expected resolved path
+        """
+        ref = resolved.reference
+        model = resolved.resolved_model
+
+        # Only check builtin nodes (custom nodes manage their own paths)
+        if not self.model_resolver.model_config.is_model_loader_node(ref.node_type):
+            return False
+
+        # Can't sync if model didn't resolve
+        if not model:
+            return False
+
+        # Get expected path after stripping base directory (already normalized to forward slashes)
+        expected_path = self._strip_base_directory_for_node(
+            ref.node_type,
+            model.relative_path
+        )
+
+        # Normalize current path for comparison (handles Windows backslashes)
+        current_path = ref.widget_value.replace('\\', '/')
+
+        # If paths differ, check if current path exists with same hash (duplicate models)
+        if current_path != expected_path:
+            # Try to find the current path in model repository
+            # For builtin loaders, we need to reconstruct the full path
+            all_models = self.model_repository.get_all_models()
+
+            # Try exact match with current path
+            current_matches = self.model_resolver._try_exact_match(current_path, all_models)
+
+            # If not found, try reconstructing the path (for builtin loaders)
+            if not current_matches and self.model_resolver.model_config.is_model_loader_node(ref.node_type):
+                reconstructed_paths = self.model_resolver.model_config.reconstruct_model_path(
+                    ref.node_type, current_path
+                )
+                for path in reconstructed_paths:
+                    current_matches = self.model_resolver._try_exact_match(path, all_models)
+                    if current_matches:
+                        break
+
+            # If current path exists and has same hash as resolved model, no sync needed
+            if current_matches and current_matches[0].hash == model.hash:
+                return False
+
+        # Return True if paths differ and current path is invalid or has different hash
+        return current_path != expected_path
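# A worked example of the duplicate-model escape hatch above, with
# hypothetical paths and hashes standing in for the repository lookups:
expected_path = "sd15/model.ckpt"     # stripped path of the resolved model
current_path = "backup/model.ckpt"    # what the workflow widget currently says
hash_at_current_path = "abc123"       # assume this file exists in the repository
resolved_hash = "abc123"

# The paths differ, but the current path points at a byte-identical duplicate,
# so no sync is reported:
needs_sync = (current_path != expected_path) and (hash_at_current_path != resolved_hash)
print(needs_sync)  # False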
+
+    def _strip_base_directory_for_node(self, node_type: str, relative_path: str) -> str:
+        """Strip base directory prefix from path for BUILTIN ComfyUI node loaders.
+
+        ⚠️ IMPORTANT: This function should ONLY be called for builtin node types that
+        are in the node_directory_mappings. Custom nodes should skip path updates entirely.
+
+        ComfyUI builtin node loaders automatically prepend their base directories:
+        - CheckpointLoaderSimple prepends "checkpoints/"
+        - LoraLoader prepends "loras/"
+        - VAELoader prepends "vae/"
+
+        The widget value should NOT include the base directory, to avoid path doubling.
+
+        See: docs/knowledge/comfyui-node-loader-base-directories.md for detailed explanation.
+
+        Args:
+            node_type: BUILTIN ComfyUI node type (e.g., "CheckpointLoaderSimple")
+            relative_path: Full path relative to models/ (e.g., "checkpoints/SD1.5/model.safetensors")
+
+        Returns:
+            Path without base directory prefix (e.g., "SD1.5/model.safetensors")
+
+        Examples:
+            >>> _strip_base_directory_for_node("CheckpointLoaderSimple", "checkpoints/sd15/model.ckpt")
+            'sd15/model.ckpt'
+
+            >>> _strip_base_directory_for_node("LoraLoader", "loras/style.safetensors")
+            'style.safetensors'
+
+            >>> _strip_base_directory_for_node("CheckpointLoaderSimple", "checkpoints/a/b/c/model.ckpt")
+            'a/b/c/model.ckpt'  # Subdirectories preserved
+        """
+        from ..configs.model_config import ModelConfig
+
+        # Normalize to forward slashes for cross-platform compatibility (Windows uses backslashes)
+        relative_path = relative_path.replace('\\', '/')
+
+        model_config = ModelConfig.load()
+        base_dirs = model_config.get_directories_for_node(node_type)
+
+        # Warn if called for a custom node (the caller should have skipped it)
+        if not base_dirs:
+            logger.warning(
+                f"_strip_base_directory_for_node called for unknown/custom node type: {node_type}. "
+                f"Custom nodes should skip path updates entirely. Returning path unchanged."
+            )
+            return relative_path
+
+        for base_dir in base_dirs:
+            prefix = base_dir + "/"
+            if relative_path.startswith(prefix):
+                # Strip the base directory but preserve subdirectories
+                return relative_path[len(prefix):]
+
+        # Path doesn't have expected prefix - return unchanged
+        return relative_path
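# A self-contained sketch of why the prefix must be stripped: ComfyUI's
# builtin loaders join the widget value onto their base directory, so a
# widget that already contains "checkpoints/" would double the prefix.
def strip_prefix_sketch(base_dir: str, relative_path: str) -> str:
    """Standalone version of the stripping rule above."""
    prefix = base_dir + "/"
    return relative_path[len(prefix):] if relative_path.startswith(prefix) else relative_path

assert strip_prefix_sketch("checkpoints", "checkpoints/sd15/model.ckpt") == "sd15/model.ckpt"
# Without stripping, a loader that prepends "checkpoints/" would go looking for
# "checkpoints/checkpoints/sd15/model.ckpt" and fail to find the file.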
+
+    def search_models(
+        self,
+        search_term: str,
+        node_type: str | None = None,
+        limit: int = 9
+    ) -> list[ScoredMatch]:
+        """Search for models using SQL + fuzzy matching.
+
+        Combines fast SQL LIKE search with difflib scoring for ranked results.
+
+        Args:
+            search_term: Search term (filename, partial name, etc.)
+            node_type: Optional node type to filter by category
+            limit: Maximum number of results to return
+
+        Returns:
+            List of ScoredMatch objects sorted by relevance (highest first)
+        """
+        from difflib import SequenceMatcher
+
+        from ..configs.model_config import ModelConfig
+
+        # If node_type provided, filter by category
+        if node_type:
+            model_config = ModelConfig.load()
+            directories = model_config.get_directories_for_node(node_type)
+
+            if directories:
+                # Get models from all relevant categories
+                candidates = []
+                for directory in directories:
+                    models = self.model_repository.get_by_category(directory)
+                    candidates.extend(models)
+            else:
+                # Unknown node type - search all models
+                candidates = self.model_repository.search(search_term)
+        else:
+            # No node type - search all models
+            candidates = self.model_repository.search(search_term)
+
+        if not candidates:
+            return []
+
+        # Score candidates using fuzzy matching
+        scored = []
+        search_lower = search_term.lower()
+        search_stem = Path(search_term).stem.lower()
+
+        for model in candidates:
+            filename_lower = model.filename.lower()
+            filename_stem = Path(model.filename).stem.lower()
+
+            # Calculate scores for both full filename and stem
+            full_score = SequenceMatcher(None, search_lower, filename_lower).ratio()
+            stem_score = SequenceMatcher(None, search_stem, filename_stem).ratio()
+
+            # Use best score
+            score = max(full_score, stem_score)
+
+            # Boost exact substring matches
+            if search_lower in filename_lower:
+                score = min(1.0, score + 0.15)
+
+            if score > 0.3:  # Minimum 30% similarity threshold
+                confidence = "high" if score > 0.8 else "good" if score > 0.6 else "possible"
+                scored.append(ScoredMatch(
+                    model=model,
+                    score=score,
+                    confidence=confidence
+                ))
+
+        # Sort by score descending
+        scored.sort(key=lambda x: x.score, reverse=True)
+
+        return scored[:limit]
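# A minimal, stdlib-only sketch of the ranking rule above (difflib ratio on
# the full filename and on the stem, plus a +0.15 substring boost); the
# filename is hypothetical:
from difflib import SequenceMatcher
from pathlib import Path

def score_filename(search_term: str, filename: str) -> float:
    s, f = search_term.lower(), filename.lower()
    full = SequenceMatcher(None, s, f).ratio()
    stem = SequenceMatcher(None, Path(s).stem, Path(f).stem).ratio()
    score = max(full, stem)
    return min(1.0, score + 0.15) if s in f else score

# "sd15" is an exact substring of this filename, so it earns the boost:
print(round(score_filename("sd15", "sd15_pruned.safetensors"), 2))  # ≈ 0.68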
+
+    def update_model_criticality(
+        self,
+        workflow_name: str,
+        model_identifier: str,
+        new_criticality: str
+    ) -> bool:
+        """Update criticality for a model in a workflow.
+
+        Allows changing model criticality after initial resolution without
+        re-resolving the entire workflow.
+
+        Args:
+            workflow_name: Workflow to update
+            model_identifier: Filename or hash to match
+            new_criticality: "required", "flexible", or "optional"
+
+        Returns:
+            True if the model was found and updated, False otherwise
+
+        Raises:
+            ValueError: If new_criticality is not valid
+        """
+        # Validate criticality
+        if new_criticality not in ("required", "flexible", "optional"):
+            raise ValueError(f"Invalid criticality: {new_criticality}")
+
+        # Load workflow models
+        models = self.pyproject.workflows.get_workflow_models(workflow_name)
+
+        if not models:
+            return False
+
+        # Find matching model(s)
+        matches = []
+        for i, model in enumerate(models):
+            if model.hash == model_identifier or model.filename == model_identifier:
+                matches.append((i, model))
+
+        if not matches:
+            return False
+
+        # If single match, update directly
+        if len(matches) == 1:
+            idx, model = matches[0]
+            old_criticality = model.criticality
+            models[idx].criticality = new_criticality
+            self.pyproject.workflows.set_workflow_models(workflow_name, models)
+            logger.info(
+                f"Updated '{model.filename}' criticality: "
+                f"{old_criticality} → {new_criticality}"
+            )
+            return True
+
+        # Multiple matches - update all and return True
+        for idx, model in matches:
+            models[idx].criticality = new_criticality
+
+        self.pyproject.workflows.set_workflow_models(workflow_name, models)
+        logger.info(
+            f"Updated {len(matches)} model(s) with identifier '{model_identifier}' "
+            f"to criticality '{new_criticality}'"
+        )
+        return True
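# A usage sketch, assuming `manager` is an instance of this class and that
# the workflow and model names exist (all values here are hypothetical):
try:
    found = manager.update_model_criticality(
        workflow_name="portrait_pipeline",
        model_identifier="style.safetensors",  # a filename or a hash both match
        new_criticality="optional",
    )
    if not found:
        print("No model with that filename/hash in the workflow")
except ValueError as exc:
    print(f"Bad criticality value: {exc}")  # raised before any lookup happens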
+
+    def _update_model_hash(
+        self,
+        workflow_name: str,
+        reference: WorkflowNodeWidgetRef,
+        new_hash: str
+    ) -> None:
+        """Update hash for a model after download completes.
+
+        Updates a download intent (status=unresolved, sources=[URL]) to the resolved
+        state by atomically: 1) creating the global table entry, 2) updating the
+        workflow model.
+
+        Args:
+            workflow_name: Workflow containing the model
+            reference: Widget reference to identify the model
+            new_hash: Hash of downloaded model
+
+        Raises:
+            ValueError: If model not found in workflow or repository
+        """
+        from comfygit_core.models.manifest import ManifestModel
+
+        # Load workflow models
+        models = self.pyproject.workflows.get_workflow_models(workflow_name)
+
+        # Find model matching the reference
+        for idx, model in enumerate(models):
+            if reference in model.nodes:
+                # Capture download metadata before clearing
+                download_sources = model.sources if model.sources else []
+
+                # STEP 1: Get model from repository (should always exist after download)
+                resolved_model = self.model_repository.get_model(new_hash)
+                if not resolved_model:
+                    raise ValueError(
+                        f"Model {new_hash} not found in repository after download. "
+                        f"This indicates the model wasn't properly indexed."
+                    )
+
+                # STEP 2: Create global table entry FIRST (before clearing workflow model)
+                manifest_model = ManifestModel(
+                    hash=new_hash,
+                    filename=resolved_model.filename,
+                    relative_path=resolved_model.relative_path,
+                    category=model.category,
+                    size=resolved_model.file_size,
+                    sources=download_sources
+                )
+                self.pyproject.models.add_model(manifest_model)
+
+                # STEP 3: Update workflow model (clear transient fields, set hash)
+                models[idx].hash = new_hash
+                models[idx].status = "resolved"
+                models[idx].sources = []
+                models[idx].relative_path = None
+
+                # STEP 4: Save workflow models
+                self.pyproject.workflows.set_workflow_models(workflow_name, models)
+
+                logger.info(f"Updated model '{model.filename}' with hash {new_hash}")
+                return
+
+        raise ValueError(f"Model with reference {reference} not found in workflow '{workflow_name}'")
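# A before/after sketch of the promotion performed above (all values
# hypothetical, shown as plain dicts rather than the manifest types):
before = {"hash": None, "status": "unresolved",
          "sources": ["https://example.com/m.ckpt"],
          "relative_path": "checkpoints/m.ckpt"}
after = {"hash": "abc123", "status": "resolved",
         "sources": [], "relative_path": None}
# The URL is not lost: it moves into the global table entry keyed by the new
# hash (STEP 2) before the workflow-side fields are cleared (STEP 3).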
+
+    def execute_pending_downloads(
+        self,
+        result: ResolutionResult,
+        callbacks: BatchDownloadCallbacks | None = None
+    ) -> list:
+        """Execute batch downloads for all download intents in result.
+
+        All user-facing output is delivered via callbacks.
+
+        Args:
+            result: Resolution result containing download intents
+            callbacks: Optional callbacks for progress/status (provided by CLI)
+
+        Returns:
+            List of DownloadResult objects
+        """
+        from ..models.workflow import DownloadResult
+
+        # Collect download intents
+        intents = [r for r in result.models_resolved if r.match_type == "download_intent"]
+
+        if not intents:
+            return []
+
+        # Notify batch start
+        if callbacks and callbacks.on_batch_start:
+            callbacks.on_batch_start(len(intents))
+
+        results = []
+        for idx, resolved in enumerate(intents, 1):
+            filename = resolved.reference.widget_value
+
+            # Notify file start
+            if callbacks and callbacks.on_file_start:
+                callbacks.on_file_start(filename, idx, len(intents))
+
+            # Check if already downloaded (deduplication)
+            if resolved.model_source:
+                existing = self.model_repository.find_by_source_url(resolved.model_source)
+                if existing:
+                    # Reuse existing model - update pyproject with hash
+                    self._update_model_hash(
+                        result.workflow_name,
+                        resolved.reference,
+                        existing.hash
+                    )
+                    # Notify success (reused existing)
+                    if callbacks and callbacks.on_file_complete:
+                        callbacks.on_file_complete(filename, True, None)
+                    results.append(DownloadResult(
+                        success=True,
+                        filename=filename,
+                        model=existing,
+                        reused=True
+                    ))
+                    continue
+
+            # Validate required fields
+            if not resolved.target_path or not resolved.model_source:
+                error_msg = "Download intent missing target_path or model_source"
+                if callbacks and callbacks.on_file_complete:
+                    callbacks.on_file_complete(filename, False, error_msg)
+                results.append(DownloadResult(
+                    success=False,
+                    filename=filename,
+                    error=error_msg
+                ))
+                continue
+
+            # Download new model
+            from ..services.model_downloader import DownloadRequest
+
+            target_path = self.downloader.models_dir / resolved.target_path
+            request = DownloadRequest(
+                url=resolved.model_source,
+                target_path=target_path,
+                workflow_name=result.workflow_name
+            )
+
+            # Use per-file progress callback if provided
+            progress_callback = callbacks.on_file_progress if callbacks else None
+            download_result = self.downloader.download(request, progress_callback=progress_callback)
+
+            if download_result.success and download_result.model:
+                # Update pyproject with actual hash
+                self._update_model_hash(
+                    result.workflow_name,
+                    resolved.reference,
+                    download_result.model.hash
+                )
+                # Notify success
+                if callbacks and callbacks.on_file_complete:
+                    callbacks.on_file_complete(filename, True, None)
+            else:
+                # Notify failure (model remains unresolved with source in pyproject)
+                if callbacks and callbacks.on_file_complete:
+                    callbacks.on_file_complete(filename, False, download_result.error)
+
+            results.append(DownloadResult(
+                success=download_result.success,
+                filename=filename,
+                model=download_result.model if download_result.success else None,
+                error=download_result.error if not download_result.success else None
+            ))
+
+        # Notify batch complete
+        if callbacks and callbacks.on_batch_complete:
+            success_count = sum(1 for r in results if r.success)
+            callbacks.on_batch_complete(success_count, len(results))
+
+        return results
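# A minimal console implementation of the callback hooks used above. The
# field names mirror the attributes accessed in execute_pending_downloads;
# the dataclass shape itself is an assumption about BatchDownloadCallbacks,
# and on_file_progress's signature is owned by the downloader.
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class ConsoleDownloadCallbacks:
    on_batch_start: Optional[Callable[[int], None]] = None
    on_file_start: Optional[Callable[[str, int, int], None]] = None
    on_file_progress: Optional[Callable] = None
    on_file_complete: Optional[Callable[[str, bool, Optional[str]], None]] = None
    on_batch_complete: Optional[Callable[[int, int], None]] = None

callbacks = ConsoleDownloadCallbacks(
    on_batch_start=lambda total: print(f"Downloading {total} model(s)..."),
    on_file_start=lambda name, i, total: print(f"[{i}/{total}] {name}"),
    on_file_complete=lambda name, ok, err: print(f"  {'done' if ok else f'failed: {err}'}"),
    on_batch_complete=lambda ok, total: print(f"{ok}/{total} succeeded"),
)
# manager.execute_pending_downloads(result, callbacks=callbacks)  # hypothetical usage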