comfygit-0.3.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2701 @@
1
+ """Environment-specific commands for ComfyGit CLI."""
2
+ from __future__ import annotations
3
+
4
+ import argparse
5
+ import os
6
+ import subprocess
7
+ import sys
8
+ from functools import cached_property
9
+ from typing import TYPE_CHECKING, Any
10
+
11
+ from comfygit_core.models.exceptions import CDDependencyConflictError, CDEnvironmentError, CDNodeConflictError, CDRegistryDataError, UVCommandError
12
+ from comfygit_core.utils.uv_error_handler import handle_uv_error
13
+
14
+ from .formatters.error_formatter import NodeErrorFormatter
15
+ from .strategies.interactive import InteractiveModelStrategy, InteractiveNodeStrategy
16
+
17
+ if TYPE_CHECKING:
18
+ from comfygit_core.core.environment import Environment
19
+ from comfygit_core.core.workspace import Workspace
20
+ from comfygit_core.models.environment import EnvironmentStatus
21
+ from comfygit_core.models.workflow import WorkflowAnalysisStatus
22
+
23
+ from .cli_utils import get_workspace_or_exit
24
+ from .logging.environment_logger import with_env_logging
25
+ from .logging.logging_config import get_logger
26
+
27
+ logger = get_logger(__name__)
28
+
29
+
30
+ class EnvironmentCommands:
31
+ """Handler for environment-specific commands - simplified for MVP."""
32
+
33
+ def __init__(self) -> None:
34
+ """Initialize environment commands handler."""
35
+ pass
36
+
37
+ @cached_property
38
+ def workspace(self) -> Workspace:
39
+ return get_workspace_or_exit()
40
+
41
+ def _get_or_create_workspace(self, args: argparse.Namespace) -> Workspace:
42
+ """Get existing workspace or initialize a new one with user confirmation.
43
+
44
+ Delegates to GlobalCommands._get_or_create_workspace so the shared
45
+ workspace-initialization logic is defined in one place.
46
+
47
+ Args:
48
+ args: Command arguments, must have 'yes' attribute for non-interactive mode
49
+
50
+ Returns:
51
+ Workspace instance (existing or newly created)
52
+ """
53
+ from .global_commands import GlobalCommands
54
+
55
+ global_cmds = GlobalCommands()
56
+ return global_cmds._get_or_create_workspace(args)
57
+
58
+ def _get_env(self, args) -> Environment:
59
+ """Get environment from global -e flag or active environment.
60
+
61
+ Args:
62
+ args: Parsed command line arguments
63
+
64
+ Returns:
65
+ Environment instance
66
+
67
+ Raises:
68
+ SystemExit: If no environment is specified or the named environment cannot be found.
69
+ """
70
+ # Check global -e flag first
71
+ if hasattr(args, 'target_env') and args.target_env:
72
+ try:
73
+ env = self.workspace.get_environment(args.target_env)
74
+ return env
75
+ except Exception:
76
+ print(f"✗ Unknown environment: {args.target_env}")
77
+ print("Available environments:")
78
+ for e in self.workspace.list_environments():
79
+ print(f" • {e.name}")
80
+ sys.exit(1)
81
+
82
+ # Fall back to active environment
83
+ active = self.workspace.get_active_environment()
84
+ if not active:
85
+ print("✗ No environment specified. Either:")
86
+ print(" • Use -e flag: cg -e my-env <command>")
87
+ print(" • Set active: cg use <name>")
88
+ sys.exit(1)
89
+ return active
90
+
91
+ def _format_size(self, size_bytes: int) -> str:
92
+ """Format bytes as human-readable size."""
93
+ for unit in ("B", "KB", "MB", "GB"):
94
+ if abs(size_bytes) < 1024:
95
+ return f"{size_bytes:.1f} {unit}"
96
+ size_bytes /= 1024 # type: ignore[assignment]
97
+ return f"{size_bytes:.1f} TB"
98
+
99
+ def _display_diff_preview(self, diff: Any) -> None:
100
+ """Display a RefDiff to the user."""
101
+ from comfygit_core.models.ref_diff import RefDiff
102
+
103
+ if not isinstance(diff, RefDiff):
104
+ return
105
+
106
+ summary = diff.summary()
107
+ print(f"\nChanges from {diff.target_ref}:")
108
+ print("-" * 40)
109
+
110
+ # Nodes
111
+ if diff.node_changes:
112
+ print("\nNodes:")
113
+ for node_change in diff.node_changes:
114
+ symbol = {"added": "+", "removed": "-", "version_changed": "~"}[
115
+ node_change.change_type
116
+ ]
117
+ conflict_mark = " (CONFLICT)" if node_change.conflict else ""
118
+ version_info = ""
119
+ if node_change.change_type == "version_changed":
120
+ version_info = f" ({node_change.base_version} -> {node_change.target_version})"
121
+ print(f" {symbol} {node_change.name}{version_info}{conflict_mark}")
122
+
123
+ # Models
124
+ if diff.model_changes:
125
+ print("\nModels:")
126
+ for model_change in diff.model_changes:
127
+ symbol = "+" if model_change.change_type == "added" else "-"
128
+ size_str = self._format_size(model_change.size)
129
+ print(f" {symbol} {model_change.filename} ({size_str})")
130
+
131
+ # Workflows
132
+ if diff.workflow_changes:
133
+ print("\nWorkflows:")
134
+ for wf_change in diff.workflow_changes:
135
+ symbol = {"added": "+", "deleted": "-", "modified": "~"}[
136
+ wf_change.change_type
137
+ ]
138
+ conflict_mark = " (CONFLICT)" if wf_change.conflict else ""
139
+ print(f" {symbol} {wf_change.name}.json{conflict_mark}")
140
+
141
+ # Dependencies
142
+ deps = diff.dependency_changes
143
+ if deps.has_changes:
144
+ print("\nDependencies:")
145
+ for dep in deps.added:
146
+ print(f" + {dep.get('name', 'unknown')}")
147
+ for dep in deps.removed:
148
+ print(f" - {dep.get('name', 'unknown')}")
149
+ for dep in deps.updated:
150
+ print(f" ~ {dep.get('name', 'unknown')} ({dep.get('old', '?')} -> {dep.get('new', '?')})")
151
+
152
+ # Summary
153
+ print()
154
+ summary_parts = []
155
+ if summary["nodes_added"] or summary["nodes_removed"]:
156
+ summary_parts.append(
157
+ f"{summary['nodes_added']} nodes added, {summary['nodes_removed']} removed"
158
+ )
159
+ if summary["models_added"]:
160
+ summary_parts.append(
161
+ f"{summary['models_added']} models to download ({self._format_size(summary['models_added_size'])})"
162
+ )
163
+ if summary["workflows_added"] or summary["workflows_modified"] or summary["workflows_deleted"]:
164
+ summary_parts.append(
165
+ f"{summary['workflows_added']} workflows added, {summary['workflows_modified']} modified, {summary['workflows_deleted']} deleted"
166
+ )
167
+ if summary["conflicts"]:
168
+ summary_parts.append(f"{summary['conflicts']} conflicts to resolve")
169
+
170
+ if summary_parts:
171
+ print("Summary:")
172
+ for part in summary_parts:
173
+ print(f" {part}")
174
+
175
+ # === Commands that operate ON environments ===
176
+
177
+ @with_env_logging("env create")
178
+ def create(self, args: argparse.Namespace, logger=None) -> None:
179
+ """Create a new environment."""
180
+ # Ensure workspace exists, creating it if necessary
181
+ workspace = self._get_or_create_workspace(args)
182
+
183
+ print(f"🚀 Creating environment: {args.name}")
184
+ print(" This will download PyTorch and dependencies (may take a few minutes)...")
185
+ print()
186
+
187
+ try:
188
+ workspace.create_environment(
189
+ name=args.name,
190
+ comfyui_version=args.comfyui,
191
+ python_version=args.python,
192
+ template_path=args.template,
193
+ torch_backend=args.torch_backend,
194
+ )
195
+ except Exception as e:
196
+ if logger:
197
+ logger.error(f"Environment creation failed for '{args.name}': {e}", exc_info=True)
198
+ print(f"✗ Failed to create environment: {e}", file=sys.stderr)
199
+ sys.exit(1)
200
+
201
+ if args.use:
202
+ try:
203
+ workspace.set_active_environment(args.name)
204
+
205
+ except Exception as e:
206
+ if logger:
207
+ logger.error(f"Failed to set active environment '{args.name}': {e}", exc_info=True)
208
+ print(f"✗ Failed to set active environment: {e}", file=sys.stderr)
209
+ sys.exit(1)
210
+
211
+ print(f"✓ Environment created: {args.name}")
212
+ if args.use:
213
+ print(f"✓ Active environment set to: {args.name}")
214
+ print("\nNext steps:")
215
+ print(" • Run ComfyUI: cg run")
216
+ print(" • Add nodes: cg node add <node-name>")
217
+ else:
218
+ print("\nNext steps:")
219
+ print(f" • Run ComfyUI: cg -e {args.name} run")
220
+ print(f" • Add nodes: cg -e {args.name} node add <node-name>")
221
+ print(f" • Set as active: cg use {args.name}")
222
+
223
+ @with_env_logging("env use")
224
+ def use(self, args: argparse.Namespace, logger=None) -> None:
225
+ """Set the active environment."""
226
+ from comfygit_cli.utils.progress import create_model_sync_progress
227
+
228
+ try:
229
+ progress = create_model_sync_progress()
230
+ self.workspace.set_active_environment(args.name, progress=progress)
231
+ except Exception as e:
232
+ if logger:
233
+ logger.error(f"Failed to set active environment '{args.name}': {e}", exc_info=True)
234
+ print(f"✗ Failed to set active environment: {e}", file=sys.stderr)
235
+ sys.exit(1)
236
+
237
+ print(f"✓ Active environment set to: {args.name}")
238
+ print("You can now run commands without the -e flag")
239
+
240
+ @with_env_logging("env delete")
241
+ def delete(self, args: argparse.Namespace, logger=None) -> None:
242
+ """Delete an environment."""
243
+ # Check that environment exists (don't require active environment)
244
+ env_path = self.workspace.paths.environments / args.name
245
+ if not env_path.exists():
246
+ print(f"✗ Environment '{args.name}' not found")
247
+ print("\nAvailable environments:")
248
+ for env in self.workspace.list_environments():
249
+ print(f" • {env.name}")
250
+ sys.exit(1)
251
+
252
+ # Confirm deletion unless --yes is specified
253
+ if not args.yes:
254
+ response = input(f"Delete environment '{args.name}'? This cannot be undone. (y/N): ")
255
+ if response.lower() != 'y':
256
+ print("Cancelled")
257
+ return
258
+
259
+ print(f"🗑 Deleting environment: {args.name}")
260
+
261
+ try:
262
+ self.workspace.delete_environment(args.name)
263
+ except Exception as e:
264
+ if logger:
265
+ logger.error(f"Environment deletion failed for '{args.name}': {e}", exc_info=True)
266
+ print(f"✗ Failed to delete environment: {e}", file=sys.stderr)
267
+ sys.exit(1)
268
+
269
+ print(f"✓ Environment deleted: {args.name}")
270
+
271
+ # === Commands that operate IN environments ===
272
+
273
+ @with_env_logging("env run")
274
+ def run(self, args: argparse.Namespace) -> None:
275
+ """Run ComfyUI in the specified environment."""
276
+ RESTART_EXIT_CODE = 42
277
+ env = self._get_env(args)
278
+ comfyui_args = args.args if hasattr(args, 'args') else []
279
+ no_sync = getattr(args, 'no_sync', False)
280
+
281
+ current_branch = env.get_current_branch()
282
+ branch_display = f" (on {current_branch})" if current_branch else " (detached HEAD)"
283
+
284
+ while True:
285
+ # Sync before running (unless --no-sync)
286
+ if not no_sync:
287
+ print(f"🔄 Syncing environment: {env.name}")
288
+ env.sync(preserve_workflows=True, remove_extra_nodes=False)
289
+
290
+ print(f"🎮 Starting ComfyUI in environment: {env.name}{branch_display}")
291
+ if comfyui_args:
292
+ print(f" Arguments: {' '.join(comfyui_args)}")
293
+
294
+ result = env.run(comfyui_args)
295
+
296
+ if result.returncode == RESTART_EXIT_CODE:
297
+ print("\n🔄 Restart requested, syncing dependencies...\n")
298
+ no_sync = False # Ensure sync runs on restart
299
+ continue
300
+
301
+ sys.exit(result.returncode)
302
+
303
+ def manifest(self, args: argparse.Namespace) -> None:
304
+ """Show environment manifest (pyproject.toml configuration)."""
305
+ env = self._get_env(args)
306
+
307
+ # Handle --ide flag: open in editor and exit
308
+ if hasattr(args, 'ide') and args.ide:
309
+ import os
310
+ import subprocess
311
+ editor = args.ide if args.ide != "auto" else os.environ.get("EDITOR", "code")
312
+ subprocess.run([editor, str(env.pyproject.path)])
313
+ return
314
+
315
+ import tomlkit
316
+ import yaml
317
+
318
+ # Load raw TOML config
319
+ config = env.pyproject.load()
320
+
321
+ # Handle section filtering if requested
322
+ if hasattr(args, 'section') and args.section:
323
+ # Navigate to requested section using dot notation
324
+ keys = args.section.split('.')
325
+ current = config
326
+ try:
327
+ for key in keys:
328
+ current = current[key]
329
+ config = {args.section: current}
330
+ except (KeyError, TypeError):
331
+ print(f"✗ Section not found: {args.section}")
332
+ print("\nAvailable sections:")
333
+ print(" • project")
334
+ print(" • tool.comfygit")
335
+ print(" • tool.comfygit.nodes")
336
+ print(" • tool.comfygit.workflows")
337
+ print(" • tool.comfygit.models")
338
+ print(" • tool.uv")
339
+ print(" • dependency-groups")
340
+ sys.exit(1)
341
+
342
+ # Output format
343
+ if hasattr(args, 'pretty') and args.pretty:
344
+ # Convert tomlkit objects to plain Python types recursively
345
+ def to_plain(obj):
346
+ """Recursively convert tomlkit objects to plain Python types."""
347
+ if isinstance(obj, dict):
348
+ return {k: to_plain(v) for k, v in obj.items()}
349
+ elif isinstance(obj, list):
350
+ return [to_plain(item) for item in obj]
351
+ elif hasattr(obj, 'unwrap'): # tomlkit items have unwrap()
352
+ return to_plain(obj.unwrap())
353
+ else:
354
+ return obj
355
+
356
+ plain_dict = to_plain(config)
357
+ print(yaml.dump(plain_dict, default_flow_style=False, sort_keys=False))
358
+ else:
359
+ # Default: raw TOML (exact file representation)
360
+ print(tomlkit.dumps(config))
361
+
362
+ @with_env_logging("env status")
363
+ def status(self, args: argparse.Namespace) -> None:
364
+ """Show environment status using semantic methods."""
365
+ env = self._get_env(args)
366
+
367
+ status = env.status()
368
+
369
+ # Always show git state - never leave it blank
370
+ if status.git.current_branch:
371
+ branch_info = f" (on {status.git.current_branch})"
372
+ else:
373
+ branch_info = " (detached HEAD)"
374
+
375
+ # Clean state - everything is good (but check for detached HEAD)
376
+ if status.is_synced and not status.git.has_changes and status.workflow.sync_status.total_count == 0:
377
+ # Determine status indicator
378
+ if status.git.current_branch is None:
379
+ status_indicator = "⚠️" # Warning for detached HEAD even when clean
380
+ else:
381
+ status_indicator = "✓" # All good
382
+
383
+ print(f"Environment: {env.name}{branch_info} {status_indicator}")
384
+
385
+ # Show detached HEAD warning even in clean state
386
+ if status.git.current_branch is None:
387
+ print("⚠️ You are in detached HEAD state")
388
+ print(" Any commits you make will not be saved to a branch!")
389
+ print(" Create a branch: cg checkout -b <branch-name>")
390
+ print() # Extra spacing before clean state messages
391
+
392
+ print("\n✓ No workflows")
393
+ print("✓ No uncommitted changes")
394
+ return
395
+
396
+ # Show environment name with branch
397
+ print(f"Environment: {env.name}{branch_info}")
398
+
399
+ # Detached HEAD warning (shown prominently at top)
400
+ if status.git.current_branch is None:
401
+ print("⚠️ You are in detached HEAD state")
402
+ print(" Any commits you make will not be saved to a branch!")
403
+ print(" Create a branch: cg checkout -b <branch-name>")
404
+ print() # Extra spacing
405
+
406
+ # Workflows section - consolidated with issues
407
+ if status.workflow.sync_status.total_count > 0 or status.workflow.sync_status.has_changes:
408
+ print("\n📋 Workflows:")
409
+
410
+ # Group workflows by state and show with issues inline
411
+ all_workflows = {}
412
+
413
+ # Build workflow map with their analysis
414
+ for wf_analysis in status.workflow.analyzed_workflows:
415
+ all_workflows[wf_analysis.name] = {
416
+ 'state': wf_analysis.sync_state,
417
+ 'has_issues': wf_analysis.has_issues,
418
+ 'analysis': wf_analysis
419
+ }
420
+
421
+ # Show workflows with inline issue details
422
+ verbose = args.verbose
423
+ for name in status.workflow.sync_status.synced:
424
+ if name in all_workflows:
425
+ wf = all_workflows[name]['analysis']
426
+ # Check if workflow has missing models (from direct repo query, not cache)
427
+ missing_for_wf = [m for m in status.missing_models if name in m.workflow_names]
428
+ # Show warning if has issues OR path sync needed OR missing models
429
+ if wf.has_issues or wf.has_path_sync_issues:
430
+ print(f" ⚠️ {name} (synced)")
431
+ self._print_workflow_issues(wf, verbose)
432
+ elif missing_for_wf:
433
+ print(f" ⚠️ {name} (synced, {len(missing_for_wf)} missing models)")
434
+ else:
435
+ print(f" ✓ {name}")
436
+
437
+ for name in status.workflow.sync_status.new:
438
+ if name in all_workflows:
439
+ wf = all_workflows[name]['analysis']
440
+ # Check if workflow has missing models (from direct repo query, not cache)
441
+ missing_for_wf = [m for m in status.missing_models if name in m.workflow_names]
442
+ # Show warning if has issues OR path sync needed OR missing models
443
+ if wf.has_issues or wf.has_path_sync_issues:
444
+ print(f" ⚠️ {name} (new)")
445
+ self._print_workflow_issues(wf, verbose)
446
+ elif missing_for_wf:
447
+ print(f" ⚠️ {name} (new, {len(missing_for_wf)} missing models)")
448
+ else:
449
+ print(f" 🆕 {name} (new, ready to commit)")
450
+
451
+ for name in status.workflow.sync_status.modified:
452
+ if name in all_workflows:
453
+ wf = all_workflows[name]['analysis']
454
+ # Check if workflow has missing models
455
+ missing_for_wf = [m for m in status.missing_models if name in m.workflow_names]
456
+
457
+ # Show warning if has issues OR path sync needed
458
+ if wf.has_issues or wf.has_path_sync_issues:
459
+ print(f" ⚠️ {name} (modified)")
460
+ self._print_workflow_issues(wf, verbose)
461
+ elif missing_for_wf:
462
+ print(f" ⬇️ {name} (modified, missing models)")
463
+ print(f" {len(missing_for_wf)} model(s) need downloading")
464
+ else:
465
+ print(f" 📝 {name} (modified)")
466
+
467
+ for name in status.workflow.sync_status.deleted:
468
+ print(f" 🗑️ {name} (deleted)")
469
+
470
+ # Environment drift (manual edits)
471
+ if not status.comparison.is_synced:
472
+ print("\n⚠️ Environment needs repair:")
473
+
474
+ if status.comparison.missing_nodes:
475
+ print(f" • {len(status.comparison.missing_nodes)} nodes in pyproject.toml not installed")
476
+
477
+ if status.comparison.extra_nodes:
478
+ print(f" • {len(status.comparison.extra_nodes)} untracked nodes on filesystem:")
479
+ limit = None if args.verbose else 5
480
+ nodes_to_show = status.comparison.extra_nodes if limit is None else status.comparison.extra_nodes[:limit]
481
+ for node_name in nodes_to_show:
482
+ print(f" - {node_name}")
483
+ if limit and len(status.comparison.extra_nodes) > limit:
484
+ print(f" ... and {len(status.comparison.extra_nodes) - limit} more")
485
+
486
+ if status.comparison.version_mismatches:
487
+ print(f" • {len(status.comparison.version_mismatches)} version mismatches")
488
+
489
+ if not status.comparison.packages_in_sync:
490
+ print(" • Python packages out of sync")
491
+
492
+ # Disabled nodes (informational, not a warning)
493
+ if status.comparison.disabled_nodes:
494
+ print("\n📴 Disabled nodes:")
495
+ for node_name in status.comparison.disabled_nodes:
496
+ print(f" • {node_name}")
497
+
498
+ # Git changes
499
+ if status.git.has_changes:
500
+ has_specific_changes = (
501
+ status.git.nodes_added or
502
+ status.git.nodes_removed or
503
+ status.git.workflow_changes
504
+ )
505
+
506
+ if has_specific_changes:
507
+ print("\n📦 Uncommitted changes:")
508
+ limit = None if args.verbose else 3
509
+
510
+ if status.git.nodes_added:
511
+ nodes_to_show = status.git.nodes_added if limit is None else status.git.nodes_added[:limit]
512
+ for node in nodes_to_show:
513
+ name = node['name'] if isinstance(node, dict) else node
514
+ print(f" • Added node: {name}")
515
+ if limit and len(status.git.nodes_added) > limit:
516
+ print(f" • ... and {len(status.git.nodes_added) - limit} more nodes")
517
+
518
+ if status.git.nodes_removed:
519
+ nodes_to_show = status.git.nodes_removed if limit is None else status.git.nodes_removed[:limit]
520
+ for node in nodes_to_show:
521
+ name = node['name'] if isinstance(node, dict) else node
522
+ print(f" • Removed node: {name}")
523
+ if limit and len(status.git.nodes_removed) > limit:
524
+ print(f" • ... and {len(status.git.nodes_removed) - limit} more nodes")
525
+
526
+ if status.git.workflow_changes:
527
+ count = len(status.git.workflow_changes)
528
+ print(f" • {count} workflow(s) changed")
529
+
530
+ # Show other changes if present
531
+ if status.git.has_other_changes:
532
+ print(" • Other files modified in .cec/")
533
+ else:
534
+ # Generic message for other changes (e.g., model resolutions)
535
+ print("\n📦 Uncommitted changes:")
536
+ if status.git.has_other_changes:
537
+ print(" • Other files modified in .cec/")
538
+ else:
539
+ print(" • Configuration updated")
540
+
541
+ # Suggested actions - smart and contextual
542
+ self._show_smart_suggestions(status)
543
+
544
+ # Removed: _has_uninstalled_packages - this logic is now in core's WorkflowAnalysisStatus
545
+
546
+ def _print_workflow_issues(self, wf_analysis: WorkflowAnalysisStatus, verbose: bool = False) -> None:
547
+ """Print compact workflow issues summary using model properties only."""
548
+ # Build compact summary using WorkflowAnalysisStatus properties (no pyproject access!)
549
+ parts = []
550
+
551
+ # Path sync warnings (FIRST - most actionable fix)
552
+ if wf_analysis.models_needing_path_sync_count > 0:
553
+ parts.append(f"{wf_analysis.models_needing_path_sync_count} model paths need syncing")
554
+
555
+ # Category mismatch (blocking - model in wrong directory for loader)
556
+ if wf_analysis.models_with_category_mismatch_count > 0:
557
+ parts.append(f"{wf_analysis.models_with_category_mismatch_count} models in wrong directory")
558
+
559
+ # Use the uninstalled_count property (populated by core)
560
+ if wf_analysis.uninstalled_count > 0:
561
+ parts.append(f"{wf_analysis.uninstalled_count} packages needed for installation")
562
+
563
+ # Resolution issues
564
+ if wf_analysis.resolution.nodes_unresolved:
565
+ parts.append(f"{len(wf_analysis.resolution.nodes_unresolved)} nodes couldn't be resolved")
566
+ if wf_analysis.resolution.models_unresolved:
567
+ parts.append(f"{len(wf_analysis.resolution.models_unresolved)} models not found")
568
+ if wf_analysis.resolution.models_ambiguous:
569
+ parts.append(f"{len(wf_analysis.resolution.models_ambiguous)} ambiguous models")
570
+
571
+ # Show download intents as pending work (not blocking but needs attention)
572
+ download_intents = [m for m in wf_analysis.resolution.models_resolved if m.match_type == "download_intent"]
573
+ if download_intents:
574
+ parts.append(f"{len(download_intents)} models queued for download")
575
+
576
+ # Print compact issue line
577
+ if parts:
578
+ print(f" {', '.join(parts)}")
579
+
580
+ # Detailed category mismatch info (always show brief, verbose shows full details)
581
+ if wf_analysis.has_category_mismatch_issues:
582
+ for model in wf_analysis.resolution.models_resolved:
583
+ if model.has_category_mismatch:
584
+ expected = model.expected_categories[0] if model.expected_categories else "unknown"
585
+ if verbose:
586
+ print(f" ↳ {model.name}")
587
+ print(f" Node: {model.reference.node_type} expects {expected}/")
588
+ print(f" Actual: {model.actual_category}/")
589
+ print(f" Fix: Move file to models/{expected}/ or re-download")
590
+ else:
591
+ print(f" ↳ {model.name}: in {model.actual_category}/, needs {expected}/")
592
+
593
+ def _show_smart_suggestions(self, status: EnvironmentStatus) -> None:
594
+ """Show contextual suggestions based on current state."""
595
+ suggestions = []
596
+
597
+ # Differentiate workflow-related nodes from orphan nodes
598
+ uninstalled_workflow_nodes = set()
599
+ for wf in status.workflow.analyzed_workflows:
600
+ uninstalled_workflow_nodes.update(wf.uninstalled_nodes)
601
+
602
+ orphan_missing_nodes = set(status.comparison.missing_nodes) - uninstalled_workflow_nodes
603
+ has_orphan_nodes = bool(orphan_missing_nodes or status.comparison.extra_nodes)
604
+
605
+ # Missing models + environment drift: check if repair needed first
606
+ if status.missing_models and has_orphan_nodes:
607
+ suggestions.append("Install missing nodes: cg repair")
608
+
609
+ # Group workflows with missing models
610
+ workflows_with_missing = {}
611
+ for missing_info in status.missing_models:
612
+ for wf_name in missing_info.workflow_names:
613
+ if wf_name not in workflows_with_missing:
614
+ workflows_with_missing[wf_name] = []
615
+ workflows_with_missing[wf_name].append(missing_info)
616
+
617
+ if len(workflows_with_missing) == 1:
618
+ wf_name = list(workflows_with_missing.keys())[0]
619
+ suggestions.append(f"Then resolve workflow: cg workflow resolve \"{wf_name}\"")
620
+ else:
621
+ suggestions.append("Then resolve workflow (pick one):")
622
+ for wf_name in list(workflows_with_missing.keys())[:2]:
623
+ suggestions.append(f" cg workflow resolve \"{wf_name}\"")
624
+
625
+ print("\n💡 Next:")
626
+ for s in suggestions:
627
+ print(f" {s}")
628
+ return
629
+
630
+ # Missing models only (no orphan nodes) - workflow resolve handles everything
631
+ if status.missing_models:
632
+ workflows_with_missing = {}
633
+ for missing_info in status.missing_models:
634
+ for wf_name in missing_info.workflow_names:
635
+ if wf_name not in workflows_with_missing:
636
+ workflows_with_missing[wf_name] = []
637
+ workflows_with_missing[wf_name].append(missing_info)
638
+
639
+ if len(workflows_with_missing) == 1:
640
+ wf_name = list(workflows_with_missing.keys())[0]
641
+ suggestions.append(f"Resolve workflow: cg workflow resolve \"{wf_name}\"")
642
+ else:
643
+ suggestions.append("Resolve workflows with missing models (pick one):")
644
+ for wf_name in list(workflows_with_missing.keys())[:3]:
645
+ suggestions.append(f" cg workflow resolve \"{wf_name}\"")
646
+ if len(workflows_with_missing) > 3:
647
+ suggestions.append(f" ... and {len(workflows_with_missing) - 3} more")
648
+
649
+ print("\n💡 Next:")
650
+ for s in suggestions:
651
+ print(f" {s}")
652
+ return
653
+
654
+ # Environment drift only (no workflow issues)
655
+ if not status.comparison.is_synced:
656
+ # If only extra nodes, suggest tracking them as dev nodes
657
+ if status.comparison.extra_nodes and not status.comparison.missing_nodes and not status.comparison.version_mismatches and status.comparison.packages_in_sync:
658
+ if len(status.comparison.extra_nodes) == 1:
659
+ node_name = status.comparison.extra_nodes[0]
660
+ suggestions.append(f"Track as dev node: cg node add {node_name} --dev")
661
+ else:
662
+ suggestions.append("Track as dev nodes:")
663
+ for node_name in status.comparison.extra_nodes[:3]:
664
+ suggestions.append(f" cg node add {node_name} --dev")
665
+ if len(status.comparison.extra_nodes) > 3:
666
+ suggestions.append(f" ... and {len(status.comparison.extra_nodes) - 3} more")
667
+ suggestions.append("Or remove untracked: cg repair")
668
+ else:
669
+ suggestions.append("Run: cg repair")
670
+ print("\n💡 Next:")
671
+ for s in suggestions:
672
+ print(f" {s}")
673
+ return
674
+
675
+ # Category mismatch (blocking - model in wrong directory for loader)
676
+ workflows_with_category_mismatch = [
677
+ w for w in status.workflow.analyzed_workflows
678
+ if w.has_category_mismatch_issues
679
+ ]
680
+
681
+ if workflows_with_category_mismatch:
682
+ suggestions.append("Models in wrong directory (move files manually):")
683
+ for wf in workflows_with_category_mismatch[:2]:
684
+ for m in wf.resolution.models_resolved:
685
+ if m.has_category_mismatch:
686
+ expected = m.expected_categories[0] if m.expected_categories else "unknown"
687
+ suggestions.append(f" {m.actual_category}/{m.name} → {expected}/")
688
+
689
+ print("\n💡 Next:")
690
+ for s in suggestions:
691
+ print(f" {s}")
692
+ return
693
+
694
+ # Path sync warnings (prioritize - quick fix!)
695
+ workflows_needing_sync = [
696
+ w for w in status.workflow.analyzed_workflows
697
+ if w.has_path_sync_issues
698
+ ]
699
+
700
+ if workflows_needing_sync:
701
+ workflow_names = [w.name for w in workflows_needing_sync]
702
+ if len(workflow_names) == 1:
703
+ suggestions.append(f"Sync model paths: cg workflow resolve \"{workflow_names[0]}\"")
704
+ else:
705
+ suggestions.append(f"Sync model paths in {len(workflow_names)} workflows: cg workflow resolve \"<name>\"")
706
+
707
+ # Check for workflows with download intents
708
+ workflows_with_downloads = []
709
+ for wf in status.workflow.analyzed_workflows:
710
+ download_intents = [m for m in wf.resolution.models_resolved if m.match_type == "download_intent"]
711
+ if download_intents:
712
+ workflows_with_downloads.append(wf.name)
713
+
714
+ # Workflows with issues (unresolved/ambiguous)
715
+ workflows_with_issues = [w.name for w in status.workflow.workflows_with_issues]
716
+ if workflows_with_issues:
717
+ if len(workflows_with_issues) == 1:
718
+ suggestions.append(f"Fix issues: cg workflow resolve \"{workflows_with_issues[0]}\"")
719
+ else:
720
+ suggestions.append("Fix workflows (pick one):")
721
+ for wf_name in workflows_with_issues[:3]:
722
+ suggestions.append(f" cg workflow resolve \"{wf_name}\"")
723
+ if len(workflows_with_issues) > 3:
724
+ suggestions.append(f" ... and {len(workflows_with_issues) - 3} more")
725
+
726
+ # Only suggest committing if there are uncommitted changes
727
+ if status.git.has_changes:
728
+ suggestions.append("Or commit anyway: cg commit -m \"...\" --allow-issues")
729
+
730
+ # Workflows with queued downloads (no other issues)
731
+ elif workflows_with_downloads:
732
+ if len(workflows_with_downloads) == 1:
733
+ suggestions.append(f"Complete downloads: cg workflow resolve \"{workflows_with_downloads[0]}\"")
734
+ else:
735
+ suggestions.append("Complete downloads (pick one):")
736
+ for wf_name in workflows_with_downloads[:3]:
737
+ suggestions.append(f" cg workflow resolve \"{wf_name}\"")
738
+
739
+ # Ready to commit (workflow changes OR git changes)
740
+ elif status.workflow.sync_status.has_changes and status.workflow.is_commit_safe:
741
+ suggestions.append("Commit workflows: cg commit -m \"<message>\"")
742
+ elif status.git.has_changes:
743
+ # Uncommitted pyproject changes without workflow issues
744
+ suggestions.append("Commit changes: cg commit -m \"<message>\"")
745
+
746
+ # Show suggestions if any
747
+ if suggestions:
748
+ print("\n💡 Next:")
749
+ for s in suggestions:
750
+ print(f" {s}")
751
+
752
+ def _show_git_changes(self, status: EnvironmentStatus) -> None:
753
+ """Helper method to show git changes in a structured way."""
754
+ # Show node changes
755
+ if status.git.nodes_added or status.git.nodes_removed:
756
+ print("\n Custom Nodes:")
757
+ for node in status.git.nodes_added:
758
+ if isinstance(node, dict):
759
+ name = node['name']
760
+ suffix = ' (development)' if node.get('is_development') else ''
761
+ print(f" + {name}{suffix}")
762
+ else:
763
+ # Backwards compatibility for string format
764
+ print(f" + {node}")
765
+ for node in status.git.nodes_removed:
766
+ if isinstance(node, dict):
767
+ name = node['name']
768
+ suffix = ' (development)' if node.get('is_development') else ''
769
+ print(f" - {name}{suffix}")
770
+ else:
771
+ # Backwards compatibility for string format
772
+ print(f" - {node}")
773
+
774
+ # Show dependency changes
775
+ if status.git.dependencies_added or status.git.dependencies_removed or status.git.dependencies_updated:
776
+ print("\n Python Packages:")
777
+ for dep in status.git.dependencies_added:
778
+ version = dep.get('version', 'any')
779
+ source = dep.get('source', '')
780
+ if source:
781
+ print(f" + {dep['name']} ({version}) [{source}]")
782
+ else:
783
+ print(f" + {dep['name']} ({version})")
784
+ for dep in status.git.dependencies_removed:
785
+ version = dep.get('version', 'any')
786
+ print(f" - {dep['name']} ({version})")
787
+ for dep in status.git.dependencies_updated:
788
+ old = dep.get('old_version', 'any')
789
+ new = dep.get('new_version', 'any')
790
+ print(f" ~ {dep['name']}: {old} → {new}")
791
+
792
+ # Show constraint changes
793
+ if status.git.constraints_added or status.git.constraints_removed:
794
+ print("\n Constraint Dependencies:")
795
+ for constraint in status.git.constraints_added:
796
+ print(f" + {constraint}")
797
+ for constraint in status.git.constraints_removed:
798
+ print(f" - {constraint}")
799
+
800
+ # Show workflow changes (tracking and content)
801
+ workflow_changes_shown = False
802
+
803
+ # Workflow tracking no longer needed - all workflows are automatically managed
804
+
805
+ # Show workflow file changes
806
+ if status.git.workflow_changes:
807
+ if not workflow_changes_shown:
808
+ print("\n Workflows:")
809
+ workflow_changes_shown = True
810
+ for workflow_name, git_status in status.git.workflow_changes.items():
811
+ if git_status == "modified":
812
+ print(f" ~ {workflow_name}.json")
813
+ elif git_status == "added":
814
+ print(f" + {workflow_name}.json")
815
+ elif git_status == "deleted":
816
+ print(f" - {workflow_name}.json")
817
+
818
+ @with_env_logging("log")
819
+ def log(self, args: argparse.Namespace, logger=None) -> None:
820
+ """Show commit history for this environment."""
821
+ env = self._get_env(args)
822
+
823
+ try:
824
+ limit = args.limit if hasattr(args, 'limit') else 20
825
+ history = env.get_commit_history(limit=limit)
826
+
827
+ if not history:
828
+ print("No commits yet")
829
+ print("\nTip: Run 'cg commit' to create your first commit")
830
+ return
831
+
832
+ print(f"Commit history for environment '{env.name}':\n")
833
+
834
+ if not args.verbose:
835
+ # Compact: hash + refs + message + relative date
836
+ for commit in history: # Already newest first
837
+ refs_display = f" ({commit['refs']})" if commit['refs'] else ""
838
+ print(f"{commit['hash']}{refs_display} {commit['message']} ({commit['date_relative']})")
839
+ print()
840
+ else:
841
+ # Verbose: multi-line with full info
842
+ for commit in history:
843
+ refs_display = f" ({commit['refs']})" if commit['refs'] else ""
844
+ print(f"Commit: {commit['hash']}{refs_display}")
845
+ print(f"Date: {commit['date'][:19]}")
846
+ print(f"Message: {commit['message']}")
847
+ print()
848
+
849
+ # Show detached HEAD status if applicable
850
+ current_branch = env.get_current_branch()
851
+ if current_branch is None:
852
+ print()
853
+ print("⚠️ You are currently in detached HEAD state")
854
+ print(" Commits will not be saved to any branch!")
855
+ print(" Create a branch: cg checkout -b <branch-name>")
856
+ print()
857
+
858
+ print("Use 'cg checkout <hash>' to view a specific commit")
859
+ print("Use 'cg revert <hash>' to undo changes from a commit (safe)")
860
+ print("Use 'cg checkout -b <branch> <hash>' to create branch from commit")
861
+
862
+ except Exception as e:
863
+ if logger:
864
+ logger.error(f"Failed to read commit history for environment '{env.name}': {e}", exc_info=True)
865
+ print(f"✗ Could not read commit history: {e}", file=sys.stderr)
866
+ sys.exit(1)
867
+
868
+ # === Node management ===
869
+
870
+ @with_env_logging("env node add")
871
+ def node_add(self, args: argparse.Namespace, logger=None) -> None:
872
+ """Add custom node(s) - directly modifies pyproject.toml."""
873
+ env = self._get_env(args)
874
+
875
+ # Batch mode: multiple nodes
876
+ if len(args.node_names) > 1:
877
+ print(f"📦 Adding {len(args.node_names)} nodes...")
878
+
879
+ # Create callbacks for progress display
880
+ def on_node_start(node_id, idx, total):
881
+ print(f" [{idx}/{total}] Installing {node_id}...", end=" ", flush=True)
882
+
883
+ def on_node_complete(node_id, success, error):
884
+ if success:
885
+ print("✓")
886
+ else:
887
+ print(f"✗ ({error})")
888
+
889
+ from comfygit_core.models.workflow import NodeInstallCallbacks
890
+ callbacks = NodeInstallCallbacks(
891
+ on_node_start=on_node_start,
892
+ on_node_complete=on_node_complete
893
+ )
894
+
895
+ # Install nodes with progress feedback
896
+ installed_count, failed_nodes = env.install_nodes_with_progress(
897
+ args.node_names,
898
+ callbacks=callbacks
899
+ )
900
+
901
+ if installed_count > 0:
902
+ print(f"\n✅ Installed {installed_count}/{len(args.node_names)} nodes")
903
+
904
+ if failed_nodes:
905
+ print(f"\n⚠️ Failed to install {len(failed_nodes)} nodes:")
906
+ for node_id, error in failed_nodes:
907
+ print(f" • {node_id}: {error}")
908
+
909
+ print(f"\nRun 'cg -e {env.name} env status' to review changes")
910
+ return
911
+
912
+ # Single node mode (original behavior)
913
+ node_name = args.node_names[0]
914
+
915
+ if args.dev:
916
+ print(f"📦 Adding development node: {node_name}")
917
+ else:
918
+ print(f"📦 Adding node: {node_name}")
919
+
920
+ # Create confirmation strategy for dev node replacement
921
+ from comfygit_core.strategies.confirmation import InteractiveConfirmStrategy
922
+ confirmation_strategy = InteractiveConfirmStrategy()
923
+
924
+ # Directly add the node
925
+ try:
926
+ node_info = env.add_node(
927
+ node_name,
928
+ is_development=args.dev,
929
+ no_test=args.no_test,
930
+ force=args.force,
931
+ confirmation_strategy=confirmation_strategy
932
+ )
933
+ except CDRegistryDataError as e:
934
+ # Registry data unavailable
935
+ formatted = NodeErrorFormatter.format_registry_error(e)
936
+ if logger:
937
+ logger.error(f"Registry data unavailable for node add: {e}", exc_info=True)
938
+ print(f"✗ Cannot add node - registry data unavailable", file=sys.stderr)
939
+ print(formatted, file=sys.stderr)
940
+ sys.exit(1)
941
+ except CDDependencyConflictError as e:
942
+ # Dependency conflict with enhanced formatting
943
+ formatted = NodeErrorFormatter.format_dependency_conflict_error(e, verbose=args.verbose)
944
+ if logger:
945
+ logger.error(f"Dependency conflict for '{node_name}': {e}", exc_info=True)
946
+ print(formatted, file=sys.stderr)
947
+ sys.exit(1)
948
+ except CDNodeConflictError as e:
949
+ # Use formatter to render error with CLI commands
950
+ formatted = NodeErrorFormatter.format_conflict_error(e)
951
+ if logger:
952
+ logger.error(f"Node conflict for '{node_name}': {e}", exc_info=True)
953
+ print(f"✗ Cannot add node '{node_name}'", file=sys.stderr)
954
+ print(formatted, file=sys.stderr)
955
+ sys.exit(1)
956
+ except Exception as e:
957
+ if logger:
958
+ logger.error(f"Node add failed for '{node_name}': {e}", exc_info=True)
959
+ print(f"✗ Failed to add node '{node_name}'", file=sys.stderr)
960
+ print(f" {e}", file=sys.stderr)
961
+ sys.exit(1)
962
+
963
+ if args.dev:
964
+ print(f"✓ Development node '{node_info.name}' added and tracked")
965
+ else:
966
+ print(f"✓ Node '{node_info.name}' added to pyproject.toml")
967
+
968
+ print(f"\nRun 'cg -e {env.name} env status' to review changes")
969
+
970
+ @with_env_logging("env node remove")
971
+ def node_remove(self, args: argparse.Namespace, logger=None) -> None:
972
+ """Remove custom node(s) - handles filesystem immediately."""
973
+ env = self._get_env(args)
974
+
975
+ # Batch mode: multiple nodes
976
+ if len(args.node_names) > 1:
977
+ print(f"🗑 Removing {len(args.node_names)} nodes...")
978
+
979
+ # Create callbacks for progress display
980
+ def on_node_start(node_id, idx, total):
981
+ print(f" [{idx}/{total}] Removing {node_id}...", end=" ", flush=True)
982
+
983
+ def on_node_complete(node_id, success, error):
984
+ if success:
985
+ print("✓")
986
+ else:
987
+ print(f"✗ ({error})")
988
+
989
+ from comfygit_core.models.workflow import NodeInstallCallbacks
990
+ callbacks = NodeInstallCallbacks(
991
+ on_node_start=on_node_start,
992
+ on_node_complete=on_node_complete
993
+ )
994
+
995
+ # Remove nodes with progress feedback
996
+ removed_count, failed_nodes = env.remove_nodes_with_progress(
997
+ args.node_names,
998
+ callbacks=callbacks
999
+ )
1000
+
1001
+ if removed_count > 0:
1002
+ print(f"\n✅ Removed {removed_count}/{len(args.node_names)} nodes")
1003
+
1004
+ if failed_nodes:
1005
+ print(f"\n⚠️ Failed to remove {len(failed_nodes)} nodes:")
1006
+ for node_id, error in failed_nodes:
1007
+ print(f" • {node_id}: {error}")
1008
+
1009
+ print(f"\nRun 'cg -e {env.name} env status' to review changes")
1010
+ return
1011
+
1012
+ # Single node mode (original behavior)
1013
+ node_name = args.node_names[0]
1014
+ untrack_only = getattr(args, 'untrack', False)
1015
+
1016
+ if untrack_only:
1017
+ print(f"🔓 Untracking node: {node_name}")
1018
+ else:
1019
+ print(f"🗑 Removing node: {node_name}")
1020
+
1021
+ # Remove the node (handles filesystem imperatively)
1022
+ try:
1023
+ result = env.remove_node(node_name, untrack_only=untrack_only)
1024
+ except Exception as e:
1025
+ if logger:
1026
+ logger.error(f"Node remove failed for '{node_name}': {e}", exc_info=True)
1027
+ print(f"✗ Failed to remove node '{node_name}'", file=sys.stderr)
1028
+ print(f" {e}", file=sys.stderr)
1029
+ sys.exit(1)
1030
+
1031
+ # Render result based on node type and action
1032
+ if result.filesystem_action == "none":
1033
+ # Untrack mode - no filesystem changes
1034
+ print(f"✓ Node '{result.name}' removed from tracking")
1035
+ print(" (filesystem unchanged)")
1036
+ elif result.source == "development":
1037
+ if result.filesystem_action == "disabled":
1038
+ print(f"ℹ️ Development node '{result.name}' removed from tracking")
1039
+ print(f" Files preserved at: custom_nodes/{result.name}.disabled/")
1040
+ else:
1041
+ print(f"✓ Development node '{result.name}' removed from tracking")
1042
+ else:
1043
+ print(f"✓ Node '{result.name}' removed from environment")
1044
+ if result.filesystem_action == "deleted":
1045
+ print(" (cached globally, can reinstall)")
1046
+
1047
+ print(f"\nRun 'cg -e {env.name} env status' to review changes")
1048
+
1049
+ @with_env_logging("env node prune")
1050
+ def node_prune(self, args: argparse.Namespace, logger=None) -> None:
1051
+ """Remove unused custom nodes from environment."""
1052
+ env = self._get_env(args)
1053
+
1054
+ # Get unused nodes
1055
+ exclude = args.exclude if hasattr(args, 'exclude') and args.exclude else None
1056
+ try:
1057
+ unused = env.get_unused_nodes(exclude=exclude)
1058
+ except Exception as e:
1059
+ if logger:
1060
+ logger.error(f"Failed to get unused nodes: {e}", exc_info=True)
1061
+ print(f"✗ Failed to get unused nodes: {e}", file=sys.stderr)
1062
+ sys.exit(1)
1063
+
1064
+ if not unused:
1065
+ print("✓ No unused nodes found")
1066
+ return
1067
+
1068
+ # Display table
1069
+ print(f"\nFound {len(unused)} unused node(s):\n")
1070
+ for node in unused:
1071
+ node_id = node.registry_id or node.name
1072
+ print(f" • {node_id}")
1073
+
1074
+ # Confirm unless --yes flag
1075
+ if not args.yes:
1076
+ try:
1077
+ confirm = input(f"\nRemove {len(unused)} node(s)? [y/N]: ")
1078
+ if confirm.lower() != 'y':
1079
+ print("Cancelled")
1080
+ return
1081
+ except (EOFError, KeyboardInterrupt):
1082
+ print("\nCancelled")
1083
+ return
1084
+
1085
+ # Remove with progress
1086
+ print(f"\n🗑 Pruning {len(unused)} unused nodes...")
1087
+
1088
+ def on_node_start(node_id, idx, total):
1089
+ print(f" [{idx}/{total}] Removing {node_id}...", end=" ", flush=True)
1090
+
1091
+ def on_node_complete(node_id, success, error):
1092
+ if success:
1093
+ print("✓")
1094
+ else:
1095
+ print(f"✗ ({error})")
1096
+
1097
+ from comfygit_core.models.workflow import NodeInstallCallbacks
1098
+ callbacks = NodeInstallCallbacks(
1099
+ on_node_start=on_node_start,
1100
+ on_node_complete=on_node_complete
1101
+ )
1102
+
1103
+ try:
1104
+ success_count, failed = env.prune_unused_nodes(exclude=exclude, callbacks=callbacks)
1105
+ except Exception as e:
1106
+ if logger:
1107
+ logger.error(f"Prune failed: {e}", exc_info=True)
1108
+ print(f"\n✗ Prune failed: {e}", file=sys.stderr)
1109
+ sys.exit(1)
1110
+
1111
+ print(f"\n✓ Removed {success_count} node(s)")
1112
+ if failed:
1113
+ print(f"✗ Failed to remove {len(failed)} node(s):")
1114
+ for node_id, error in failed:
1115
+ print(f" • {node_id}: {error}")
1116
+ sys.exit(1)
1117
+
1118
+ @with_env_logging("env node list")
1119
+ def node_list(self, args: argparse.Namespace) -> None:
1120
+ """List custom nodes in the environment."""
1121
+ env = self._get_env(args)
1122
+
1123
+ nodes = env.list_nodes()
1124
+
1125
+ if not nodes:
1126
+ print("No custom nodes installed")
1127
+ return
1128
+
1129
+ print(f"Custom nodes in '{env.name}':")
1130
+ for node in nodes:
1131
+ # Format version display based on source type
1132
+ version_suffix = ""
1133
+ if node.version:
1134
+ if node.source == "git":
1135
+ version_suffix = f" @ {node.version[:8]}"
1136
+ elif node.source == "registry":
1137
+ version_suffix = f" v{node.version}"
1138
+ elif node.source == "development":
1139
+ version_suffix = " (dev)"
1140
+
1141
+ print(f" • {node.registry_id or node.name} ({node.source}){version_suffix}")
1142
+
1143
+ @with_env_logging("env node update")
1144
+ def node_update(self, args: argparse.Namespace, logger=None) -> None:
1145
+ """Update a custom node."""
1146
+ from comfygit_core.strategies.confirmation import (
1147
+ AutoConfirmStrategy,
1148
+ InteractiveConfirmStrategy,
1149
+ )
1150
+
1151
+ env = self._get_env(args)
1152
+
1153
+ print(f"🔄 Updating node: {args.node_name}")
1154
+
1155
+ # Choose confirmation strategy
1156
+ strategy = AutoConfirmStrategy() if args.yes else InteractiveConfirmStrategy()
1157
+
1158
+ try:
1159
+ result = env.update_node(
1160
+ args.node_name,
1161
+ confirmation_strategy=strategy,
1162
+ no_test=args.no_test
1163
+ )
1164
+
1165
+ if result.changed:
1166
+ print(f"✓ {result.message}")
1167
+
1168
+ if result.source == 'development':
1169
+ if result.requirements_added:
1170
+ print(" Added dependencies:")
1171
+ for dep in result.requirements_added:
1172
+ print(f" + {dep}")
1173
+ if result.requirements_removed:
1174
+ print(" Removed dependencies:")
1175
+ for dep in result.requirements_removed:
1176
+ print(f" - {dep}")
1177
+
1178
+ print("\nRun 'cg status' to review changes")
1179
+ else:
1180
+ print(f"ℹ️ {result.message}")
1181
+
1182
+ except Exception as e:
1183
+ if logger:
1184
+ logger.error(f"Node update failed for '{args.node_name}': {e}", exc_info=True)
1185
+ print(f"✗ Failed to update node '{args.node_name}'", file=sys.stderr)
1186
+ print(f" {e}", file=sys.stderr)
1187
+ sys.exit(1)
1188
+
1189
+ # === Constraint management ===
1190
+
1191
+ @with_env_logging("env constraint add")
1192
+ def constraint_add(self, args: argparse.Namespace, logger=None) -> None:
1193
+ """Add constraint dependencies to [tool.uv]."""
1194
+ env = self._get_env(args)
1195
+
1196
+ print(f"📦 Adding constraints: {' '.join(args.packages)}")
1197
+
1198
+ # Add each constraint
1199
+ try:
1200
+ for package in args.packages:
1201
+ env.add_constraint(package)
1202
+ except Exception as e:
1203
+ if logger:
1204
+ logger.error(f"Constraint add failed: {e}", exc_info=True)
1205
+ print("✗ Failed to add constraints", file=sys.stderr)
1206
+ print(f" {e}", file=sys.stderr)
1207
+ sys.exit(1)
1208
+
1209
+ print(f"✓ Added {len(args.packages)} constraint(s) to pyproject.toml")
1210
+ print(f"\nRun 'cg -e {env.name} constraint list' to view all constraints")
1211
+
1212
+ @with_env_logging("env constraint list")
1213
+ def constraint_list(self, args: argparse.Namespace) -> None:
1214
+ """List constraint dependencies from [tool.uv]."""
1215
+ env = self._get_env(args)
1216
+
1217
+ # Get constraints from pyproject.toml
1218
+ constraints = env.list_constraints()
1219
+
1220
+ if not constraints:
1221
+ print("No constraint dependencies configured")
1222
+ return
1223
+
1224
+ print(f"Constraint dependencies in '{env.name}':")
1225
+ for constraint in constraints:
1226
+ print(f" • {constraint}")
1227
+
1228
+ @with_env_logging("env constraint remove")
1229
+ def constraint_remove(self, args: argparse.Namespace, logger=None) -> None:
1230
+ """Remove constraint dependencies from [tool.uv]."""
1231
+ env = self._get_env(args)
1232
+
1233
+ print(f"🗑 Removing constraints: {' '.join(args.packages)}")
1234
+
1235
+ # Remove each constraint
1236
+ removed_count = 0
1237
+ try:
1238
+ for package in args.packages:
1239
+ if env.remove_constraint(package):
1240
+ removed_count += 1
1241
+ else:
1242
+ print(f" Warning: constraint '{package}' not found")
1243
+ except Exception as e:
1244
+ if logger:
1245
+ logger.error(f"Constraint remove failed: {e}", exc_info=True)
1246
+ print("✗ Failed to remove constraints", file=sys.stderr)
1247
+ print(f" {e}", file=sys.stderr)
1248
+ sys.exit(1)
1249
+
1250
+ if removed_count > 0:
1251
+ print(f"✓ Removed {removed_count} constraint(s) from pyproject.toml")
1252
+ else:
1253
+ print("No constraints were removed")
1254
+
1255
+ print(f"\nRun 'cg -e {env.name} constraint list' to view remaining constraints")
1256
+
1257
+ # === Python dependency management ===
1258
+
1259
+ @with_env_logging("env py add")
1260
+ def py_add(self, args: argparse.Namespace, logger=None) -> None:
1261
+ """Add Python dependencies to the environment."""
1262
+ env = self._get_env(args)
1263
+
1264
+ # Validate arguments: must provide either packages or requirements file
1265
+ if not args.packages and not args.requirements:
1266
+ print("✗ Error: Must specify packages or use -r/--requirements", file=sys.stderr)
1267
+ print("Examples:", file=sys.stderr)
1268
+ print(" cg py add requests pillow", file=sys.stderr)
1269
+ print(" cg py add -r requirements.txt", file=sys.stderr)
1270
+ sys.exit(1)
1271
+
1272
+ if args.packages and args.requirements:
1273
+ print("✗ Error: Cannot specify both packages and -r/--requirements", file=sys.stderr)
1274
+ sys.exit(1)
1275
+
1276
+ # Resolve requirements file path to absolute path (UV runs in .cec directory)
1277
+ requirements_file = None
1278
+ if args.requirements:
1279
+ requirements_file = args.requirements.resolve()
1280
+ if not requirements_file.exists():
1281
+ print(f"✗ Error: Requirements file not found: {args.requirements}", file=sys.stderr)
1282
+ sys.exit(1)
1283
+
1284
+ # Display what we're doing
1285
+ upgrade_text = " (with upgrade)" if args.upgrade else ""
1286
+ if requirements_file:
1287
+ print(f"📦 Adding packages from {args.requirements}{upgrade_text}...")
1288
+ else:
1289
+ print(f"📦 Adding {len(args.packages)} package(s){upgrade_text}...")
1290
+
1291
+ try:
1292
+ env.add_dependencies(
1293
+ packages=args.packages or None,
1294
+ requirements_file=requirements_file,
1295
+ upgrade=args.upgrade,
1296
+ group=getattr(args, 'group', None),
1297
+ dev=getattr(args, 'dev', False),
1298
+ editable=getattr(args, 'editable', False),
1299
+ bounds=getattr(args, 'bounds', None)
1300
+ )
1301
+ except UVCommandError as e:
1302
+ if logger:
1303
+ logger.error(f"Failed to add dependencies: {e}", exc_info=True)
1304
+ if e.stderr:
1305
+ logger.error(f"UV stderr:\n{e.stderr}")
1306
+ print(f"✗ Failed to add packages", file=sys.stderr)
1307
+ if e.stderr:
1308
+ print(f"\n{e.stderr}", file=sys.stderr)
1309
+ else:
1310
+ print(f" {e}", file=sys.stderr)
1311
+ sys.exit(1)
1312
+
1313
+ if requirements_file:
1314
+ print(f"\n✓ Added packages from {args.requirements}")
1315
+ else:
1316
+ print(f"\n✓ Added {len(args.packages)} package(s) to dependencies")
1317
+ print(f"\nRun 'cg -e {env.name} status' to review changes")
1318
+
1319
+ @with_env_logging("env py remove")
1320
+ def py_remove(self, args: argparse.Namespace, logger=None) -> None:
1321
+ """Remove Python dependencies from the environment."""
1322
+ env = self._get_env(args)
1323
+
1324
+ # Handle --group flag (remove from dependency group)
1325
+ if hasattr(args, 'group') and args.group:
1326
+ group_name = args.group
1327
+ print(f"🗑 Removing {len(args.packages)} package(s) from group '{group_name}'...")
1328
+
1329
+ try:
1330
+ result = env.pyproject.dependencies.remove_from_group(group_name, args.packages)
1331
+ except ValueError as e:
1332
+ print(f"✗ {e}", file=sys.stderr)
1333
+ sys.exit(1)
1334
+
1335
+ # Show results
1336
+ if not result['removed']:
1337
+ if len(result['skipped']) == 1:
1338
+ print(f"\nℹ️ Package '{result['skipped'][0]}' is not in group '{group_name}'")
1339
+ else:
1340
+ print(f"\nℹ️ None of the specified packages are in group '{group_name}':")
1341
+ for pkg in result['skipped']:
1342
+ print(f" • {pkg}")
1343
+ return
1344
+
1345
+ print(f"\n✓ Removed {len(result['removed'])} package(s) from group '{group_name}'")
1346
+
1347
+ if result['skipped']:
1348
+ print(f"\nℹ️ Skipped {len(result['skipped'])} package(s) not in group:")
1349
+ for pkg in result['skipped']:
1350
+ print(f" • {pkg}")
1351
+
1352
+ print(f"\nRun 'cg -e {env.name} py list --all' to view remaining groups")
1353
+ return
1354
+
1355
+ # Default behavior: remove from main dependencies
1356
+ print(f"🗑 Removing {len(args.packages)} package(s)...")
1357
+
1358
+ try:
1359
+ result = env.remove_dependencies(args.packages)
1360
+ except UVCommandError as e:
1361
+ if logger:
1362
+ logger.error(f"Failed to remove dependencies: {e}", exc_info=True)
1363
+ if e.stderr:
1364
+ logger.error(f"UV stderr:\n{e.stderr}")
1365
+ print(f"✗ Failed to remove packages", file=sys.stderr)
1366
+ if e.stderr:
1367
+ print(f"\n{e.stderr}", file=sys.stderr)
1368
+ else:
1369
+ print(f" {e}", file=sys.stderr)
1370
+ sys.exit(1)
1371
+
1372
+ # If nothing was removed, show appropriate message
1373
+ if not result['removed']:
1374
+ if len(result['skipped']) == 1:
1375
+ print(f"\nℹ️ Package '{result['skipped'][0]}' is not in dependencies (already removed or never added)")
1376
+ else:
1377
+ print(f"\nℹ️ None of the specified packages are in dependencies:")
1378
+ for pkg in result['skipped']:
1379
+ print(f" • {pkg}")
1380
+ return
1381
+
1382
+ # Show successful removals
1383
+ print(f"\n✓ Removed {len(result['removed'])} package(s) from dependencies")
1384
+
1385
+ # Show skipped packages if any
1386
+ if result['skipped']:
1387
+ print(f"\nℹ️ Skipped {len(result['skipped'])} package(s) not in dependencies:")
1388
+ for pkg in result['skipped']:
1389
+ print(f" • {pkg}")
1390
+
1391
+ print(f"\nRun 'cg -e {env.name} status' to review changes")
1392
+
1393
+ @with_env_logging("env py remove-group")
1394
+ def py_remove_group(self, args: argparse.Namespace, logger=None) -> None:
1395
+ """Remove an entire dependency group."""
1396
+ env = self._get_env(args)
1397
+ group_name = args.group
1398
+
1399
+ print(f"🗑 Removing dependency group: {group_name}")
1400
+
1401
+ try:
1402
+ env.pyproject.dependencies.remove_group(group_name)
1403
+ except ValueError as e:
1404
+ print(f"✗ {e}", file=sys.stderr)
1405
+ sys.exit(1)
1406
+
1407
+ print(f"\n✓ Removed dependency group '{group_name}'")
1408
+ print(f"\nRun 'cg -e {env.name} py list --all' to view remaining groups")
1409
+
1410
+ @with_env_logging("env py uv")
1411
+ def py_uv(self, args: argparse.Namespace, logger=None) -> None:
1412
+ """Direct UV command passthrough for advanced users."""
1413
+ env = self._get_env(args)
1414
+
1415
+ if not args.uv_args:
1416
+ # Show helpful usage message
1417
+ print("Usage: cg py uv <uv-command> [uv-args...]")
1418
+ print("Example: cg py uv add --group optional-cuda sageattention")
1419
+ print("\nThis is direct UV access. See 'uv --help' for options.")
1420
+ sys.exit(1)
1421
+
1422
+ # Build UV command with environment context
1423
+ cmd = [env.uv_manager.uv._binary] + args.uv_args
1424
+
1425
+ # Execute with environment context (cwd and env vars)
1426
+ result = subprocess.run(
1427
+ cmd,
1428
+ cwd=env.cec_path,
1429
+ env={
1430
+ **os.environ,
1431
+ "UV_PROJECT_ENVIRONMENT": str(env.venv_path),
1432
+ "UV_CACHE_DIR": str(env.workspace_paths.cache / "uv_cache"),
1433
+ }
1434
+ )
1435
+ sys.exit(result.returncode)
1436
+
1437
+ @with_env_logging("env py list")
1438
+ def py_list(self, args: argparse.Namespace) -> None:
1439
+ """List Python dependencies."""
1440
+ env = self._get_env(args)
1441
+
1442
+ all_deps = env.list_dependencies(all=args.all)
1443
+
1444
+ # Check if there are any dependencies at all
1445
+ total_count = sum(len(deps) for deps in all_deps.values())
1446
+ if total_count == 0:
1447
+ print("No project dependencies or dependency groups")
1448
+ return
1449
+
1450
+ # Display dependencies grouped by section
1451
+ first_group = True
1452
+ for group_name, group_deps in all_deps.items():
1453
+ if not group_deps:
1454
+ continue
1455
+
1456
+ if not first_group:
1457
+ print() # Blank line between groups
1458
+ first_group = False
1459
+
1460
+ # Format the header
1461
+ if group_name == "dependencies":
1462
+ print(f"Dependencies ({len(group_deps)}):")
1463
+ for dep in group_deps:
1464
+ print(f" • {dep}")
1465
+ else:
1466
+ print(f"{group_name} ({len(group_deps)}):")
1467
+ for dep in group_deps:
1468
+ print(f" • {dep}")
1469
+
1470
+ # Show tip if not showing all groups
1471
+ if not args.all and len(all_deps) == 1:
1472
+ print("\nTip: Use --all to see dependency groups")
1473
+
1474
+ # === Git-based operations ===
1475
+
1476
+ @with_env_logging("env repair")
1477
+ def repair(self, args: argparse.Namespace, logger=None) -> None:
1478
+ """Repair environment to match pyproject.toml (for manual edits or git operations)."""
1479
+ env = self._get_env(args)
1480
+
1481
+ # Get status first
1482
+ status = env.status()
1483
+
1484
+ if status.is_synced:
1485
+ print("✓ No changes to apply")
1486
+ return
1487
+
1488
+ # Get preview for display and later use
1489
+ preview: dict[str, Any] = status.get_sync_preview()
1490
+
1491
+ # Confirm unless --yes
1492
+ if not args.yes:
1493
+
1494
+ # Check if there are actually any changes to show
1495
+ has_changes = (
1496
+ preview['nodes_to_install'] or
1497
+ preview['nodes_to_remove'] or
1498
+ preview['nodes_to_update'] or
1499
+ preview['packages_to_sync'] or
1500
+ preview['workflows_to_add'] or
1501
+ preview['workflows_to_update'] or
1502
+ preview['workflows_to_remove'] or
1503
+ preview.get('models_downloadable') or
1504
+ preview.get('models_unavailable')
1505
+ )
1506
+
1507
+ if not has_changes:
1508
+ print("✓ No changes to apply (environment is synced)")
1509
+ return
1510
+
1511
+ print("This will apply the following changes:")
1512
+
1513
+ if preview['nodes_to_install']:
1514
+ print(f" • Install {len(preview['nodes_to_install'])} missing nodes:")
1515
+ for node in preview['nodes_to_install']:
1516
+ print(f" - {node}")
1517
+
1518
+ if preview['nodes_to_remove']:
1519
+ print(f" • Remove {len(preview['nodes_to_remove'])} extra nodes:")
1520
+ for node in preview['nodes_to_remove']:
1521
+ print(f" - {node}")
1522
+
1523
+ if preview['nodes_to_update']:
1524
+ print(f" • Update {len(preview['nodes_to_update'])} nodes to correct versions:")
1525
+ for mismatch in preview['nodes_to_update']:
1526
+ print(f" - {mismatch['name']}: {mismatch['actual']} → {mismatch['expected']}")
1527
+
1528
+ if preview['packages_to_sync']:
1529
+ print(" • Sync Python packages")
1530
+
1531
+ # Show workflow changes categorized by operation
1532
+ if preview['workflows_to_add']:
1533
+ print(f" • Add {len(preview['workflows_to_add'])} new workflow(s) to ComfyUI:")
1534
+ for workflow_name in preview['workflows_to_add']:
1535
+ print(f" - {workflow_name}")
1536
+
1537
+ if preview['workflows_to_update']:
1538
+ print(f" • Update {len(preview['workflows_to_update'])} workflow(s) in ComfyUI:")
1539
+ for workflow_name in preview['workflows_to_update']:
1540
+ print(f" - {workflow_name}")
1541
+
1542
+ if preview['workflows_to_remove']:
1543
+ print(f" • Remove {len(preview['workflows_to_remove'])} workflow(s) from ComfyUI:")
1544
+ for workflow_name in preview['workflows_to_remove']:
1545
+ print(f" - {workflow_name}")
1546
+
1547
+ # Show model download preview with URLs and paths
1548
+ if preview.get('models_downloadable'):
1549
+ print(f"\n Models:")
1550
+ count = len(preview['models_downloadable'])
1551
+ print(f" • Download {count} missing model(s):\n")
1552
+ for idx, missing_info in enumerate(preview['models_downloadable'][:5], 1):
1553
+ print(f" [{idx}/{min(count, 5)}] {missing_info.model.filename} ({missing_info.criticality})")
1554
+ # Show source URL
1555
+ if missing_info.model.sources:
1556
+ source_url = missing_info.model.sources[0]
1557
+ # Truncate long URLs
1558
+ if len(source_url) > 70:
1559
+ display_url = source_url[:67] + "..."
1560
+ else:
1561
+ display_url = source_url
1562
+ print(f" From: {display_url}")
1563
+ # Show target path
1564
+ print(f" To: {missing_info.model.relative_path}")
1565
+ if count > 5:
1566
+ print(f"\n ... and {count - 5} more")
1567
+
1568
+ if preview.get('models_unavailable'):
1569
+ print(f"\n ⚠️ Models unavailable:")
1570
+ for missing_info in preview['models_unavailable'][:3]:
1571
+ print(f" - {missing_info.model.filename} (no sources)")
1572
+
1573
+ response = input("\nContinue? (y/N): ")
1574
+ if response.lower() != 'y':
1575
+ print("Cancelled")
1576
+ return
1577
+
1578
+ print(f"⚙️ Applying changes to: {env.name}")
1579
+
1580
+ # Create callbacks for node and model progress
1581
+ from comfygit_core.models.workflow import BatchDownloadCallbacks, NodeInstallCallbacks
1582
+ from .utils.progress import create_progress_callback
1583
+
1584
+ # Node installation callbacks
1585
+ def on_node_start(node_id, idx, total):
1586
+ print(f" [{idx}/{total}] Installing {node_id}...", end=" ", flush=True)
1587
+
1588
+ def on_node_complete(node_id, success, error):
1589
+ if success:
1590
+ print("✓")
1591
+ else:
1592
+ print(f"✗ ({error})")
1593
+
1594
+ node_callbacks = NodeInstallCallbacks(
1595
+ on_node_start=on_node_start,
1596
+ on_node_complete=on_node_complete
1597
+ )
1598
+
1599
+ # Model download callbacks
1600
+ def on_file_start(filename, idx, total):
1601
+ print(f" [{idx}/{total}] Downloading {filename}...")
1602
+
1603
+ def on_file_complete(filename, success, error):
1604
+ print() # New line after progress bar
1605
+ if success:
1606
+ print(f" ✓ {filename}")
1607
+ else:
1608
+ print(f" ✗ {filename}: {error}")
1609
+
1610
+ model_callbacks = BatchDownloadCallbacks(
1611
+ on_file_start=on_file_start,
1612
+ on_file_progress=create_progress_callback(),
1613
+ on_file_complete=on_file_complete
1614
+ )
1615
+
1616
+ # Apply changes with node and model callbacks
1617
+ try:
1618
+ # Show header if nodes to install
1619
+ if preview['nodes_to_install']:
1620
+ print("\n⬇️ Installing nodes...")
1621
+
1622
+ model_strategy = getattr(args, 'models', 'all')
1623
+ sync_result = env.sync(
1624
+ model_strategy=model_strategy,
1625
+ model_callbacks=model_callbacks,
1626
+ node_callbacks=node_callbacks
1627
+ )
1628
+
1629
+ # Show completion message if nodes were installed
1630
+ if preview['nodes_to_install']:
1631
+ print() # Blank line after node installation
1632
+
1633
+ # Check for errors
1634
+ if not sync_result.success:
1635
+ for error in sync_result.errors:
1636
+ print(f"⚠️ {error}", file=sys.stderr)
1637
+
1638
+ # Show model download summary
1639
+ if sync_result.models_downloaded:
1640
+ print(f"\n✓ Downloaded {len(sync_result.models_downloaded)} model(s)")
1641
+
1642
+ if sync_result.models_failed:
1643
+ print(f"\n⚠️ {len(sync_result.models_failed)} model(s) failed:")
1644
+ for filename, error in sync_result.models_failed[:3]:
1645
+ print(f" • {filename}: {error}")
1646
+
1647
+ except Exception as e:
1648
+ if logger:
1649
+ logger.error(f"Sync failed for environment '{env.name}': {e}", exc_info=True)
1650
+ print(f"✗ Failed to apply changes: {e}", file=sys.stderr)
1651
+ sys.exit(1)
1652
+
1653
+ print("✓ Changes applied successfully!")
1654
+ print(f"\nEnvironment '{env.name}' is ready to use")
1655
+
1656
+ @with_env_logging("env checkout")
1657
+ def checkout(self, args: argparse.Namespace, logger=None) -> None:
1658
+ """Checkout commits, branches, or files."""
1659
+ from .strategies.rollback import AutoRollbackStrategy, InteractiveRollbackStrategy
1660
+
1661
+ env = self._get_env(args)
1662
+
1663
+ try:
1664
+ if args.branch:
1665
+ # Create new branch and switch (git checkout -b semantics)
1666
+ start_point = args.ref if args.ref is not None else "HEAD"
1667
+ print(f"Creating and switching to branch '{args.branch}'...")
1668
+ env.create_and_switch_branch(args.branch, start_point=start_point)
1669
+ print(f"✓ Switched to new branch '{args.branch}'")
1670
+ else:
1671
+ # Just checkout ref - ref is required when not using -b
1672
+ if args.ref is None:
1673
+ print("✗ Error: ref argument is required when not using -b", file=sys.stderr)
1674
+ sys.exit(1)
1675
+
1676
+ print(f"Checking out '{args.ref}'...")
1677
+
1678
+ # Choose strategy
1679
+ strategy = AutoRollbackStrategy() if args.yes or args.force else InteractiveRollbackStrategy()
1680
+
1681
+ env.checkout(args.ref, strategy=strategy, force=args.force)
1682
+
1683
+ # Check if detached HEAD
1684
+ current_branch = env.get_current_branch()
1685
+ if current_branch is None:
1686
+ print(f"✓ HEAD is now at {args.ref} (detached)")
1687
+ print(" You are in 'detached HEAD' state. To keep changes:")
1688
+ print(f" cg checkout -b <new-branch-name>")
1689
+ else:
1690
+ print(f"✓ Switched to branch '{current_branch}'")
1691
+ except Exception as e:
1692
+ if logger:
1693
+ logger.error(f"Checkout failed: {e}", exc_info=True)
1694
+ print(f"✗ Checkout failed: {e}", file=sys.stderr)
1695
+ sys.exit(1)
1696
+
1697
+ @with_env_logging("env branch")
1698
+ def branch(self, args: argparse.Namespace, logger=None) -> None:
1699
+ """Manage branches."""
1700
+ env = self._get_env(args)
1701
+
1702
+ try:
1703
+ if args.name is None:
1704
+ # List branches
1705
+ branches = env.list_branches()
1706
+ if not branches:
1707
+ print("No branches found")
1708
+ return
1709
+
1710
+ print("Branches:")
1711
+ is_detached = False
1712
+ for name, is_current in branches:
1713
+ marker = "* " if is_current else " "
1714
+ print(f"{marker}{name}")
1715
+ if is_current and 'detached' in name.lower():
1716
+ is_detached = True
1717
+
1718
+ # Show help if in detached HEAD
1719
+ if is_detached:
1720
+ print()
1721
+ print("⚠️ You are in detached HEAD state")
1722
+ print(" To save your work, create a branch:")
1723
+ print(" cg checkout -b <branch-name>")
1724
+ elif args.delete or args.force_delete:
1725
+ # Delete branch
1726
+ force = args.force_delete
1727
+ print(f"Deleting branch '{args.name}'...")
1728
+ env.delete_branch(args.name, force=force)
1729
+ print(f"✓ Deleted branch '{args.name}'")
1730
+ else:
1731
+ # Create branch (don't switch)
1732
+ print(f"Creating branch '{args.name}'...")
1733
+ env.create_branch(args.name)
1734
+ print(f"✓ Created branch '{args.name}'")
1735
+ except Exception as e:
1736
+ if logger:
1737
+ logger.error(f"Branch operation failed: {e}", exc_info=True)
1738
+ print(f"✗ Branch operation failed: {e}", file=sys.stderr)
1739
+ sys.exit(1)
1740
+
1741
+ @with_env_logging("env switch")
1742
+ def switch(self, args: argparse.Namespace, logger=None) -> None:
1743
+ """Switch to branch."""
1744
+ env = self._get_env(args)
1745
+
1746
+ try:
1747
+ print(f"Switching to branch '{args.branch}'...")
1748
+ env.switch_branch(args.branch, create=args.create)
1749
+ print(f"✓ Switched to branch '{args.branch}'")
1750
+ except Exception as e:
1751
+ if logger:
1752
+ logger.error(f"Switch failed: {e}", exc_info=True)
1753
+ print(f"✗ Switch failed: {e}", file=sys.stderr)
1754
+ sys.exit(1)
1755
+
1756
+ @with_env_logging("env reset")
1757
+ def reset_git(self, args: argparse.Namespace, logger=None) -> None:
1758
+ """Reset HEAD to ref (git-native reset)."""
1759
+ from .strategies.rollback import InteractiveRollbackStrategy
1760
+
1761
+ env = self._get_env(args)
1762
+
1763
+ # Determine mode
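+ # (git-native semantics: --soft keeps index and working tree, --mixed resets the index, --hard also discards working-tree changes)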
1764
+ if args.hard:
1765
+ mode = "hard"
1766
+ elif args.soft:
1767
+ mode = "soft"
1768
+ else:
1769
+ mode = "mixed" # default
1770
+
1771
+ try:
1772
+ # Choose strategy for hard mode
1773
+ strategy = None
1774
+ if mode == "hard" and not args.yes:
1775
+ strategy = InteractiveRollbackStrategy()
1776
+
1777
+ print(f"Resetting to '{args.ref}' (mode: {mode})...")
1778
+ env.reset(args.ref, mode=mode, strategy=strategy, force=args.yes)
1779
+ print(f"✓ Reset to '{args.ref}'")
1780
+ except Exception as e:
1781
+ if logger:
1782
+ logger.error(f"Reset failed: {e}", exc_info=True)
1783
+ print(f"✗ Reset failed: {e}", file=sys.stderr)
1784
+ sys.exit(1)
1785
+
1786
+ @with_env_logging("env merge")
1787
+ def merge(self, args: argparse.Namespace, logger=None) -> None:
1788
+ """Merge branch into current with atomic conflict resolution."""
1789
+ env = self._get_env(args)
1790
+
1791
+ try:
1792
+ current = env.get_current_branch()
1793
+ if current is None:
1794
+ print("✗ Cannot merge while in detached HEAD state")
1795
+ sys.exit(1)
1796
+
1797
+ # Phase 1: Preview
1798
+ diff = env.preview_merge(args.branch)
1799
+
1800
+ if not diff.has_changes:
1801
+ if diff.is_already_merged:
1802
+ print(f"\n✓ '{args.branch}' is already merged into '{current}'.")
1803
+ elif diff.is_fast_forward:
1804
+ print(f"\n✓ '{args.branch}' has commits but no ComfyGit changes.")
1805
+ print(" Merge will bring in commits without affecting nodes/models/workflows.")
1806
+ else:
1807
+ print(f"\n✓ No ComfyGit configuration changes to merge from '{args.branch}'.")
1808
+ return
1809
+
1810
+ # Preview mode - read-only, just show what would change
1811
+ if getattr(args, "preview", False):
1812
+ self._display_diff_preview(diff)
1813
+ if diff.has_conflicts:
1814
+ print("\n⚠️ Conflicts will occur. Review before merging.")
1815
+ return
1816
+
1817
+ self._display_diff_preview(diff)
1818
+
1819
+ # Phase 2: Collect resolutions if conflicts exist
1820
+ resolutions: dict = {}
1821
+ strategy_option: str | None = None
1822
+ auto_resolve = getattr(args, "auto_resolve", None)
1823
+ strategy = getattr(args, "strategy", None)
1824
+
1825
+ if strategy:
1826
+ # Global strategy flag - use for all conflicts
1827
+ strategy_option = strategy
1828
+ elif auto_resolve:
1829
+ # Auto-resolve flag
1830
+ from .strategies.conflict_resolver import AutoConflictResolver
1831
+ resolver = AutoConflictResolver(auto_resolve)
1832
+ resolutions = resolver.resolve_all(diff)
1833
+ strategy_option = "theirs" if auto_resolve == "theirs" else "ours"
1834
+ elif diff.has_conflicts:
1835
+ # Interactive conflict resolution - ONLY workflow conflicts shown
1836
+ from .strategies.conflict_resolver import InteractiveConflictResolver
1837
+
1838
+ print(f"\n⚠️ Conflicts detected:")
1839
+ resolver = InteractiveConflictResolver()
1840
+ resolutions = resolver.resolve_all(diff)
1841
+
1842
+ # All conflicts must be resolved - no skip option
1843
+ if not resolutions and diff.has_conflicts:
1844
+ print("\n✗ No conflicts were resolved. Merge aborted.")
1845
+ sys.exit(1)
1846
+
1847
+ # Determine strategy from resolutions
1848
+ unique_resolutions = set(resolutions.values())
1849
+ if unique_resolutions == {"take_target"}:
1850
+ strategy_option = "theirs"
1851
+ elif unique_resolutions == {"take_base"}:
1852
+ strategy_option = "ours"
1853
+ # Mixed: no global strategy, rely on per-file resolution
1854
+
1855
+ # Phase 3: Execute merge
1856
+ print(f"\nMerging '{args.branch}' into '{current}'...")
1857
+
1858
+ # Use atomic merge when we have per-file resolutions (mixed mine/theirs)
1859
+ # Otherwise use standard merge with global strategy
1860
+ if resolutions and strategy_option is None:
1861
+ # Mixed resolutions - use atomic executor for per-file resolution
1862
+ result = env.execute_atomic_merge(args.branch, resolutions)
1863
+ if not result.success:
1864
+ print(f"✗ Merge failed: {result.error}", file=sys.stderr)
1865
+ sys.exit(1)
1866
+ else:
1867
+ # Global strategy or no resolutions - use standard merge
1868
+ env.merge_branch(
1869
+ args.branch,
1870
+ message=getattr(args, "message", None),
1871
+ strategy_option=strategy_option,
1872
+ )
1873
+
1874
+ print(f"✓ Merged '{args.branch}' into '{current}'")
1875
+ except Exception as e:
1876
+ if logger:
1877
+ logger.error(f"Merge failed: {e}", exc_info=True)
1878
+ print(f"✗ Merge failed: {e}", file=sys.stderr)
1879
+ sys.exit(1)
1880
+
1881
+ @with_env_logging("env revert")
1882
+ def revert(self, args: argparse.Namespace, logger=None) -> None:
1883
+ """Revert a commit."""
1884
+ env = self._get_env(args)
1885
+
1886
+ try:
1887
+ print(f"Reverting commit '{args.commit}'...")
1888
+ env.revert_commit(args.commit)
1889
+ print(f"✓ Reverted commit '{args.commit}'")
1890
+ except Exception as e:
1891
+ if logger:
1892
+ logger.error(f"Revert failed: {e}", exc_info=True)
1893
+ print(f"✗ Revert failed: {e}", file=sys.stderr)
1894
+ sys.exit(1)
1895
+
1896
+ @with_env_logging("env commit")
1897
+ def commit(self, args: argparse.Namespace, logger=None) -> None:
1898
+ """Commit workflows with optional issue resolution."""
1899
+ env = self._get_env(args)
1900
+
1901
+ # Warn if in detached HEAD before allowing commit
1902
+ current_branch = env.get_current_branch()
1903
+ if current_branch is None and not args.yes:
1904
+ print("⚠️ Warning: You are in detached HEAD state!")
1905
+ print(" Commits made here will not be saved to any branch.")
1906
+ print()
1907
+ print("Options:")
1908
+ print(" • Create a branch first: cg checkout -b <branch-name>")
1909
+ print(" • Commit anyway (not recommended): use --yes flag")
1910
+ print()
1911
+ response = input("Continue with commit in detached HEAD? (y/N): ")
1912
+ if response.lower() != 'y':
1913
+ print("Commit cancelled. Create a branch first.")
1914
+ sys.exit(0)
1915
+ print() # Extra spacing before commit output
1916
+
1917
+ print("📋 Analyzing workflows...")
1918
+
1919
+ # Get workflow status (read-only analysis)
1920
+ try:
1921
+ workflow_status = env.workflow_manager.get_workflow_status()
1922
+
1923
+ if logger:
1924
+ logger.debug(f"Workflow status: {workflow_status.sync_status}")
1925
+
1926
+ # Check if there are ANY committable changes (workflows OR git)
1927
+ if not env.has_committable_changes():
1928
+ print("✓ No changes to commit")
1929
+ return
1930
+
1931
+ except Exception as e:
1932
+ if logger:
1933
+ logger.error(f"Workflow analysis failed: {e}", exc_info=True)
1934
+ print(f"✗ Failed to analyze workflows: {e}", file=sys.stderr)
1935
+ sys.exit(1)
1936
+
1937
+ # Check commit safety
1938
+ if not workflow_status.is_commit_safe and not getattr(args, 'allow_issues', False):
1939
+ print("\n⚠ Cannot commit - workflows have unresolved issues:\n")
1940
+ for wf in workflow_status.workflows_with_issues:
1941
+ print(f" • {wf.name}: {wf.issue_summary}")
1942
+
1943
+ print("\n💡 Options:")
1944
+ print(" 1. Resolve issues: cg workflow resolve \"<name>\"")
1945
+ print(" 2. Force commit: cg commit -m 'msg' --allow-issues")
1946
+ sys.exit(1)
1947
+
1948
+ # Execute commit with chosen strategies
1949
+ try:
1950
+ env.execute_commit(
1951
+ workflow_status=workflow_status,
1952
+ message=args.message,
1953
+ allow_issues=getattr(args, 'allow_issues', False)
1954
+ )
1955
+ except Exception as e:
1956
+ if logger:
1957
+ logger.error(f"Commit failed for environment '{env.name}': {e}", exc_info=True)
1958
+ print(f"✗ Commit failed: {e}", file=sys.stderr)
1959
+ sys.exit(1)
1960
+
1961
+ # Display results on success
1962
+ print(f"✅ Commit successful: {args.message if args.message else 'Update workflows'}")
1963
+
1964
+ # Show what was done
1965
+ new_count = len(workflow_status.sync_status.new)
1966
+ modified_count = len(workflow_status.sync_status.modified)
1967
+ deleted_count = len(workflow_status.sync_status.deleted)
1968
+
1969
+ if new_count > 0:
1970
+ print(f" • Added {new_count} workflow(s)")
1971
+ if modified_count > 0:
1972
+ print(f" • Updated {modified_count} workflow(s)")
1973
+ if deleted_count > 0:
1974
+ print(f" • Deleted {deleted_count} workflow(s)")
1975
+
1976
+
1977
+ # === Git remote operations ===
1978
+
1979
+ @with_env_logging("env pull")
1980
+ def pull(self, args: argparse.Namespace, logger=None) -> None:
1981
+ """Pull from remote and repair environment."""
1982
+ env = self._get_env(args)
1983
+
1984
+ # Check remote exists
1985
+ if not env.git_manager.has_remote(args.remote):
1986
+ print(f"✗ Remote '{args.remote}' not configured")
1987
+ print()
1988
+ print("💡 Set up a remote first:")
1989
+ print(f" cg remote add {args.remote} <url>")
1990
+ sys.exit(1)
1991
+
1992
+ # Preview mode - read-only, just show what would change
1993
+ if getattr(args, "preview", False):
1994
+ try:
1995
+ print(f"Fetching from {args.remote}...")
1996
+ diff = env.preview_pull(remote=args.remote)
1997
+
1998
+ if not diff.has_changes:
1999
+ if diff.is_already_merged:
2000
+ print("\n✓ Already up to date.")
2001
+ elif diff.is_fast_forward:
2002
+ print(f"\n✓ Remote has commits but no ComfyGit changes.")
2003
+ print(" Pull will bring in commits without affecting nodes/models/workflows.")
2004
+ else:
2005
+ print("\n✓ No ComfyGit configuration changes to pull.")
2006
+ return
2007
+
2008
+ self._display_diff_preview(diff)
2009
+
2010
+ if diff.has_conflicts:
2011
+ print("\n⚠️ Conflicts will occur. Resolve before pulling.")
2012
+ return # Preview is read-only, don't continue to actual pull
2013
+ except Exception as e:
2014
+ if logger:
2015
+ logger.error(f"Preview failed: {e}", exc_info=True)
2016
+ print(f"✗ Preview failed: {e}", file=sys.stderr)
2017
+ sys.exit(1)
2018
+
2019
+ # Check for uncommitted changes first
2020
+ if env.has_committable_changes() and not getattr(args, 'force', False):
2021
+ print("⚠️ You have uncommitted changes")
2022
+ print()
2023
+ print("💡 Options:")
2024
+ print(" • Commit: cg commit -m 'message'")
2025
+ print(" • Discard: cg reset --hard")
2026
+ print(" • Force: cg pull origin --force")
2027
+ sys.exit(1)
2028
+
2029
+ try:
2030
+ # Determine merge strategy
2031
+ strategy_option: str | None = None
2032
+ auto_resolve = getattr(args, "auto_resolve", None)
2033
+
2034
+ if auto_resolve:
2035
+ # Use git -X strategy based on auto-resolve choice
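+ # ("-X theirs" favors the remote side of conflicting hunks; "-X ours" keeps the local side)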
2036
+ strategy_option = "theirs" if auto_resolve == "theirs" else "ours"
2037
+ else:
2038
+ # Check for conflicts before pull
2039
+ print(f"Checking for conflicts with {args.remote}...")
2040
+ diff = env.preview_pull(remote=args.remote)
2041
+ if diff.has_conflicts:
2042
+ # Interactive conflict resolution
2043
+ from .strategies.conflict_resolver import InteractiveConflictResolver
2044
+
2045
+ current = env.get_current_branch() or "HEAD"
2046
+ print(f"\n⚠️ Conflicts detected between '{current}' and '{args.remote}':")
2047
+ self._display_diff_preview(diff)
2048
+
2049
+ resolver = InteractiveConflictResolver()
2050
+ resolutions = resolver.resolve_all(diff)
2051
+
2052
+ # Check if any conflicts were skipped
2053
+ skipped = [k for k, v in resolutions.items() if v == "skip"]
2054
+ if skipped:
2055
+ print(f"\n⚠️ {len(skipped)} conflict(s) will be skipped.")
2056
+ print(" You may need to resolve them manually after pull.")
2057
+
2058
+ # Determine strategy from resolutions
2059
+ non_skip = [v for v in resolutions.values() if v != "skip"]
2060
+ unique_resolutions = set(non_skip)
2061
+ if unique_resolutions == {"take_target"}:
2062
+ strategy_option = "theirs"
2063
+ elif unique_resolutions == {"take_base"}:
2064
+ strategy_option = "ours"
2065
+ # Mixed or empty: no strategy, git decides
2066
+
2067
+ print(f"📥 Pulling from {args.remote}...")
2068
+
2069
+ # Create callbacks for node and model progress (reuse repair command patterns)
2070
+ from comfygit_core.models.workflow import BatchDownloadCallbacks, NodeInstallCallbacks
2071
+ from .utils.progress import create_progress_callback
2072
+
2073
+ # Node installation callbacks
2074
+ def on_node_start(node_id: str, idx: int, total: int) -> None:
2075
+ print(f" [{idx}/{total}] Installing {node_id}...", end=" ", flush=True)
2076
+
2077
+ def on_node_complete(_node_id: str, success: bool, error: str | None) -> None:
2078
+ if success:
2079
+ print("✓")
2080
+ else:
2081
+ print(f"✗ ({error})")
2082
+
2083
+ node_callbacks = NodeInstallCallbacks(
2084
+ on_node_start=on_node_start,
2085
+ on_node_complete=on_node_complete
2086
+ )
2087
+
2088
+ # Model download callbacks
2089
+ def on_file_start(filename: str, idx: int, total: int) -> None:
2090
+ print(f" [{idx}/{total}] Downloading {filename}...")
2091
+
2092
+ def on_file_complete(filename: str, success: bool, error: str | None) -> None:
2093
+ print() # New line after progress bar
2094
+ if success:
2095
+ print(f" ✓ {filename}")
2096
+ else:
2097
+ print(f" ✗ {filename}: {error}")
2098
+
2099
+ model_callbacks = BatchDownloadCallbacks(
2100
+ on_file_start=on_file_start,
2101
+ on_file_progress=create_progress_callback(),
2102
+ on_file_complete=on_file_complete
2103
+ )
2104
+
2105
+ # Pull and repair with progress callbacks
2106
+ result = env.pull_and_repair(
2107
+ remote=args.remote,
2108
+ model_strategy=getattr(args, 'models', 'all'),
2109
+ model_callbacks=model_callbacks,
2110
+ node_callbacks=node_callbacks,
2111
+ strategy_option=strategy_option,
2112
+ )
2113
+
2114
+ # Extract sync result for summary
2115
+ sync_result = result.get('sync_result')
2116
+
2117
+ print(f"\n✓ Pulled changes from {args.remote}")
2118
+
2119
+ # Show summary of what was synced (like repair command)
2120
+ if sync_result:
2121
+ summary_items = []
2122
+ if sync_result.nodes_installed:
2123
+ summary_items.append(f"Installed {len(sync_result.nodes_installed)} node(s)")
2124
+ if sync_result.nodes_removed:
2125
+ summary_items.append(f"Removed {len(sync_result.nodes_removed)} node(s)")
2126
+ if sync_result.models_downloaded:
2127
+ summary_items.append(f"Downloaded {len(sync_result.models_downloaded)} model(s)")
2128
+
2129
+ if summary_items:
2130
+ for item in summary_items:
2131
+ print(f" • {item}")
2132
+
2133
+ print("\n⚙️ Environment synced successfully")
2134
+
2135
+ except KeyboardInterrupt:
2136
+ # User pressed Ctrl+C - git changes already rolled back by core
2137
+ if logger:
2138
+ logger.warning("Pull interrupted by user")
2139
+ print("\n⚠️ Pull interrupted - git changes rolled back", file=sys.stderr)
2140
+ sys.exit(1)
2141
+ except ValueError as e:
2142
+ # Merge conflicts
2143
+ if logger:
2144
+ logger.error(f"Pull failed: {e}", exc_info=True)
2145
+
2146
+ # Check if it's a merge conflict
2147
+ error_str = str(e)
2148
+ if "Merge conflict" in error_str or "conflict" in error_str.lower():
2149
+ print(f"\n✗ Merge conflict detected", file=sys.stderr)
2150
+ print()
2151
+ print("💡 To resolve:")
2152
+ print(f" 1. cd {env.cec_path}")
2153
+ print(" 2. git status # See conflicted files")
2154
+ print(" 3. Edit conflicts and resolve")
2155
+ print(" 4. git add <resolved-files>")
2156
+ print(" 5. git commit")
2157
+ print(" 6. cg repair # Sync environment")
2158
+ else:
2159
+ print(f"✗ Pull failed: {e}", file=sys.stderr)
2160
+ sys.exit(1)
2161
+ except OSError as e:
2162
+ # Network, auth, or other git errors
2163
+ if logger:
2164
+ logger.error(f"Pull failed: {e}", exc_info=True)
2165
+
2166
+ # Check if it's a merge conflict (OSError from git_merge)
2167
+ error_str = str(e)
2168
+ if "Merge conflict" in error_str or "conflict" in error_str.lower():
2169
+ print(f"\n✗ Merge conflict detected", file=sys.stderr)
2170
+ print()
2171
+ print("💡 To resolve:")
2172
+ print(f" 1. cd {env.cec_path}")
2173
+ print(" 2. git status # See conflicted files")
2174
+ print(" 3. Edit conflicts and resolve")
2175
+ print(" 4. git add <resolved-files>")
2176
+ print(" 5. git commit")
2177
+ print(" 6. cg repair # Sync environment")
2178
+ else:
2179
+ print(f"✗ Pull failed: {e}", file=sys.stderr)
2180
+ sys.exit(1)
2181
+ except Exception as e:
2182
+ if logger:
2183
+ logger.error(f"Pull failed: {e}", exc_info=True)
2184
+ print(f"✗ Pull failed: {e}", file=sys.stderr)
2185
+ sys.exit(1)
2186
+
2187
+ @with_env_logging("env push")
2188
+ def push(self, args: argparse.Namespace, logger=None) -> None:
2189
+ """Push commits to remote."""
2190
+ env = self._get_env(args)
2191
+
2192
+ # Check for uncommitted changes
2193
+ if env.has_committable_changes():
2194
+ print("⚠️ You have uncommitted changes")
2195
+ print()
2196
+ print("💡 Commit first:")
2197
+ print(" cg commit -m 'your message'")
2198
+ sys.exit(1)
2199
+
2200
+ # Check remote exists
2201
+ if not env.git_manager.has_remote(args.remote):
2202
+ print(f"✗ Remote '{args.remote}' not configured")
2203
+ print()
2204
+ print("💡 Set up a remote first:")
2205
+ print(f" cg remote add {args.remote} <url>")
2206
+ sys.exit(1)
2207
+
2208
+ try:
2209
+ force = getattr(args, 'force', False)
2210
+
2211
+ if force:
2212
+ print(f"📤 Force pushing to {args.remote}...")
2213
+ else:
2214
+ print(f"📤 Pushing to {args.remote}...")
2215
+
2216
+ # Push (with force flag if specified)
2217
+ env.push_commits(remote=args.remote, force=force)
2218
+
2219
+ if force:
2220
+ print(f" ✓ Force pushed commits to {args.remote}")
2221
+ else:
2222
+ print(f" ✓ Pushed commits to {args.remote}")
2223
+
2224
+ # Show remote URL
2225
+ from comfygit_core.utils.git import git_remote_get_url
2226
+ remote_url = git_remote_get_url(env.cec_path, args.remote)
2227
+ if remote_url:
2228
+ print()
2229
+ print(f"💾 Remote: {remote_url}")
2230
+
2231
+ except ValueError as e:
2232
+ # No remote or workflow issues
2233
+ if logger:
2234
+ logger.error(f"Push failed: {e}", exc_info=True)
2235
+ print(f"✗ Push failed: {e}", file=sys.stderr)
2236
+ sys.exit(1)
2237
+ except OSError as e:
2238
+ # Network, auth, or git errors
2239
+ if logger:
2240
+ logger.error(f"Push failed: {e}", exc_info=True)
2241
+ print(f"✗ Push failed: {e}", file=sys.stderr)
2242
+ sys.exit(1)
2243
+ except Exception as e:
2244
+ if logger:
2245
+ logger.error(f"Push failed: {e}", exc_info=True)
2246
+ print(f"✗ Push failed: {e}", file=sys.stderr)
2247
+ sys.exit(1)
2248
+
2249
+ @with_env_logging("env remote")
2250
+ def remote(self, args: argparse.Namespace, logger=None) -> None:
2251
+ """Manage git remotes."""
2252
+ env = self._get_env(args)
2253
+
2254
+ subcommand = args.remote_command
2255
+
2256
+ try:
2257
+ if subcommand == "add":
2258
+ # Add remote
2259
+ if not args.name or not args.url:
2260
+ print("✗ Usage: cg remote add <name> <url>")
2261
+ sys.exit(1)
2262
+
2263
+ env.git_manager.add_remote(args.name, args.url)
2264
+ print(f"✓ Added remote '{args.name}': {args.url}")
2265
+
2266
+ elif subcommand == "remove":
2267
+ # Remove remote
2268
+ if not args.name:
2269
+ print("✗ Usage: cg remote remove <name>")
2270
+ sys.exit(1)
2271
+
2272
+ env.git_manager.remove_remote(args.name)
2273
+ print(f"✓ Removed remote '{args.name}'")
2274
+
2275
+ elif subcommand == "list":
2276
+ # List remotes
2277
+ remotes = env.git_manager.list_remotes()
2278
+
2279
+ if not remotes:
2280
+ print("No remotes configured")
2281
+ print()
2282
+ print("💡 Add a remote:")
2283
+ print(" cg remote add origin <url>")
2284
+ else:
2285
+ print("Remotes:")
2286
+ for name, url, remote_type in remotes:
2287
+ print(f" {name}\t{url} ({remote_type})")
2288
+
2289
+ else:
2290
+ print(f"✗ Unknown remote command: {subcommand}")
2291
+ print(" Usage: cg remote [add|remove|list]")
2292
+ sys.exit(1)
2293
+
2294
+ except ValueError as e:
2295
+ print(f"✗ {e}", file=sys.stderr)
2296
+ sys.exit(1)
2297
+ except OSError as e:
2298
+ if logger:
2299
+ logger.error(f"Remote operation failed: {e}", exc_info=True)
2300
+ print(f"✗ {e}", file=sys.stderr)
2301
+ sys.exit(1)
2302
+
2303
+ # === Workflow management ===
2304
+
2305
+ @with_env_logging("workflow list", get_env_name=lambda self, args: self._get_env(args).name)
2306
+ def workflow_list(self, args: argparse.Namespace) -> None:
2307
+ """List all workflows with their sync status."""
2308
+ env = self._get_env(args)
2309
+
2310
+ workflows = env.list_workflows()
2311
+
2312
+ if workflows.total_count == 0:
2313
+ print("No workflows found")
2314
+ return
2315
+
2316
+ print(f"Workflows in '{env.name}':")
2317
+
2318
+ if workflows.synced:
2319
+ print("\n✓ Synced (up to date):")
2320
+ for name in workflows.synced:
2321
+ print(f" 📋 {name}")
2322
+
2323
+ if workflows.modified:
2324
+ print("\n⚠ Modified (changed since last commit):")
2325
+ for name in workflows.modified:
2326
+ print(f" 📝 {name}")
2327
+
2328
+ if workflows.new:
2329
+ print("\n🆕 New (not committed yet):")
2330
+ for name in workflows.new:
2331
+ print(f" ➕ {name}")
2332
+
2333
+ if workflows.deleted:
2334
+ print("\n🗑 Deleted (removed from ComfyUI):")
2335
+ for name in workflows.deleted:
2336
+ print(f" ➖ {name}")
2337
+
2338
+ # Show commit suggestion if there are changes
2339
+ if workflows.has_changes:
2340
+ print("\nRun 'cg commit' to save current state")
2341
+
2342
+ @with_env_logging("workflow model importance", get_env_name=lambda self, args: self._get_env(args).name)
2343
+ def workflow_model_importance(self, args: argparse.Namespace, logger=None) -> None:
2344
+ """Update model importance (criticality) for workflow models."""
2345
+ env = self._get_env(args)
2346
+
2347
+ # Determine workflow name (direct or interactive)
2348
+ if hasattr(args, 'workflow_name') and args.workflow_name:
2349
+ workflow_name = args.workflow_name
2350
+ else:
2351
+ # Interactive: select workflow
2352
+ workflow_name = self._select_workflow_interactive(env)
2353
+ if not workflow_name:
2354
+ print("✗ No workflow selected")
2355
+ return
2356
+
2357
+ # Get workflow models
2358
+ models = env.pyproject.workflows.get_workflow_models(workflow_name)
2359
+ if not models:
2360
+ print(f"✗ No models found in workflow '{workflow_name}'")
2361
+ return
2362
+
2363
+ # Determine model (direct or interactive)
2364
+ if hasattr(args, 'model_identifier') and args.model_identifier:
2365
+ # Direct mode: update single model
2366
+ model_identifier = args.model_identifier
2367
+ new_importance = args.importance
2368
+
2369
+ success = env.workflow_manager.update_model_criticality(
2370
+ workflow_name=workflow_name,
2371
+ model_identifier=model_identifier,
2372
+ new_criticality=new_importance
2373
+ )
2374
+
2375
+ if success:
2376
+ print(f"✓ Updated '{model_identifier}' importance to: {new_importance}")
2377
+ else:
2378
+ print(f"✗ Model '{model_identifier}' not found in workflow '{workflow_name}'")
2379
+ sys.exit(1)
2380
+ else:
2381
+ # Interactive: loop over all models
2382
+ print(f"\n📋 Setting model importance for workflow: {workflow_name}")
2383
+ print(f" Found {len(models)} model(s)\n")
2384
+
2385
+ updated_count = 0
2386
+ for model in models:
2387
+ current_importance = model.criticality
2388
+ display_name = model.filename
2389
+ if model.hash:
2390
+ display_name += f" ({model.hash[:8]}...)"
2391
+
2392
+ print(f"\nModel: {display_name}")
2393
+ print(f" Current: {current_importance}")
2394
+ print(f" Options: [r]equired, [f]lexible, [o]ptional, [s]kip")
2395
+
2396
+ # Prompt for new importance
2397
+ try:
2398
+ choice = input(" Choice: ").strip().lower()
2399
+ except (KeyboardInterrupt, EOFError):
2400
+ print("\n✗ Cancelled")
2401
+ return
2402
+
2403
+ # Map choice to importance level
2404
+ importance_map = {
2405
+ 'r': 'required',
2406
+ 'f': 'flexible',
2407
+ 'o': 'optional',
2408
+ 's': None # Skip
2409
+ }
2410
+
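+ # .get() returns None for both an explicit skip ('s') and unrecognized input; the branch below tells them apart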
2411
+ new_importance = importance_map.get(choice)
2412
+ if new_importance is None:
2413
+ if choice == 's':
2414
+ print(" → Skipped")
2415
+ continue
2416
+ else:
2417
+ print(f" → Invalid choice, skipping")
2418
+ continue
2419
+
2420
+ # Update model importance
2421
+ identifier = model.hash if model.hash else model.filename
2422
+ success = env.workflow_manager.update_model_criticality(
2423
+ workflow_name=workflow_name,
2424
+ model_identifier=identifier,
2425
+ new_criticality=new_importance
2426
+ )
2427
+
2428
+ if success:
2429
+ print(f" ✓ Updated to: {new_importance}")
2430
+ updated_count += 1
2431
+ else:
2432
+ print(f" ✗ Failed to update")
2433
+
2434
+ print(f"\n✓ Updated {updated_count}/{len(models)} model(s)")
2435
+
2436
+ def _select_workflow_interactive(self, env) -> str | None:
2437
+ """Interactive workflow selection from available workflows.
2438
+
2439
+ Returns:
2440
+ Selected workflow name or None if cancelled
2441
+ """
2442
+ status = env.workflow_manager.get_workflow_sync_status()
2443
+ all_workflows = status.new + status.modified + status.synced
2444
+
2445
+ if not all_workflows:
2446
+ print("✗ No workflows found")
2447
+ return None
2448
+
2449
+ print("\n📋 Available workflows:")
2450
+ for i, name in enumerate(all_workflows, 1):
2451
+ # Show sync state
2452
+ if name in status.new:
2453
+ state = "new"
2454
+ elif name in status.modified:
2455
+ state = "modified"
2456
+ else:
2457
+ state = "synced"
2458
+ print(f" {i}. {name} ({state})")
2459
+
2460
+ print()
2461
+ try:
2462
+ choice = input("Select workflow (number or name): ").strip()
2463
+ except (KeyboardInterrupt, EOFError):
2464
+ return None
2465
+
2466
+ # Try to parse as number
2467
+ try:
2468
+ idx = int(choice) - 1
2469
+ if 0 <= idx < len(all_workflows):
2470
+ return all_workflows[idx]
2471
+ except ValueError:
2472
+ # Try as name
2473
+ if choice in all_workflows:
2474
+ return choice
2475
+
2476
+ print(f"✗ Invalid selection: {choice}")
2477
+ return None
2478
+
2479
+ @with_env_logging("workflow resolve", get_env_name=lambda self, args: self._get_env(args).name)
2480
+ def workflow_resolve(self, args: argparse.Namespace, logger=None) -> None:
2481
+ """Resolve workflow dependencies interactively."""
2482
+ env = self._get_env(args)
2483
+
2484
+ # Choose strategy
2485
+ if args.auto:
2486
+ from comfygit_core.strategies.auto import AutoModelStrategy, AutoNodeStrategy
2487
+ node_strategy = AutoNodeStrategy()
2488
+ model_strategy = AutoModelStrategy()
2489
+ else:
2490
+ node_strategy = InteractiveNodeStrategy()
2491
+ model_strategy = InteractiveModelStrategy()
2492
+
2493
+ # Phase 1: Resolve dependencies (updates pyproject.toml)
2494
+ print("\n🔧 Resolving dependencies...")
2495
+ try:
2496
+ from comfygit_cli.utils.progress import create_batch_download_callbacks
2497
+
2498
+ result = env.resolve_workflow(
2499
+ name=args.name,
2500
+ node_strategy=node_strategy,
2501
+ model_strategy=model_strategy,
2502
+ download_callbacks=create_batch_download_callbacks()
2503
+ )
2504
+ except CDRegistryDataError as e:
2505
+ # Registry data unavailable
2506
+ formatted = NodeErrorFormatter.format_registry_error(e)
2507
+ if logger:
2508
+ logger.error(f"Registry data unavailable for workflow resolve: {e}", exc_info=True)
2509
+ print(f"✗ Cannot resolve workflow - registry data unavailable", file=sys.stderr)
2510
+ print(formatted, file=sys.stderr)
2511
+ sys.exit(1)
2512
+ except FileNotFoundError as e:
2513
+ if logger:
2514
+ logger.error(f"Resolution failed for '{args.name}': {e}", exc_info=True)
2515
+ workflow_path = env.workflow_manager.comfyui_workflows / f"{args.name}.json"
2516
+ print(f"✗ Workflow '{args.name}' not found at {workflow_path}")
2517
+ sys.exit(1)
2518
+ except Exception as e:
2519
+ if logger:
2520
+ logger.error(f"Resolution failed for '{args.name}': {e}", exc_info=True)
2521
+ print(f"✗ Failed to resolve dependencies: {e}", file=sys.stderr)
2522
+ sys.exit(1)
2523
+
2524
+ # Phase 2: Check for uninstalled nodes and prompt for installation
2525
+ uninstalled_nodes = env.get_uninstalled_nodes(workflow_name=args.name)
2526
+
2527
+ if uninstalled_nodes:
2528
+ print(f"\n📦 Found {len(uninstalled_nodes)} missing node packs:")
2529
+ for node_id in uninstalled_nodes:
2530
+ print(f" • {node_id}")
2531
+
2532
+ # Determine if we should install
2533
+ should_install = False
2534
+
2535
+ if hasattr(args, 'install') and args.install:
2536
+ # Auto-install mode
2537
+ should_install = True
2538
+ elif hasattr(args, 'no_install') and args.no_install:
2539
+ # Skip install mode
2540
+ should_install = False
2541
+ else:
2542
+ # Interactive prompt (default)
2543
+ try:
2544
+ response = input("\nInstall missing nodes? (Y/n): ").strip().lower()
2545
+ should_install = response in ['', 'y', 'yes']
2546
+ except KeyboardInterrupt:
2547
+ print("\nSkipped node installation")
2548
+ should_install = False
2549
+
2550
+ if should_install:
2551
+ from comfygit_core.models.workflow import NodeInstallCallbacks
2552
+
2553
+ print("\n⬇️ Installing nodes...")
2554
+
2555
+ # Create callbacks for progress display
2556
+ def on_node_start(node_id, idx, total):
2557
+ print(f" [{idx}/{total}] Installing {node_id}...", end=" ", flush=True)
2558
+
2559
+ def on_node_complete(node_id, success, error):
2560
+ if success:
2561
+ print("✓")
2562
+ else:
2563
+ # Handle UV-specific errors
2564
+ if "UVCommandError" in str(error) and logger:
2565
+ # The error string includes the exception-name prefix; strip it before printing
2566
+ try:
2567
+ # Try to extract meaningful error
2568
+ user_msg = error.split(":", 1)[1].strip() if ":" in error else error
2569
+ print(f"✗ ({user_msg})")
2570
+ except Exception:
2571
+ print(f"✗ ({error})")
2572
+ else:
2573
+ print(f"✗ ({error})")
2574
+
2575
+ callbacks = NodeInstallCallbacks(
2576
+ on_node_start=on_node_start,
2577
+ on_node_complete=on_node_complete
2578
+ )
2579
+
2580
+ # Install nodes with progress feedback
2581
+ installed_count, failed_nodes = env.install_nodes_with_progress(
2582
+ uninstalled_nodes,
2583
+ callbacks=callbacks
2584
+ )
2585
+
2586
+ if installed_count > 0:
2587
+ print(f"\n✅ Installed {installed_count}/{len(uninstalled_nodes)} nodes")
2588
+
2589
+ if failed_nodes:
2590
+ print(f"\n⚠️ Failed to install {len(failed_nodes)} nodes:")
2591
+ for node_id, error in failed_nodes:
2592
+ print(f" • {node_id}")
2593
+ print("\n💡 For detailed error information:")
2594
+ log_file = self.workspace.paths.logs / env.name / "full.log"
2595
+ print(f" {log_file}")
2596
+ print("\nYou can try installing them manually:")
2597
+ print(" cg node add <node-id>")
2598
+ else:
2599
+ print("\nℹ️ Skipped node installation")
2600
+ # print("\nℹ️ Skipped node installation. To install later:")
2601
+ # print(f" • Re-run: cg workflow resolve \"{args.name}\"")
2602
+ # print(" • Or install individually: cg node add <node-id>")
2603
+
2604
+ # Display final results - check issues first
2605
+ uninstalled = env.get_uninstalled_nodes(workflow_name=args.name)
2606
+
2607
+ # Check for category mismatch (blocking issue that resolve can't fix)
2608
+ category_mismatches = [m for m in result.models_resolved if m.has_category_mismatch]
2609
+
2610
+ if result.has_issues or uninstalled:
2611
+ print("\n⚠️ Partial resolution - issues remain:")
2612
+
2613
+ # Show what was resolved
2614
+ if result.models_resolved:
2615
+ print(f" ✓ Resolved {len(result.models_resolved)} models")
2616
+ if result.nodes_resolved:
2617
+ print(f" ✓ Resolved {len(result.nodes_resolved)} nodes")
2618
+
2619
+ # Show what's still broken
2620
+ if result.nodes_unresolved:
2621
+ print(f" ✗ {len(result.nodes_unresolved)} nodes couldn't be resolved")
2622
+ if result.models_unresolved:
2623
+ print(f" ✗ {len(result.models_unresolved)} models not found")
2624
+ if result.models_ambiguous:
2625
+ print(f" ✗ {len(result.models_ambiguous)} ambiguous models")
2626
+ if uninstalled:
2627
+ print(f" ✗ {len(uninstalled)} packages need installation")
2628
+ if category_mismatches:
2629
+ print(f" ✗ {len(category_mismatches)} models in wrong directory")
2630
+
2631
+ print("\n💡 Next:")
2632
+ if category_mismatches:
2633
+ print(" Models in wrong directory (move files manually):")
2634
+ for m in category_mismatches:
2635
+ expected = m.expected_categories[0] if m.expected_categories else "unknown"
2636
+ print(f" {m.actual_category}/{m.name} → {expected}/")
2637
+ else:
2638
+ print(f" Re-run: cg workflow resolve \"{args.name}\"")
2639
+ print(" Or commit with issues: cg commit -m \"...\" --allow-issues")
2640
+
2641
+ elif result.models_resolved or result.nodes_resolved:
2642
+ # Check for failed download intents by querying current state (not stale result)
2643
+ # Downloads execute AFTER result is created, so we need fresh state
2644
+ current_models = env.pyproject.workflows.get_workflow_models(args.name)
2645
+ failed_downloads = [
2646
+ m for m in current_models
2647
+ if m.status == 'unresolved' and m.sources # Has download intent but still unresolved
2648
+ ]
2649
+
2650
+ if failed_downloads:
2651
+ print("\n⚠️ Resolution partially complete:")
2652
+ # Count successful resolutions (not download intents or successful downloads)
2653
+ successful_models = [
2654
+ m for m in result.models_resolved
2655
+ if m.match_type != 'download_intent' or m.resolved_model is not None
2656
+ ]
2657
+ if successful_models:
2658
+ print(f" ✓ Resolved {len(successful_models)} models")
2659
+ if result.nodes_resolved:
2660
+ print(f" ✓ Resolved {len(result.nodes_resolved)} nodes")
2661
+
2662
+ print(f" ⚠️ {len(failed_downloads)} model(s) queued for download (failed to fetch)")
2663
+ for m in failed_downloads:
2664
+ print(f" • {m.filename}")
2665
+
2666
+ print("\n💡 Next:")
2667
+ print(" Add Civitai API key: cg config --civitai-key <your-token>")
2668
+ print(f" Try again: cg workflow resolve \"{args.name}\"")
2669
+ print(" Or commit anyway: cg commit -m \"...\" --allow-issues")
2670
+ else:
2671
+ # Check for category mismatch even in "success" case
2672
+ if category_mismatches:
2673
+ print("\n⚠️ Resolution complete but models in wrong directory:")
2674
+ if result.models_resolved:
2675
+ print(f" ✓ Resolved {len(result.models_resolved)} models")
2676
+ if result.nodes_resolved:
2677
+ print(f" ✓ Resolved {len(result.nodes_resolved)} nodes")
2678
+ print(f" ✗ {len(category_mismatches)} models in wrong directory")
2679
+ print("\n💡 Next (move files manually):")
2680
+ for m in category_mismatches:
2681
+ expected = m.expected_categories[0] if m.expected_categories else "unknown"
2682
+ print(f" {m.actual_category}/{m.name} → {expected}/")
2683
+ else:
2684
+ print("\n✅ Resolution complete!")
2685
+ if result.models_resolved:
2686
+ print(f" • Resolved {len(result.models_resolved)} models")
2687
+ if result.nodes_resolved:
2688
+ print(f" • Resolved {len(result.nodes_resolved)} nodes")
2689
+ print("\n💡 Next:")
2690
+ print(f" Commit workflows: cg commit -m \"Resolved {args.name}\"")
2691
+ else:
2692
+ # No changes case - still check for category mismatch
2693
+ if category_mismatches:
2694
+ print("\n⚠️ No resolution changes but models in wrong directory:")
2695
+ print(f" ✗ {len(category_mismatches)} models in wrong directory")
2696
+ print("\n💡 Next (move files manually):")
2697
+ for m in category_mismatches:
2698
+ expected = m.expected_categories[0] if m.expected_categories else "unknown"
2699
+ print(f" {m.actual_category}/{m.name} → {expected}/")
2700
+ else:
2701
+ print("✓ No changes needed - all dependencies already resolved")