comfygit-core 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,158 @@
1
+ """Utilities for handling and formatting UV command errors."""
2
+
3
+ import logging
4
+ import re
5
+ from typing import Optional
6
+
7
+ from ..models.exceptions import UVCommandError
8
+
9
+
10
def parse_failed_dependency_group(stderr: str) -> Optional[str]:
    """Parse UV error output to find the dependency group behind a build failure.

    UV includes helpful context in build errors:
        "help: `package` (vX.Y.Z) was included because
        `project-name:group-name` (vX.Y.Z) depends on `package>=X.Y.Z`"

    Args:
        stderr: UV command stderr output.

    Returns:
        Group name if found (e.g., "optional-sageattn"), None otherwise.
    """
    if not stderr:
        return None

    # The group name is the part after the colon inside the backticked
    # `project:group` token on the "was included because" line.
    match = re.search(r"was included because\s+`[^:]+:([^`]+)`", stderr)
    return match.group(1) if match else None
34
+
35
+
36
def extract_uv_error_hint(stderr: str) -> Optional[str]:
    """Extract the most useful error hint from UV stderr output.

    UV typically formats errors with:
    - Lines starting with "error:"
    - Lines containing "conflict"
    - Multi-line dependency resolution explanations
    - The final/last non-empty line often contains the key message

    Args:
        stderr: The stderr output from UV command.

    Returns:
        Most relevant error line, or None if stderr is empty.
    """
    if not stderr or not stderr.strip():
        return None

    lines = [ln.strip() for ln in stderr.strip().split('\n') if ln.strip()]
    if not lines:
        return None

    keywords = ('error:', 'conflict', 'unsatisfiable', 'incompatible', 'failed', '× ')

    # Walk bottom-up: UV prints the decisive message nearest the end.
    for candidate in reversed(lines):
        lowered = candidate.lower()
        if any(keyword in lowered for keyword in keywords):
            return candidate

    # No keyword matched anywhere - fall back to the last non-empty line.
    return lines[-1]
69
+
70
+
71
def log_uv_error(logger: logging.Logger, error: UVCommandError, context: str) -> None:
    """Log complete UV error details for debugging.

    Logs every piece of information available on the error:
    context, command line, return code, and full stderr/stdout.

    Args:
        logger: Logger instance to use.
        error: The UVCommandError exception.
        context: Context string (e.g., node name, operation).
    """
    logger.error(f"UV command failed for '{context}'")

    # Each attribute is optional on the error object; only log what exists.
    if error.command:
        logger.error(f" Command: {' '.join(error.command)}")

    if error.returncode is not None:
        logger.error(f" Return code: {error.returncode}")

    if error.stderr:
        logger.error(f" STDERR:\n{error.stderr}")

    if error.stdout:
        logger.error(f" STDOUT:\n{error.stdout}")
99
+
100
+
101
def format_uv_error_for_user(error: UVCommandError, max_hint_length: int = 300) -> str:
    """Format a UV error for user-facing display.

    Produces a concise message: a fixed prefix plus a truncated hint
    extracted from stderr when one is available.

    Args:
        error: The UVCommandError exception.
        max_hint_length: Maximum length for the error hint portion.

    Returns:
        User-friendly error message.
    """
    base_msg = "UV dependency resolution failed"

    hint = extract_uv_error_hint(error.stderr) if error.stderr else None
    if not hint:
        return base_msg

    # Keep terminal output readable - clip overly long hints.
    if len(hint) > max_hint_length:
        hint = hint[:max_hint_length] + "..."
    return f"{base_msg} - {hint}"
128
+
129
+
130
def handle_uv_error(
    error: UVCommandError,
    context: str,
    logger: logging.Logger,
    max_hint_length: int = 300
) -> tuple[str, bool]:
    """Complete UV error handling: log details + return user message.

    Main entry point for handling UV errors in the CLI; performs both
    detailed logging and user-message formatting in a single call.

    Args:
        error: The UVCommandError exception.
        context: Context string (e.g., node name, operation).
        logger: Logger instance to use for detailed logging.
        max_hint_length: Maximum length for error hint in user message.

    Returns:
        Tuple of (user_message, logs_written):
        - user_message: Brief message to show to user.
        - logs_written: True if detailed logs were written.
    """
    # Full details go to the log first...
    log_uv_error(logger, error, context)

    # ...then a concise one-liner for the user.
    user_msg = format_uv_error_for_user(error, max_hint_length)
    return user_msg, True
@@ -0,0 +1,73 @@
1
+ """Version comparison and PyTorch utilities."""
2
+
3
+ import re
4
+
5
+ from ..constants import PYTORCH_PACKAGE_NAMES
6
+
7
+
8
def is_pytorch_package(package_name: str, pytorch_packages: set[str] = PYTORCH_PACKAGE_NAMES) -> bool:
    """Check if a package is PyTorch-related.

    Args:
        package_name: Distribution name to classify (case-insensitive).
        pytorch_packages: Known PyTorch package names; defaults to the
            project-wide constant set (which includes triton).

    Returns:
        True if the package belongs to the PyTorch ecosystem.
    """
    package_lower = package_name.lower()

    # Explicitly known PyTorch packages.
    if package_lower in pytorch_packages:
        return True

    # NVIDIA CUDA runtime wheels carry a -cuNN suffix (e.g. nvidia-cublas-cu12).
    # Generalized from the previous cu11/cu12-only pattern so future CUDA major
    # versions (cu13, ...) are recognized without a code change. Any other
    # nvidia-* package is not considered PyTorch-related.
    if package_lower.startswith('nvidia-'):
        return re.fullmatch(r'nvidia-.*-cu\d+', package_lower) is not None

    # Torch companion libraries that may not be in the explicit set.
    # Removed standalone 'cuda'/'cudnn' substring checks long ago to avoid
    # false positives; keep these for backward compatibility.
    return 'torchtext' in package_lower or 'torchaudio' in package_lower
32
+
33
+
34
+ def get_pytorch_index_url(torch_version: str, cuda_torch_version: str | None = None) -> str | None:
35
+ """Determine the appropriate PyTorch index URL based on the installed torch version."""
36
+ if not torch_version:
37
+ return None
38
+
39
+ # Parse the torch version to determine the index
40
+ if '+cpu' in torch_version:
41
+ return "https://download.pytorch.org/whl/cpu"
42
+
43
+ # Extract CUDA version from torch version string
44
+ cuda_match = re.search(r'\+cu(\d+)', torch_version)
45
+ if cuda_match:
46
+ cuda_ver = cuda_match.group(1)
47
+
48
+ # Check if it's a nightly/dev version
49
+ if 'dev' in torch_version or 'nightly' in torch_version:
50
+ return f"https://download.pytorch.org/whl/nightly/cu{cuda_ver}"
51
+ else:
52
+ return f"https://download.pytorch.org/whl/cu{cuda_ver}"
53
+
54
+ # Fallback: use the detected CUDA version
55
+ if cuda_torch_version:
56
+ # Remove dots from version (12.8 -> 128)
57
+ cuda_ver = cuda_torch_version.replace('.', '')
58
+
59
+ if 'dev' in torch_version or 'nightly' in torch_version:
60
+ return f"https://download.pytorch.org/whl/nightly/cu{cuda_ver}"
61
+ else:
62
+ return f"https://download.pytorch.org/whl/cu{cuda_ver}"
63
+
64
+ # If no CUDA info found, assume CPU
65
+ return "https://download.pytorch.org/whl/cpu"
66
+
67
+
68
def normalize_package_name(package: str) -> str:
    """Normalize a package name for comparison.

    Strips extras such as "torch[cuda]" -> "torch", then trims
    whitespace and lowercases.
    """
    base, _, _ = package.partition('[')
    return base.strip().lower()
@@ -0,0 +1,90 @@
1
+ """Workflow content hashing for cache invalidation.
2
+
3
+ Provides content-based hashing of workflow JSON with normalization
4
+ to ignore volatile fields like UI state and random seeds.
5
+ """
6
+ import copy
7
+ import json
8
+ from pathlib import Path
9
+
10
+ import blake3
11
+
12
+
13
def compute_workflow_hash(workflow_path: Path) -> str:
    """Compute content hash for a workflow file.

    Uses blake3 for fast hashing and normalization to ignore
    volatile fields (UI state, random seeds, etc.).

    Args:
        workflow_path: Path to workflow JSON file

    Returns:
        16-character hex hash string (64-bit)

    Examples:
        >>> compute_workflow_hash(Path("my_workflow.json"))
        "a1b2c3d4e5f6g7h8"
    """
    # Note: Using direct json.load() rather than WorkflowRepository for performance
    # and separation of concerns (hashing != parsing). This is intentional.
    with open(workflow_path, 'r', encoding='utf-8') as f:
        raw_workflow = json.load(f)

    # Strip volatile fields, then serialize deterministically
    # (sorted keys, compact separators) so equivalent workflows
    # always produce identical bytes.
    canonical = json.dumps(
        normalize_workflow(raw_workflow),
        sort_keys=True,
        separators=(',', ':'),
    )

    digest = blake3.blake3(canonical.encode('utf-8')).hexdigest()

    # First 16 hex chars = 64 bits, sufficient for cache-key use.
    return digest[:16]
47
+
48
def normalize_workflow(workflow: dict) -> dict:
    """Remove volatile fields that don't affect workflow functionality.

    Strips:
    - UI state (extra.ds - pan/zoom)
    - Frontend version (extra.frontendVersion)
    - Revision counter (revision)
    - Auto-generated seeds (when randomize/increment mode is set)

    The input dict is never mutated; a deep copy is normalized and returned.

    Args:
        workflow: Raw workflow dict

    Returns:
        Normalized workflow dict
    """
    normalized = copy.deepcopy(workflow)

    # Remove UI state fields. Guard the type: some workflow files carry
    # "extra": null, which previously crashed on .pop().
    extra = normalized.get('extra')
    if isinstance(extra, dict):
        extra.pop('ds', None)  # Pan/zoom state
        extra.pop('frontendVersion', None)  # Frontend version

    # Remove revision counter
    normalized.pop('revision', None)

    # Normalize nodes - zero out auto-generated seeds so re-rolls of
    # "randomize"/"increment" samplers hash identically.
    nodes = normalized.get('nodes')
    if isinstance(nodes, list):
        for node in nodes:
            if not isinstance(node, dict):
                continue
            if node.get('type', '') not in ('KSampler', 'KSamplerAdvanced', 'SamplerCustom'):
                continue
            # widgets_values format: [seed, control_after_generate, steps, cfg, ...]
            # Same normalization applies to both widget lists; guard the list
            # type since some exports store widgets_values as a mapping.
            for key in ('widgets_values', 'api_widget_values'):
                values = node.get(key, [])
                if (isinstance(values, list) and len(values) >= 2
                        and values[1] in ('randomize', 'increment')):
                    values[0] = 0  # Normalize seed to a fixed value

    return normalized
@@ -0,0 +1,297 @@
1
+ """Resolution testing utility for dependency conflict detection.
2
+
3
+ This module provides utilities for testing if dependency resolution will succeed
4
+ without actually modifying the environment. Used for pre-flight checks.
5
+ """
6
+
7
+ import shutil
8
+ import tempfile
9
+ from dataclasses import dataclass, field
10
+ from pathlib import Path
11
+
12
+ from ..integrations.uv_command import UVCommand
13
+ from ..logging.logging_config import get_logger
14
+ from ..managers.pyproject_manager import PyprojectManager
15
+ from ..models.exceptions import CDPyprojectError, UVCommandError
16
+ from ..utils.conflict_parser import parse_uv_conflicts, parse_uv_resolution
17
+
18
+ logger = get_logger(__name__)
19
+
20
+
21
@dataclass
class ResolutionResult:
    """Result of dependency resolution attempt."""

    # True when the dry-run resolution completed without conflicts.
    success: bool
    # Structured conflict descriptions parsed from UV stderr.
    conflicts: list[str] = field(default_factory=list)
    # Non-fatal issues: missing files, unparseable errors, fallbacks.
    warnings: list[str] = field(default_factory=list)
    resolved_packages: dict[str, str] = field(default_factory=dict)  # name -> version
    stderr: str = ""  # Raw UV stderr for verbose mode and enhanced error messages
30
+
31
+
32
class ResolutionTester:
    """Test dependency resolution without making actual changes.

    Every test copies the pyproject.toml into a temporary directory and
    runs a UV dry-run sync there, so the real environment is never touched.
    """

    def __init__(self, workspace_path: Path):
        """Initialize the resolution tester.

        Args:
            workspace_path: Path to ComfyDock workspace
        """
        self.workspace_path = workspace_path
        # Shared workspace caches keep repeated dry-run resolutions fast.
        self.uv_cache_path = workspace_path / "uv_cache"
        self.uv_python_path = workspace_path / "uv" / "python"
        self.logger = logger

    def test_resolution(
        self, pyproject_path: Path, python_version: str | None = None
    ) -> ResolutionResult:
        """Test if a pyproject.toml will resolve successfully.

        Args:
            pyproject_path: Path to pyproject.toml to test
            python_version: Optional Python version to test with
                (accepted but not currently used by this implementation)

        Returns:
            ResolutionResult with success status and any conflicts
        """
        result = ResolutionResult(success=False)

        if not pyproject_path.exists():
            result.warnings.append(f"pyproject.toml not found: {pyproject_path}")
            return result

        self.logger.debug(f"Testing resolution for pyproject at {pyproject_path}")

        try:
            # Create a temporary directory for resolution testing
            with tempfile.TemporaryDirectory() as temp_dir:
                temp_path = Path(temp_dir)

                # Copy pyproject.toml to temp location
                temp_pyproject = temp_path / "pyproject.toml"
                shutil.copy2(pyproject_path, temp_pyproject)

                # Create UV command for temp directory
                uv = UVCommand(
                    project_env=temp_path / ".venv",
                    cache_dir=self.uv_cache_path,
                    python_install_dir=self.uv_python_path,
                    cwd=temp_path,
                )

                # Try to resolve dependencies (quiet for background testing)
                try:
                    resolution_result = uv.sync(all_groups=True, dry_run=True)
                    resolution_output = resolution_result.stdout
                except UVCommandError as e:
                    # Log full UV error details for debugging
                    self.logger.error(f"UV resolution test failed")
                    if e.stderr:
                        self.logger.error(f"UV stderr:\n{e.stderr}")
                    if e.stdout:
                        self.logger.debug(f"UV stdout:\n{e.stdout}")

                    # Store raw stderr for verbose mode
                    error_text = e.stderr or str(e)
                    result.stderr = error_text

                    # Try to extract structured conflicts from stderr
                    conflicts = parse_uv_conflicts(error_text)
                    if conflicts:
                        result.conflicts.extend(conflicts)
                    else:
                        # Fallback: Add concise error from stderr
                        if e.stderr:
                            # Extract key error lines from stderr
                            stderr_lines = [l.strip() for l in e.stderr.strip().split('\n') if l.strip()]
                            # Find the main error message (usually has × or ERROR:)
                            error_line = next((l for l in stderr_lines if '×' in l or 'ERROR:' in l.upper()), None)
                            if error_line:
                                result.warnings.append(error_line[:300])
                            else:
                                # Use last non-empty line as fallback
                                result.warnings.append(stderr_lines[-1][:300] if stderr_lines else str(e))
                        else:
                            result.warnings.append(f"Resolution failed: {str(e)}")

                    # Resolution failure is an expected outcome, not an error:
                    # return the populated result rather than raising.
                    return result
                except Exception as e:
                    # Non-UV errors
                    self.logger.error(f"Unexpected error during resolution test: {e}")
                    result.warnings.append(f"Resolution test error: {str(e)[:300]}")
                    return result

                logger.debug(f"Resolution output: {resolution_output}")

                result.success = True
                if resolution_output:
                    # Parse resolution output to get package versions
                    result.resolved_packages = parse_uv_resolution(resolution_output)
                    self.logger.debug(
                        f"Resolution successful, {len(result.resolved_packages)} packages"
                    )

        except Exception as e:
            # Covers setup failures (temp dir, copy) outside the UV call.
            self.logger.error(f"Error during resolution test: {e}")
            result.warnings.append(f"Could not test resolution: {str(e)}")

        return result

    def test_with_additions(
        self,
        base_pyproject: Path,
        additional_deps: list[str],
        group_name: str | None = None,
    ) -> ResolutionResult:
        """Test resolution with additional dependencies added.

        Useful for testing if adding new packages will cause conflicts
        before actually adding them.

        Args:
            base_pyproject: Base pyproject.toml path
            additional_deps: List of additional dependencies to test
            group_name: Optional dependency group to add to

        Returns:
            ResolutionResult with success status and any conflicts
        """
        result = ResolutionResult(success=False)

        if not base_pyproject.exists():
            result.warnings.append(f"Base pyproject.toml not found: {base_pyproject}")
            return result

        # Log what we're testing
        deps_preview = ', '.join(additional_deps[:3])
        if len(additional_deps) > 3:
            deps_preview += f'... (+{len(additional_deps) - 3} more)'
        self.logger.debug(f"Testing additions: {deps_preview} to group '{group_name or 'main'}'")

        try:
            # Create temporary directory for testing
            with tempfile.TemporaryDirectory() as temp_dir:
                temp_path = Path(temp_dir)
                temp_pyproject = temp_path / "pyproject.toml"

                # Copy base pyproject
                shutil.copy2(base_pyproject, temp_pyproject)

                # Add the additional dependencies
                manager = PyprojectManager(temp_pyproject)

                if group_name:
                    # Add to dependency group
                    # NOTE(review): no explicit save() here — presumably
                    # add_to_group persists to disk itself; verify in
                    # PyprojectManager before relying on it.
                    try:
                        manager.dependencies.add_to_group(group_name, additional_deps)
                    except CDPyprojectError as e:
                        result.warnings.append(f"Failed to add to group: {e}")
                        return result
                else:
                    # Add to main dependencies
                    config = manager.load()
                    if "project" not in config:
                        config["project"] = {}
                    if "dependencies" not in config["project"]:
                        config["project"]["dependencies"] = []

                    config["project"]["dependencies"].extend(additional_deps)
                    manager.save(config)

                # Test the modified pyproject
                return self.test_resolution(temp_pyproject)

        except Exception as e:
            self.logger.error(f"Error testing with additions: {e}")
            result.warnings.append(f"Could not test additions: {str(e)}")
            return result

    def test_node_addition(
        self, env_path: Path, node_name: str, requirements: list[str]
    ) -> ResolutionResult:
        """Test if adding a node with requirements will cause conflicts.

        Args:
            env_path: Environment path
            node_name: Name of the node being added
            requirements: List of requirements from the node

        Returns:
            ResolutionResult with success status and any conflicts
        """
        # Check both main and staged pyproject
        staged_pyproject = env_path / ".cec" / "pyproject.toml"
        main_pyproject = env_path / "pyproject.toml"

        # Use staged if exists, otherwise main
        base_pyproject = (
            staged_pyproject if staged_pyproject.exists() else main_pyproject
        )

        if not base_pyproject.exists():
            # No pyproject yet, test with just the requirements
            with tempfile.TemporaryDirectory() as temp_dir:
                temp_path = Path(temp_dir)
                temp_pyproject = temp_path / "pyproject.toml"

                # Create minimal pyproject
                manager = PyprojectManager(temp_pyproject)
                config = {
                    "project": {
                        "name": "test-env",
                        "version": "0.1.0",
                        "dependencies": requirements,
                    }
                }
                manager.save(config)

                return self.test_resolution(temp_pyproject)

        # Test with additions to existing pyproject.
        # Group name mirrors how nodes are grouped elsewhere: lowercase
        # with dashes/spaces mapped to underscores.
        group_name = node_name.lower().replace("-", "_").replace(" ", "_")
        return self.test_with_additions(base_pyproject, requirements, group_name)

    def format_conflicts(self, result: ResolutionResult, verbose: bool = False) -> str:
        """Format resolution conflicts for display.

        Args:
            result: ResolutionResult to format
            verbose: Whether to show all conflicts or just top 3

        Returns:
            Formatted string for display
        """
        if result.success:
            return "✓ No dependency conflicts detected"

        lines = []

        # Show conflicts
        if result.conflicts:
            lines.append("⚠️ Dependency conflicts detected:")

            # Filter out the main error line if it's in conflicts
            display_conflicts = [
                c for c in result.conflicts if not c.startswith("Resolution failed")
            ]

            # Cap the list at 3 unless verbose was requested.
            limit = (
                len(display_conflicts) if verbose else min(3, len(display_conflicts))
            )
            for conflict in display_conflicts[:limit]:
                lines.append(f"  • {conflict}")

            if not verbose and len(display_conflicts) > 3:
                lines.append(f"  ... and {len(display_conflicts) - 3} more conflicts")

        # Show warnings
        if result.warnings:
            if lines:
                lines.append("")
            lines.append("⚠️ Warnings:")
            for warning in result.warnings:
                lines.append(f"  • {warning}")

        return "\n".join(lines)