comfygit-core 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,250 @@
1
+ """ModelResolver - Resolve model requirements for environment import/export."""
2
+ from __future__ import annotations
3
+
4
+ from pathlib import Path
5
+ from typing import TYPE_CHECKING, Any
6
+
7
+
8
+ from ..logging.logging_config import get_logger
9
+ from ..models.workflow import (
10
+ ModelResolutionContext,
11
+ WorkflowNodeWidgetRef,
12
+ WorkflowNode,
13
+ ResolvedModel,
14
+ WorkflowDependencies,
15
+ )
16
+ from ..configs.model_config import ModelConfig
17
+
18
+ if TYPE_CHECKING:
19
+ from ..managers.pyproject_manager import PyprojectManager
20
+ from ..repositories.model_repository import ModelRepository
21
+ from ..models.shared import ModelWithLocation
22
+
23
+ logger = get_logger(__name__)
24
+
25
+
26
class ModelResolver:
    """Resolve model requirements for environments using multiple strategies.

    Strategies are tried in order of decreasing confidence:
    previously-recorded context (pyproject.toml), exact path match,
    loader-specific path reconstruction, case-insensitive path match,
    and finally filename-only match.
    """

    def __init__(
        self,
        model_repository: ModelRepository,
        model_config: ModelConfig | None = None,
        download_manager=None,
    ):
        """Initialize ModelResolver.

        Args:
            model_repository: ModelRepository used for model lookups
            model_config: Loader/path configuration; defaults to ModelConfig.load()
            download_manager: Optional ModelDownloadManager for downloading
        """
        self.model_repository = model_repository
        self.model_config = model_config or ModelConfig.load()
        self.download_manager = download_manager

    def _make_resolved(
        self,
        workflow_name,
        ref: WorkflowNodeWidgetRef,
        match_type: str,
        model,
        confidence: float,
    ) -> ResolvedModel:
        """Build a ResolvedModel for a repository match (shared by all strategies)."""
        return ResolvedModel(
            workflow=workflow_name,
            reference=ref,
            match_type=match_type,
            resolved_model=model,
            match_confidence=confidence,
        )

    def resolve_model(
        self, ref: WorkflowNodeWidgetRef, model_context: ModelResolutionContext
    ) -> list[ResolvedModel] | None:
        """Try multiple resolution strategies.

        Returns:
            A single-element list on a confident match, a multi-element list
            (confidence 0.0 per entry) when disambiguation is needed, or
            None when nothing matched.
        """
        workflow_name = model_context.workflow_name
        widget_value = ref.widget_value

        # Strategy 0: Check existing pyproject model data first
        context_resolution_result = self._try_context_resolution(widget_ref=ref, context=model_context)
        if context_resolution_result:
            logger.debug(
                f"Resolved {ref} to {context_resolution_result.resolved_model} from pyproject.toml"
            )
            return [context_resolution_result]

        # Strategy 1: Exact path match
        all_models = self.model_repository.get_all_models()
        candidates = self._try_exact_match(widget_value, all_models)
        if len(candidates) == 1:
            logger.debug(f"Resolved {ref} to {candidates[0]} as exact match")
            return [self._make_resolved(workflow_name, ref, "exact", candidates[0], 1.0)]

        # Strategy 2: Reconstruct paths for native loaders
        if self.model_config.is_model_loader_node(ref.node_type):
            paths = self.model_config.reconstruct_model_path(
                ref.node_type, widget_value
            )
            for path in paths:
                candidates = self._try_exact_match(path, all_models)
                if len(candidates) == 1:
                    logger.debug(
                        f"Resolved {ref} to {candidates[0]} as reconstructed match"
                    )
                    return [self._make_resolved(workflow_name, ref, "reconstructed", candidates[0], 0.9)]

        # Strategy 3: Case-insensitive match
        candidates = self._try_case_insensitive_match(widget_value, all_models)
        if len(candidates) == 1:
            logger.debug(f"Resolved {ref} to {candidates[0]} as case-insensitive match")
            return [self._make_resolved(workflow_name, ref, "case_insensitive", candidates[0], 0.8)]
        elif len(candidates) > 1:
            # Multiple matches - need disambiguation (0.0 confidence flags ambiguity)
            logger.debug(
                f"Resolved {ref} to {candidates} as case-insensitive match, ambiguous"
            )
            return [
                self._make_resolved(workflow_name, ref, "case_insensitive", model, 0.0)
                for model in candidates
            ]

        # Strategy 4: Filename-only match
        filename = Path(widget_value).name
        candidates = self.model_repository.find_by_filename(filename)
        if len(candidates) == 1:
            logger.debug(f"Resolved {ref} to {candidates[0]} as filename-only match")
            return [self._make_resolved(workflow_name, ref, "filename", candidates[0], 0.7)]
        elif len(candidates) > 1:
            # Multiple matches - need disambiguation
            logger.debug(
                f"Resolved {ref} to {candidates} as filename-only match, ambiguous"
            )
            return [
                self._make_resolved(workflow_name, ref, "filename", model, 0.0)
                for model in candidates
            ]

        # No matches found
        logger.debug(f"No matches found in pyproject or model index for {ref}")
        return None

    def _try_exact_match(self, path: str, all_models: list[ModelWithLocation] | None = None) -> list["ModelWithLocation"]:
        """Return all indexed models whose relative path equals *path* exactly."""
        if all_models is None:
            all_models = self.model_repository.get_all_models()
        return [m for m in all_models if m.relative_path == path]

    def _try_case_insensitive_match(self, path: str, all_models: list[ModelWithLocation] | None = None) -> list["ModelWithLocation"]:
        """Return all indexed models whose relative path equals *path* ignoring case."""
        if all_models is None:
            all_models = self.model_repository.get_all_models()
        path_lower = path.lower()
        return [m for m in all_models if m.relative_path.lower() == path_lower]

    def _try_context_resolution(self, context: ModelResolutionContext, widget_ref: WorkflowNodeWidgetRef) -> ResolvedModel | None:
        """Check if this ref was previously resolved using context lookup.

        Supports download-intent detection via full ManifestWorkflowModel
        objects stored in context.previous_resolutions.
        """
        workflow_name = context.workflow_name

        # Check if ref exists in previous resolutions (contains full ManifestWorkflowModel)
        manifest_model = context.previous_resolutions.get(widget_ref)

        if not manifest_model:
            return None

        # Download intent: has a URL but no hash yet - don't re-prompt user
        if manifest_model.status == "unresolved" and manifest_model.sources:
            return ResolvedModel(
                workflow=workflow_name,
                reference=widget_ref,
                match_type="download_intent",
                resolved_model=None,
                model_source=manifest_model.sources[0],  # URL from previous session
                target_path=Path(manifest_model.relative_path) if manifest_model.relative_path else None,
                is_optional=False,
                match_confidence=1.0,
            )

        # Handle optional unresolved models (user explicitly marked as optional)
        if manifest_model.status == "unresolved" and not manifest_model.sources:
            # Only treat as optional if user explicitly marked it (criticality="optional")
            # Otherwise it's just unresolved (from interrupted resolution) - return None
            if manifest_model.criticality == "optional":
                return ResolvedModel(
                    workflow=workflow_name,
                    reference=widget_ref,
                    match_type="workflow_context",
                    resolved_model=None,
                    is_optional=True,
                    match_confidence=1.0,
                )
            # Not explicitly optional - this is truly unresolved, let it fall through
            return None

        # Handle resolved models - look up in repository by hash
        if manifest_model.hash:
            resolved_model = self.model_repository.get_model(manifest_model.hash)

            if not resolved_model:
                # Model was previously resolved but doesn't exist locally.
                # Check global models table for download sources (fallback path).
                global_model = context.global_models.get(manifest_model.hash)
                if global_model and global_model.sources:
                    # Create download intent from global models table
                    logger.info(f"Creating download intent for {manifest_model.filename} from global models table")
                    return ResolvedModel(
                        workflow=workflow_name,
                        reference=widget_ref,
                        match_type="download_intent",
                        resolved_model=None,
                        model_source=global_model.sources[0],
                        target_path=Path(global_model.relative_path) if global_model.relative_path else None,
                        is_optional=False,
                        match_confidence=1.0,
                    )

                logger.warning(f"Model {manifest_model.hash} in previous resolutions but not found in repository or global models")
                return None

            return ResolvedModel(
                workflow=workflow_name,
                reference=widget_ref,
                match_type="workflow_context",
                resolved_model=resolved_model,
                match_confidence=1.0,
            )

        return None
@@ -0,0 +1,218 @@
1
+ """Import preview and analysis service."""
2
+ from __future__ import annotations
3
+
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
6
+ from typing import TYPE_CHECKING
7
+
8
+ import tomlkit
9
+
10
+ from ..logging.logging_config import get_logger
11
+
12
+ if TYPE_CHECKING:
13
+ from ..repositories.model_repository import ModelRepository
14
+ from ..repositories.node_mappings_repository import NodeMappingsRepository
15
+
16
+ logger = get_logger(__name__)
17
+
18
+
19
@dataclass
class ModelAnalysis:
    """Analysis of a single model in the import."""
    filename: str            # display filename from pyproject ("unknown" when absent)
    hash: str | None         # content hash keying the global models table
    sources: list[str]       # recorded download source URLs (may be empty)
    relative_path: str       # path relative to the models dir ("" when absent)
    locally_available: bool  # True when the hash already exists in the local repository
    needs_download: bool     # not locally available AND at least one source exists
    workflows: list[str]     # names of workflows that reference this model's hash
29
+
30
+
31
@dataclass
class NodeAnalysis:
    """Analysis of a custom node in the import."""
    name: str                 # node key from the pyproject nodes table
    source: str  # "registry" | "development" | "git"
    install_spec: str | None  # install specification recorded in pyproject, if any
    is_dev_node: bool         # True when source == "development"
38
+
39
+
40
@dataclass
class WorkflowAnalysis:
    """Analysis of a workflow in the import."""
    name: str             # workflow key from the pyproject workflows table
    models_required: int  # count of models with criticality == "required"
    models_optional: int  # count of models with criticality == "optional"
46
+
47
+
48
@dataclass
class ImportAnalysis:
    """Complete analysis of an import before finalization."""

    # ComfyUI version
    comfyui_version: str | None
    comfyui_version_type: str | None

    # Models breakdown
    models: list[ModelAnalysis]
    total_models: int
    models_locally_available: int
    models_needing_download: int
    models_without_sources: int

    # Nodes breakdown
    nodes: list[NodeAnalysis]
    total_nodes: int
    registry_nodes: int
    dev_nodes: int
    git_nodes: int

    # Workflows
    workflows: list[WorkflowAnalysis]
    total_workflows: int

    # Summary flags
    needs_model_downloads: bool
    needs_node_installs: bool

    def get_download_strategy_recommendation(self) -> str:
        """Recommend a download strategy based on this analysis.

        Returns:
            "required" when some models cannot be downloaded (user must
            provide them), "skip" when everything is already local,
            otherwise "all".
        """
        if self.models_without_sources > 0:
            # Sourceless, non-local models exist - the user has to supply them.
            return "required"
        # Either everything is local ("skip") or everything can be fetched ("all").
        return "all" if self.needs_model_downloads else "skip"
85
+
86
+
87
class ImportAnalyzer:
    """Analyzes import requirements before finalization.

    Works on an extracted .cec directory to provide a preview of what
    will be downloaded, installed, and configured during import
    finalization.
    """

    def __init__(
        self,
        model_repository: ModelRepository,
        node_mapping_repository: NodeMappingsRepository
    ):
        self.model_repository = model_repository
        self.node_mapping_repository = node_mapping_repository

    def analyze_import(self, cec_path: Path) -> ImportAnalysis:
        """Analyze import requirements from extracted .cec directory.

        Args:
            cec_path: Path to extracted .cec directory

        Returns:
            ImportAnalysis with models, nodes, workflows breakdown
        """
        # Parse pyproject.toml from the extracted archive
        with open(cec_path / "pyproject.toml", encoding='utf-8') as fh:
            pyproject = tomlkit.load(fh)

        cg_config = pyproject.get("tool", {}).get("comfygit", {})

        models = self._analyze_models(pyproject)
        nodes = self._analyze_nodes(cg_config)
        workflows = self._analyze_workflows(pyproject)

        # Pre-bucket models once so the summary counts fall out of len()/bool()
        downloads = [m for m in models if m.needs_download]
        sourceless = [m for m in models if not m.sources and not m.locally_available]

        return ImportAnalysis(
            comfyui_version=cg_config.get("comfyui_version"),
            comfyui_version_type=cg_config.get("comfyui_version_type"),
            models=models,
            total_models=len(models),
            models_locally_available=sum(m.locally_available for m in models),
            models_needing_download=len(downloads),
            models_without_sources=len(sourceless),
            nodes=nodes,
            total_nodes=len(nodes),
            registry_nodes=sum(n.source == "registry" for n in nodes),
            dev_nodes=sum(n.is_dev_node for n in nodes),
            git_nodes=sum(n.source == "git" for n in nodes),
            workflows=workflows,
            total_workflows=len(workflows),
            needs_model_downloads=bool(downloads),
            needs_node_installs=any(n.source in ("registry", "git") for n in nodes),
        )

    def _analyze_models(self, pyproject_data: dict) -> list[ModelAnalysis]:
        """Analyze all models from pyproject.toml."""
        cg = pyproject_data.get("tool", {}).get("comfygit", {})
        global_models = cg.get("models", {})
        workflows_config = cg.get("workflows", {})

        # Reverse index: model hash -> workflow names that reference it
        hash_to_workflows: dict = {}
        for wf_name, wf_data in workflows_config.items():
            for entry in wf_data.get("models", []):
                entry_hash = entry.get("hash")
                if entry_hash:
                    hash_to_workflows.setdefault(entry_hash, []).append(wf_name)

        analyses = []
        for model_hash, meta in global_models.items():
            sources = meta.get("sources", [])
            # A model is locally available when its hash is already indexed
            is_local = self.model_repository.get_model(model_hash) is not None
            analyses.append(ModelAnalysis(
                filename=meta.get("filename", "unknown"),
                hash=model_hash,
                sources=sources,
                relative_path=meta.get("relative_path", ""),
                locally_available=is_local,
                needs_download=not is_local and bool(sources),
                workflows=hash_to_workflows.get(model_hash, [])
            ))
        return analyses

    def _analyze_nodes(self, comfygit_config: dict) -> list[NodeAnalysis]:
        """Analyze all custom nodes from pyproject.toml."""
        return [
            NodeAnalysis(
                name=node_name,
                source=node_data.get("source", "registry"),
                install_spec=node_data.get("install_spec"),
                is_dev_node=(node_data.get("source", "registry") == "development")
            )
            for node_name, node_data in comfygit_config.get("nodes", {}).items()
        ]

    def _analyze_workflows(self, pyproject_data: dict) -> list[WorkflowAnalysis]:
        """Analyze all workflows."""
        wf_config = pyproject_data.get("tool", {}).get("comfygit", {}).get("workflows", {})
        analyses = []
        for wf_name, wf_data in wf_config.items():
            criticalities = [m.get("criticality") for m in wf_data.get("models", [])]
            analyses.append(WorkflowAnalysis(
                name=wf_name,
                models_required=criticalities.count("required"),
                models_optional=criticalities.count("optional"),
            ))
        return analyses