comfygit-core 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,914 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import asdict, dataclass, field
4
+ from functools import cached_property
5
+ from pathlib import Path
6
+ from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING
7
+
8
+ from ..services.model_downloader import ModelDownloader
9
+ from ..models.node_mapping import (
10
+ GlobalNodePackage,
11
+ )
12
+ from ..utils.uuid import is_uuid
13
+
14
+ if TYPE_CHECKING:
15
+ from .shared import ModelWithLocation, NodeInfo
16
+
17
+
18
@dataclass
class ScoredMatch:
    """A model candidate paired with its fuzzy-match similarity score.

    ``confidence`` is a qualitative bucket derived from ``score``:
    "high", "good", or "possible".
    """

    model: ModelWithLocation
    score: float
    confidence: str
24
+
25
+
26
@dataclass
class ScoredPackageMatch:
    """A node-package candidate produced by fuzzy search, with its score.

    ``confidence`` is a qualitative bucket: "high", "medium", or "low".
    """

    package_id: str
    package_data: GlobalNodePackage
    score: float
    confidence: str
33
+
34
+
35
@dataclass
class NodeResolutionContext:
    """Mutable state shared while resolving workflow nodes to packages."""

    # Packages already installed in the target environment.
    installed_packages: dict[str, NodeInfo] = field(default_factory=dict)

    # Persisted user overrides (pyproject.toml): node_type -> package_id,
    # or False to mark the node as intentionally optional.
    custom_mappings: dict[str, str | bool] = field(default_factory=dict)

    # Name of the workflow currently being resolved.
    workflow_name: str = ""

    # Fuzzy package search hook injected by workflow_manager.
    # Signature: (node_type: str, installed_packages: dict,
    #             include_registry: bool, limit: int) -> list[ResolvedNodePackage]
    search_fn: Callable | None = None

    # When True, automatically pick the best package from registry mappings
    # instead of asking (post-MVP: make configurable via config file).
    auto_select_ambiguous: bool = True
54
+
55
@dataclass
class WorkflowModelNodeMapping:
    """Groups the node/widget references that point at one workflow model."""

    nodes: list[WorkflowNodeWidgetRef]
58
+
59
@dataclass
class BatchDownloadCallbacks:
    """Optional progress hooks for batch model downloads.

    Every hook may be left as None, in which case the core library performs
    the operation silently; the CLI package supplies implementations that
    render to the terminal.
    """

    # Invoked once before anything downloads; receives the total file count.
    on_batch_start: Callable[[int], None] | None = None

    # Invoked before each file: (filename, current_index, total_count).
    on_file_start: Callable[[str, int, int], None] | None = None

    # Streaming progress: (bytes_downloaded, total_bytes or None if unknown).
    on_file_progress: Callable[[int, int | None], None] | None = None

    # Invoked after each file: (filename, success, error_message or None).
    on_file_complete: Callable[[str, bool, str | None], None] | None = None

    # Invoked once at the end: (success_count, total_count).
    on_batch_complete: Callable[[int, int], None] | None = None
81
+
82
+
83
@dataclass
class NodeInstallCallbacks:
    """Optional progress hooks for node installation.

    Every hook may be left as None, in which case the core library performs
    the operation silently; the CLI package supplies implementations that
    render to the terminal.
    """

    # Invoked once at the start; receives the total number of nodes.
    on_batch_start: Callable[[int], None] | None = None

    # Invoked before each node install: (node_id, current_index, total_count).
    on_node_start: Callable[[str, int, int], None] | None = None

    # Invoked after each node finishes: (node_id, success, error_message or None).
    on_node_complete: Callable[[str, bool, str | None], None] | None = None

    # Invoked once at the end: (success_count, total_count).
    on_batch_complete: Callable[[int, int], None] | None = None
102
+
103
+
104
@dataclass
class ModelResolutionContext:
    """State and hooks used while resolving a workflow's model references."""

    # Workflow being resolved (required).
    workflow_name: str

    # Lookup: ref -> ManifestWorkflowModel (full object with hash, sources,
    # status, etc.). Widened from plain strings so download intents can be
    # detected.
    previous_resolutions: dict[WorkflowNodeWidgetRef, Any] = field(default_factory=dict)

    # Global models table: hash -> ManifestGlobalModel; consulted when
    # creating download intents.
    global_models: dict[str, Any] = field(default_factory=dict)

    # Fuzzy model search hook injected by workflow_manager.
    # Signature: (search_term: str, node_type: str | None, limit: int)
    #   -> list[ScoredMatch]
    search_fn: Callable | None = None

    # Downloader for URL-based model sources (injected by workflow_manager).
    downloader: ModelDownloader | None = None

    # Auto-pick the best candidate when matches are ambiguous (for
    # automated strategies).
    auto_select_ambiguous: bool = True
125
+
126
+
127
@dataclass
class Link:
    """A single edge in the workflow graph connecting two node slots."""

    id: int
    source_node_id: int
    source_slot: int
    target_node_id: int
    target_slot: int
    type: str

    def to_array(self) -> list:
        """Serialize to ComfyUI's positional form:
        [id, source_node, source_slot, target_node, target_slot, type].
        """
        return [
            self.id,
            self.source_node_id,
            self.source_slot,
            self.target_node_id,
            self.target_slot,
            self.type,
        ]

    @classmethod
    def from_array(cls, arr: list) -> Link:
        """Build a Link from ComfyUI's 6-element array format."""
        link_id, src_node, src_slot = arr[0], arr[1], arr[2]
        dst_node, dst_slot, link_type = arr[3], arr[4], arr[5]
        return cls(link_id, src_node, src_slot, dst_node, dst_slot, link_type)
146
+
147
@dataclass
class Group:
    """A visual box that groups nodes on the editor canvas."""

    id: int
    title: str
    # Rectangle as (x, y, width, height).
    bounding: tuple[float, float, float, float]
    color: str
    font_size: int = 24
    flags: dict[str, Any] = field(default_factory=dict)
156
+
157
@dataclass
class Workflow:
    """Complete parsed workflow representation.

    Nodes that live inside subgraphs (ComfyUI v1.24.3+) are flattened into
    ``nodes`` under scoped ids of the form "<subgraph-uuid>:<node-id>".
    Everything required to reconstruct the original file layout is kept in
    ``_subgraph_metadata`` so from_json()/to_json() round-trip losslessly.
    """

    # Core data
    nodes: dict[str, WorkflowNode]  # Keep as dict for easier access

    # Graph structure
    links: list[Link] = field(default_factory=list)
    groups: list[Group] = field(default_factory=list)

    # Metadata (exactly as in your examples)
    id: str | None = None
    revision: int = 0
    last_node_id: int | None = None
    last_link_id: int | None = None
    version: float | None = None

    # Flexible containers (don't break these out into separate fields!)
    config: dict[str, Any] = field(default_factory=dict)
    extra: dict[str, Any] = field(default_factory=dict)

    # Subgraph reconstruction metadata (private)
    _subgraph_metadata: dict[str, dict[str, Any]] = field(default_factory=dict, repr=False)

    def __repr__(self) -> str:
        """Concise representation showing node count and types."""
        node_count = len(self.nodes)
        # Show at most five distinct node types, alphabetically sorted.
        type_summary = ", ".join(sorted(set(n.type for n in self.nodes.values()))[:5])
        if len(self.node_types) > 5:
            type_summary += f", ... ({len(self.node_types) - 5} more types)"
        return f"Workflow(nodes={node_count}, types=[{type_summary}])"

    @cached_property
    def node_types(self) -> set[str]:
        # Cached once; assumes self.nodes is not mutated after parsing.
        return {node.type for node in self.nodes.values()}

    @classmethod
    def from_json(cls, data: dict) -> Workflow:
        """Parse from ComfyUI workflow JSON.

        Supports subgraphs (ComfyUI v1.24.3+): nodes inside subgraphs are extracted
        and flattened, while UUID-based subgraph references are filtered out.

        Stores metadata needed to reconstruct original structure in to_json().
        """
        # Build set of subgraph IDs for filtering UUID references
        subgraph_ids = set()
        if 'definitions' in data and 'subgraphs' in data['definitions']:
            subgraph_ids = {sg['id'] for sg in data['definitions']['subgraphs']}

        nodes = {}
        subgraph_metadata = {}

        # Parse top-level nodes (skip subgraph references but remember them)
        top_level_uuid_refs = []
        if isinstance(data.get('nodes'), list):
            # Frontend format: 'nodes' is a list of node dicts.
            for node in data['nodes']:
                node_type = node.get('type') or node.get('class_type') or ''
                if node_type in subgraph_ids or is_uuid(node_type):
                    # Store UUID reference node for reconstruction
                    top_level_uuid_refs.append(node)
                else:
                    nodes[str(node['id'])] = WorkflowNode.from_dict(node)
        else:
            # Dict-shaped 'nodes': keys are already the node ids.
            for k, v in data.get('nodes', {}).items():
                node_type = v.get('type') or v.get('class_type') or ''
                if node_type in subgraph_ids or is_uuid(node_type):
                    top_level_uuid_refs.append(v)
                else:
                    nodes[k] = WorkflowNode.from_dict(v)

        # Parse subgraph nodes (flatten all subgraphs) + capture ALL metadata for lossless round-trip
        if 'definitions' in data and 'subgraphs' in data['definitions']:
            for subgraph in data['definitions']['subgraphs']:
                subgraph_id = subgraph['id']

                # Capture complete subgraph structure for round-trip preservation
                # Required for ComfyUI Zod schema compliance
                subgraph_metadata[subgraph_id] = {
                    # Core identity
                    'name': subgraph.get('name', ''),

                    # Schema-required fields
                    'version': subgraph.get('version', 1),
                    'revision': subgraph.get('revision', 0),
                    'state': subgraph.get('state', {}),
                    'config': subgraph.get('config', {}),

                    # I/O structure
                    'inputNode': subgraph.get('inputNode'),
                    'outputNode': subgraph.get('outputNode'),
                    'inputs': subgraph.get('inputs', []),
                    'outputs': subgraph.get('outputs', []),
                    'widgets': subgraph.get('widgets', []),

                    # Graph structure (nodes/links handled separately)
                    'links': subgraph.get('links', []),
                    'groups': subgraph.get('groups', []),

                    # Optional metadata
                    'extra': subgraph.get('extra', {}),

                    # Internal tracking (not serialized as-is)
                    'node_ids': [],  # Will be populated below
                    'uuid_refs': []  # Nested subgraph references within this subgraph
                }

                for node in subgraph.get('nodes', []):
                    node_type = node.get('type') or node.get('class_type') or ''
                    node_id = str(node.get('id', 'unknown'))

                    # Check if this is a nested subgraph reference
                    if node_type in subgraph_ids or is_uuid(node_type):
                        # Store nested UUID reference for reconstruction
                        subgraph_metadata[subgraph_id]['uuid_refs'].append(node)
                    else:
                        # Real node - flatten it under a scoped id "uuid:node_id"
                        scoped_id = f"{subgraph_id}:{node_id}"
                        nodes[scoped_id] = WorkflowNode.from_dict(node, subgraph_id=subgraph_id)
                        subgraph_metadata[subgraph_id]['node_ids'].append(scoped_id)

        # Parse links from arrays
        links = [Link.from_array(link) for link in data.get('links', [])]

        # Parse groups (if present)
        groups = [Group(**group) for group in data.get('groups', [])]

        # Store top-level UUID refs in metadata for reconstruction
        if top_level_uuid_refs:
            for ref in top_level_uuid_refs:
                sg_id = ref.get('type')
                if sg_id in subgraph_metadata:
                    subgraph_metadata[sg_id]['top_level_ref'] = ref

        # DO NOT store definitions in extra - we'll reconstruct it in to_json()
        extra = data.get('extra', {}).copy()

        return cls(
            nodes=nodes,
            links=links,
            groups=groups,
            id=data.get('id'),
            revision=data.get('revision', 0),
            last_node_id=data.get('last_node_id'),
            last_link_id=data.get('last_link_id'),
            version=data.get('version'),
            config=data.get('config', {}),
            extra=extra,
            _subgraph_metadata=subgraph_metadata
        )

    def to_json(self) -> dict:
        """Convert back to ComfyUI workflow format.

        Reconstructs original structure with subgraphs if metadata is present.
        """
        # Separate nodes by origin
        top_level_nodes = []
        subgraph_nodes_by_id = {}

        for scoped_id, node in self.nodes.items():
            if node.subgraph_id is None:
                # Top-level node
                top_level_nodes.append(node.to_dict())
            else:
                # Subgraph node - restore original ID
                if node.subgraph_id not in subgraph_nodes_by_id:
                    subgraph_nodes_by_id[node.subgraph_id] = []

                # Extract original node ID from scoped ID (format: "subgraph-uuid:10")
                original_id = scoped_id.split(':', 1)[1] if ':' in scoped_id else scoped_id
                node_dict = node.to_dict()
                node_dict['id'] = int(original_id) if original_id.isdigit() else original_id
                subgraph_nodes_by_id[node.subgraph_id].append(node_dict)

        # Build result
        result = {
            'id': self.id,
            'revision': self.revision,
            'last_node_id': self.last_node_id,
            'last_link_id': self.last_link_id,
            'links': [link.to_array() for link in self.links],
            'groups': [asdict(group) for group in self.groups],
            'config': self.config,
            'version': self.version
        }

        # Reconstruct subgraphs if metadata exists
        if self._subgraph_metadata:
            definitions = {'subgraphs': []}

            for sg_id, metadata in self._subgraph_metadata.items():
                # Get nodes for this subgraph
                sg_nodes = subgraph_nodes_by_id.get(sg_id, [])

                # Add nested UUID references back
                if metadata.get('uuid_refs'):
                    sg_nodes.extend(metadata['uuid_refs'])

                # Reconstruct complete subgraph structure with all fields
                subgraph_dict = {
                    'id': sg_id,
                    'version': metadata.get('version', 1),
                    'state': metadata.get('state', {}),
                    'revision': metadata.get('revision', 0),
                    'config': metadata.get('config', {}),
                    'name': metadata.get('name', ''),
                    'inputNode': metadata.get('inputNode'),
                    'outputNode': metadata.get('outputNode'),
                    'inputs': metadata.get('inputs', []),
                    'outputs': metadata.get('outputs', []),
                    'widgets': metadata.get('widgets', []),
                    'nodes': sg_nodes,
                    'groups': metadata.get('groups', []),
                    'links': metadata.get('links', []),
                    'extra': metadata.get('extra', {})
                }
                definitions['subgraphs'].append(subgraph_dict)

            # Add definitions to top level
            result['definitions'] = definitions

            # Add top-level UUID reference nodes
            for sg_id, metadata in self._subgraph_metadata.items():
                if 'top_level_ref' in metadata:
                    top_level_nodes.append(metadata['top_level_ref'])

        # Add nodes to result
        result['nodes'] = top_level_nodes

        # Add extra (without definitions, since it's at top level now)
        result['extra'] = self.extra

        return result
392
+
393
@dataclass
class NodeInput:
    """One input slot definition on a workflow node."""

    name: str
    type: str
    link: int | None = None
    localized_name: str | None = None
    widget: dict[str, Any] | None = None
    shape: int | None = None
    slot_index: int | None = None

    def to_dict(self) -> dict[str, Any]:
        """Serialize to dict form, emitting optional attributes only when set."""
        result: dict = {
            'name': self.name,
            'type': self.type
        }
        # Optional attributes are appended in a fixed order so the output
        # matches the original workflow layout.
        for key, value in (
            ('link', self.link),
            ('localized_name', self.localized_name),
            ('widget', self.widget),
            ('shape', self.shape),
            ('slot_index', self.slot_index),
        ):
            if value is not None:
                result[key] = value
        return result
421
+
422
+
423
@dataclass
class NodeOutput:
    """One output slot definition on a workflow node."""

    name: str
    type: str
    links: list[int] | None = None
    localized_name: str | None = None
    slot_index: int | None = None

    def to_dict(self) -> dict[str, Any]:
        """Serialize to dict form, emitting optional attributes only when set."""
        result: dict = {
            'name': self.name,
            'type': self.type
        }
        for key, value in (
            ('links', self.links),
            ('localized_name', self.localized_name),
            ('slot_index', self.slot_index),
        ):
            if value is not None:
                result[key] = value
        return result
445
+
446
+
447
@dataclass
class WorkflowNode:
    """Complete workflow node with all available data.

    Parsed from either the frontend workflow format ('type',
    'widgets_values' list) or the API format ('class_type'); from_dict()
    accepts both key spellings.
    """

    id: str
    type: str

    # Core data - dual naming for compatibility
    api_widget_values: dict[str, Any] = field(default_factory=dict)  # For convenience/internal use
    widgets_values: list[Any] = field(default_factory=list)  # Frontend format

    # UI positioning
    pos: tuple[float, float] | None = None
    size: tuple[float, float] | None = None

    # UI state
    flags: dict[str, Any] = field(default_factory=dict)
    order: int | None = None
    mode: int | None = None
    title: str | None = None
    color: str | None = None
    bgcolor: str | None = None

    # Connections
    inputs: list[NodeInput] = field(default_factory=list)
    outputs: list[NodeOutput] = field(default_factory=list)

    # Extended properties
    properties: dict[str, Any] = field(default_factory=dict)

    # Subgraph context (for nodes inside subgraphs)
    subgraph_id: str | None = None

    def __repr__(self) -> str:
        """Concise representation showing only id and type."""
        return f"WorkflowNode(id={self.id!r}, type={self.type!r})"

    @property
    def class_type(self) -> str:
        """Alias for API format compatibility."""
        return self.type

    def to_api_format(self) -> dict:
        """Convert to ComfyUI API format ({'class_type', 'inputs'})."""
        inputs = {}

        # Handle connections and widget values: widget inputs consume
        # widgets_values positionally, in input-declaration order.
        widget_idx = 0
        for inp in self.inputs:
            if inp.link is not None:
                # Connected input: [source_node_id, output_slot]
                # NOTE(review): this uses the *link id* (inp.link) as the
                # source reference, not the source node id — confirm callers
                # resolve link -> source node before relying on this output.
                inputs[inp.name] = [str(inp.link), inp.slot_index or 0]
            elif inp.widget and widget_idx < len(self.widgets_values):
                # Widget input: use value from widgets_values array
                inputs[inp.name] = self.widgets_values[widget_idx]
                widget_idx += 1

        return {
            "class_type": self.type,
            "inputs": inputs
        }

    @classmethod
    def from_dict(cls, data: dict, subgraph_id: str | None = None) -> WorkflowNode:
        """Parse from workflow node dict.

        Args:
            data: Node data dict from workflow JSON
            subgraph_id: Optional subgraph ID if node is inside a subgraph
        """
        # Parse inputs (only dict entries are honored; others are skipped)
        inputs = []
        raw_inputs = data.get('inputs', [])
        if isinstance(raw_inputs, list):
            for idx, input_data in enumerate(raw_inputs):
                if isinstance(input_data, dict):
                    inputs.append(NodeInput(
                        name=input_data.get('name', ''),
                        type=input_data.get('type', ''),
                        link=input_data.get('link'),
                        localized_name=input_data.get('localized_name'),
                        widget=input_data.get('widget'),
                        shape=input_data.get('shape'),
                        # Fall back to list position when slot_index is absent.
                        slot_index=input_data.get('slot_index', idx)
                    ))

        # Parse outputs
        outputs = []
        raw_outputs = data.get('outputs', [])
        if isinstance(raw_outputs, list):
            for idx, output_data in enumerate(raw_outputs):
                if isinstance(output_data, dict):
                    outputs.append(NodeOutput(
                        name=output_data.get('name', ''),
                        type=output_data.get('type', ''),
                        links=output_data.get('links'),
                        localized_name=output_data.get('localized_name'),
                        slot_index=output_data.get('slot_index', idx)
                    ))

        # Parse position and size (only well-formed 2+-element lists)
        pos = None
        if 'pos' in data and isinstance(data['pos'], list) and len(data['pos']) >= 2:
            pos = (float(data['pos'][0]), float(data['pos'][1]))

        size = None
        if 'size' in data and isinstance(data['size'], list) and len(data['size']) >= 2:
            size = (float(data['size'][0]), float(data['size'][1]))

        # Handle dual naming convention for widget values:
        # 'widget_values' (API-style) falls back to 'widgets_values'.
        widgets_values = data.get('widgets_values', [])
        widget_values = data.get('widget_values', widgets_values)

        return cls(
            id=str(data.get('id', 'unknown')),
            type=data.get('type') or data.get('class_type') or '',
            api_widget_values=widget_values,
            widgets_values=widgets_values,
            pos=pos,
            size=size,
            flags=data.get('flags', {}),
            order=data.get('order'),
            mode=data.get('mode'),
            title=data.get('title'),
            color=data.get('color'),
            bgcolor=data.get('bgcolor'),
            inputs=inputs,
            outputs=outputs,
            properties=data.get('properties', {}),
            subgraph_id=subgraph_id
        )

    def to_dict(self) -> dict[str, Any]:
        """Convert to dict format (frontend workflow node shape)."""
        result = {
            # Numeric-looking ids are restored to int for round-trip fidelity.
            'id': int(self.id) if self.id.isdigit() else self.id,
            'type': self.type,
            'widgets_values': self.widgets_values,
            'inputs': [inp.to_dict() for inp in self.inputs],
            'outputs': [out.to_dict() for out in self.outputs],
            'properties': self.properties,
            'flags': self.flags
        }

        # Add optional fields only if they have values
        if self.pos is not None:
            result['pos'] = list(self.pos)
        if self.size is not None:
            result['size'] = list(self.size)
        if self.order is not None:
            result['order'] = self.order
        if self.mode is not None:
            result['mode'] = self.mode
        if self.title is not None:
            result['title'] = self.title
        if self.color is not None:
            result['color'] = self.color
        if self.bgcolor is not None:
            result['bgcolor'] = self.bgcolor

        return result
607
+
608
+
609
@dataclass
class InstalledPackageInfo:
    """Details about a package that is already present in the environment."""

    package_id: str
    display_name: Optional[str]
    installed_version: str
    suggested_version: Optional[str] = None

    @property
    def version_mismatch(self) -> bool:
        """True when a suggested version exists and differs from installed."""
        if not self.suggested_version:
            return False
        return self.installed_version != self.suggested_version
623
+
624
@dataclass(frozen=True)
class WorkflowNodeWidgetRef:
    """Immutable reference to a widget value in a workflow node.

    Instances are used as dict/set keys during resolution, so equality and
    hashing cover every field.
    """

    node_id: str
    node_type: str
    widget_index: int
    widget_value: str  # Original value from workflow

    def __eq__(self, value: object) -> bool:
        """Field-wise equality (isinstance-based, so subclasses compare too).

        Fix: previously returned False for foreign types; returning
        NotImplemented lets Python fall back to the reflected comparison.
        """
        if isinstance(value, WorkflowNodeWidgetRef):
            return self.node_id == value.node_id and \
                self.node_type == value.node_type and \
                self.widget_index == value.widget_index and \
                self.widget_value == value.widget_value
        return NotImplemented

    def __hash__(self) -> int:
        """Hash based on all fields for proper dict/set lookups."""
        return hash((self.node_id, self.node_type, self.widget_index, self.widget_value))
643
+
644
@dataclass
class WorkflowDependencies:
    """Complete workflow dependency analysis results."""

    workflow_name: str
    found_models: list[WorkflowNodeWidgetRef] = field(default_factory=list)
    builtin_nodes: list[WorkflowNode] = field(default_factory=list)
    non_builtin_nodes: list[WorkflowNode] = field(default_factory=list)

    @property
    def total_models(self) -> int:
        """Total number of model references found.

        Fix: previously returned ``len(found_models) + len(found_models)``,
        doubling the count.
        """
        return len(self.found_models)
656
+
657
@dataclass
class ResolvedNodePackage:
    """A potential match for an unknown node."""

    node_type: str
    # How the match was made: "exact", "type_only", "fuzzy", "optional", "manual".
    match_type: str
    package_id: str | None = None
    package_data: GlobalNodePackage | None = None
    versions: list[str] | None = None
    match_confidence: float = 1.0
    is_optional: bool = False
    # Popularity rank from registry (1 = most popular).
    rank: int | None = None

    def __repr__(self) -> str:
        """Compact one-line summary of the resolution."""
        if self.versions:
            version_str = f"{len(self.versions)} version(s)"
        else:
            version_str = "no versions"
        rank_str = f", rank={self.rank}" if self.rank else ""
        return (
            f"ResolvedNodePackage(package={self.package_id!r}, "
            f"node={self.node_type!r}, match={self.match_type}, "
            f"confidence={self.match_confidence:.2f}, {version_str}{rank_str})"
        )
674
+
675
@dataclass
class ResolvedModel:
    """A potential match for a model reference in a workflow."""

    workflow: str  # Resolved models are always associated with a workflow
    reference: WorkflowNodeWidgetRef  # Reference to the model in the workflow
    resolved_model: ModelWithLocation | None = None
    model_source: str | None = None  # path or URL
    is_optional: bool = False
    # "exact", "case_insensitive", "filename", "ambiguous", "not_found", "download_intent"
    match_type: str | None = None
    match_confidence: float = 1.0  # 1.0 = exact, 0.5 = fuzzy
    # Destination the user chose for a pending download (match_type "download_intent").
    target_path: Path | None = None
    # True if the path written in the workflow differs from the resolved path.
    needs_path_sync: bool = False

    @property
    def name(self) -> str:
        """The raw widget value naming this model in the workflow."""
        return self.reference.widget_value

    @property
    def is_resolved(self) -> bool:
        """True once either a local model or an external source is known."""
        return not (self.resolved_model is None and self.model_source is None)
695
+
696
@dataclass
class DownloadResult:
    """Outcome of a single model download attempt."""

    success: bool
    filename: str
    model: Optional[ModelWithLocation] = None  # Populated on success
    error: Optional[str] = None  # Failure description, if any
    reused: bool = False  # True when an existing file satisfied the request
704
+
705
@dataclass
class ResolutionResult:
    """Result of resolution check or application."""

    workflow_name: str
    nodes_resolved: List[ResolvedNodePackage] = field(default_factory=list)  # Nodes resolved/added
    nodes_unresolved: List[WorkflowNode] = field(default_factory=list)  # Nodes not found
    nodes_ambiguous: List[List[ResolvedNodePackage]] = field(default_factory=list)  # Multiple matches
    models_resolved: List[ResolvedModel] = field(default_factory=list)  # Models resolved (or candidates)
    models_unresolved: List[WorkflowNodeWidgetRef] = field(default_factory=list)  # Models not found
    models_ambiguous: List[List[ResolvedModel]] = field(default_factory=list)  # Multiple matches
    download_results: List[DownloadResult] = field(default_factory=list)  # Results from model downloads

    @property
    def has_issues(self) -> bool:
        """True when anything is still unresolved or ambiguous."""
        pending = (
            self.models_unresolved,
            self.models_ambiguous,
            self.nodes_unresolved,
            self.nodes_ambiguous,
        )
        return any(pending)

    @property
    def has_download_intents(self) -> bool:
        """True if at least one resolved model is waiting on a download."""
        for model in self.models_resolved:
            if model.match_type == "download_intent":
                return True
        return False

    @property
    def summary(self) -> str:
        """Human-readable one-liner describing the resolution counts."""
        labelled = (
            (self.nodes_resolved, "nodes"),
            (self.nodes_unresolved, "unresolved nodes"),
            (self.nodes_ambiguous, "ambiguous nodes"),
            (self.models_resolved, "models"),
            (self.models_unresolved, "unresolved models"),
            (self.models_ambiguous, "ambiguous models"),
        )
        parts = [f"{len(items)} {label}" for items, label in labelled if items]
        if not parts:
            return "No resolutions"
        return f"Resolutions: {', '.join(parts)}"
750
+
751
+
752
@dataclass
class CommitAnalysis:
    """Analysis of all workflows for commit."""
    workflows_copied: dict[str, str] = field(default_factory=dict)  # workflow name -> status (e.g. "copied")
    analyses: list[WorkflowDependencies] = field(default_factory=list)  # Per-workflow dependency analyses
    has_git_changes: bool = False  # Whether there are actual git changes to commit

    @property
    def summary(self) -> str:
        """Generate commit summary message.

        Counts without materializing an intermediate list; falls back to a
        generic message when no workflow was actually copied.
        """
        copied_count = sum(1 for s in self.workflows_copied.values() if s == "copied")
        if copied_count:
            return f"Update {copied_count} workflow(s)"
        return "Update workflows"
766
+
767
+
768
+ # Status System Dataclasses
769
+
770
@dataclass
class WorkflowSyncStatus:
    """File-level sync status between ComfyUI and .cec."""
    new: list[str] = field(default_factory=list)
    modified: list[str] = field(default_factory=list)
    deleted: list[str] = field(default_factory=list)
    synced: list[str] = field(default_factory=list)

    @property
    def has_changes(self) -> bool:
        """Whether any workflow file was added, changed, or removed."""
        return any((self.new, self.modified, self.deleted))

    @property
    def is_synced(self) -> bool:
        """Whether every workflow is in sync (inverse of has_changes)."""
        return not self.has_changes

    @property
    def total_count(self) -> int:
        """Number of workflows across all sync buckets."""
        buckets = (self.new, self.modified, self.deleted, self.synced)
        return sum(map(len, buckets))
792
+
793
+
794
@dataclass
class WorkflowAnalysisStatus:
    """Complete analysis for a single workflow including dependencies and resolution."""
    name: str
    sync_state: str  # "new", "modified", "deleted", "synced"

    # Analysis results
    dependencies: WorkflowDependencies
    resolution: ResolutionResult

    # Installation status (for CLI display without pyproject access)
    uninstalled_nodes: list[str] = field(default_factory=list)  # Node IDs needing installation

    @property
    def has_issues(self) -> bool:
        """Check if workflow has unresolved issues or pending download intents.

        Note: Path sync issues are NOT included here as they're auto-fixable
        and don't prevent commits. They're tracked separately via has_path_sync_issues.
        """
        pending_downloads = any(
            model.match_type == "download_intent"
            for model in self.resolution.models_resolved
        )
        return (
            pending_downloads
            or bool(self.uninstalled_nodes)
            or self.resolution.has_issues
        )

    @property
    def issue_summary(self) -> str:
        """Human-readable summary of issues."""
        resolution = self.resolution
        buckets = (
            (resolution.models_ambiguous, "ambiguous models"),
            (resolution.models_unresolved, "unresolved models"),
            (resolution.nodes_unresolved, "missing nodes"),
            (resolution.nodes_ambiguous, "ambiguous nodes"),
        )
        parts = [f"{len(items)} {label}" for items, label in buckets if items]
        return ", ".join(parts) if parts else "No issues"

    @property
    def model_count(self) -> int:
        """Total number of model references."""
        return len(self.dependencies.found_models)

    @property
    def node_count(self) -> int:
        """Total number of nodes in workflow."""
        deps = self.dependencies
        return len(deps.builtin_nodes) + len(deps.non_builtin_nodes)

    @property
    def models_resolved_count(self) -> int:
        """Number of successfully resolved models."""
        return len(self.resolution.models_resolved)

    @property
    def nodes_resolved_count(self) -> int:
        """Number of successfully resolved nodes."""
        return len(self.resolution.nodes_resolved)

    @property
    def uninstalled_count(self) -> int:
        """Number of nodes that need installation."""
        return len(self.uninstalled_nodes)

    @property
    def download_intents_count(self) -> int:
        """Number of models queued for download."""
        return sum(
            model.match_type == "download_intent"
            for model in self.resolution.models_resolved
        )

    @property
    def models_needing_path_sync_count(self) -> int:
        """Number of models that resolved but have wrong paths in workflow JSON."""
        return sum(bool(model.needs_path_sync) for model in self.resolution.models_resolved)

    @property
    def has_path_sync_issues(self) -> bool:
        """Check if workflow has model paths that need syncing."""
        return any(model.needs_path_sync for model in self.resolution.models_resolved)
877
+
878
+
879
@dataclass
class DetailedWorkflowStatus:
    """Complete status for all workflows in environment."""
    sync_status: WorkflowSyncStatus
    analyzed_workflows: list[WorkflowAnalysisStatus] = field(default_factory=list)

    @property
    def workflows_with_issues(self) -> list[WorkflowAnalysisStatus]:
        """List of workflows that have unresolved issues."""
        return [wf for wf in self.analyzed_workflows if wf.has_issues]

    @property
    def total_issues(self) -> int:
        """Count of workflows with issues."""
        return len(self.workflows_with_issues)

    @property
    def total_unresolved_models(self) -> int:
        """Total count of unresolved/ambiguous models across all workflows."""
        total = 0
        for wf in self.analyzed_workflows:
            resolution = wf.resolution
            total += len(resolution.models_unresolved) + len(resolution.models_ambiguous)
        return total

    @property
    def total_missing_nodes(self) -> int:
        """Total count of missing/ambiguous nodes across all workflows."""
        total = 0
        for wf in self.analyzed_workflows:
            resolution = wf.resolution
            total += len(resolution.nodes_unresolved) + len(resolution.nodes_ambiguous)
        return total

    @property
    def is_commit_safe(self) -> bool:
        """Check if safe to commit without issues."""
        return self.total_issues == 0