comfygit_core-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,1321 @@
+ """PyprojectManager - Handles all pyproject.toml file operations.
+
+ This module provides a clean, reusable interface for managing pyproject.toml files,
+ especially for UV-based Python projects.
+ """
+ from __future__ import annotations
+
+ import hashlib
+ import re
+ from functools import cached_property
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+
+ import tomlkit
+ from comfygit_core.models.manifest import ManifestModel, ManifestWorkflowModel
+ from tomlkit.exceptions import TOMLKitError
+
+ from ..logging.logging_config import get_logger
+ from ..models.exceptions import CDPyprojectError, CDPyprojectInvalidError, CDPyprojectNotFoundError
+
+ if TYPE_CHECKING:
+     from ..models.shared import NodeInfo
+
+ from ..utils.dependency_parser import parse_dependency_string
+
+ logger = get_logger(__name__)
+
+
+ class PyprojectManager:
+     """Manages pyproject.toml file operations for Python projects."""
+
+     # Class-level call counter for tracking total loads across all instances
+     _total_load_calls = 0
+
+     def __init__(self, pyproject_path: Path):
+         """Initialize the PyprojectManager.
+
+         Args:
+             pyproject_path: Path to the pyproject.toml file
+         """
+         self.path = pyproject_path
+         self._instance_load_calls = 0  # Instance-level counter
+         self._config_cache: dict | None = None
+         self._cache_mtime: float | None = None
+
+     @cached_property
+     def dependencies(self) -> DependencyHandler:
+         """Get dependency handler."""
+         return DependencyHandler(self)
+
+     @cached_property
+     def nodes(self) -> NodeHandler:
+         """Get node handler."""
+         return NodeHandler(self)
+
+     @cached_property
+     def uv_config(self) -> UVConfigHandler:
+         """Get UV configuration handler."""
+         return UVConfigHandler(self)
+
+     @cached_property
+     def workflows(self) -> WorkflowHandler:
+         """Get workflow handler."""
+         return WorkflowHandler(self)
+
+     @cached_property
+     def models(self) -> ModelHandler:
+         """Get model handler."""
+         return ModelHandler(self)
+
+     # ===== Core Operations =====
+
+     def exists(self) -> bool:
+         """Check if the pyproject.toml file exists."""
+         return self.path.exists()
+
+     def get_load_stats(self) -> dict:
+         """Get statistics about pyproject.toml load operations.
+
+         Returns:
+             Dictionary with load statistics including:
+             - instance_loads: Number of loads for this instance
+             - total_loads: Total loads across all instances
+         """
+         return {
+             "instance_loads": self._instance_load_calls,
+             "total_loads": PyprojectManager._total_load_calls,
+         }
+
+     @classmethod
+     def reset_load_stats(cls):
+         """Reset class-level load statistics (useful for testing/benchmarking)."""
+         cls._total_load_calls = 0
+
+     def load(self, force_reload: bool = False) -> dict:
+         """Load the pyproject.toml file with instance-level caching.
+
+         Cache is automatically invalidated when the file's mtime changes.
+
+         Args:
+             force_reload: Force reload from disk even if cached
+
+         Returns:
+             The loaded configuration dictionary
+
+         Raises:
+             CDPyprojectNotFoundError: If the file doesn't exist
+             CDPyprojectInvalidError: If the file is empty or invalid
+         """
+         import time
+         import traceback
+
+         if not self.exists():
+             raise CDPyprojectNotFoundError(f"pyproject.toml not found at {self.path}")
+
+         # Check cache validity via mtime
+         current_mtime = self.path.stat().st_mtime
+
+         if (not force_reload and
+                 self._config_cache is not None and
+                 self._cache_mtime == current_mtime):
+             # Cache hit
+             logger.debug("[PYPROJECT CACHE HIT] Using cached config")
+             return self._config_cache
+
+         # Cache miss - load from disk
+         PyprojectManager._total_load_calls += 1
+         self._instance_load_calls += 1
+
+         # Get caller info for tracking where loads are coming from
+         stack = traceback.extract_stack()
+         caller_frame = stack[-2] if len(stack) >= 2 else None
+         caller_info = f"{caller_frame.filename}:{caller_frame.lineno} in {caller_frame.name}" if caller_frame else "unknown"
+
+         # Start timing
+         start_time = time.perf_counter()
+
+         try:
+             with open(self.path, encoding='utf-8') as f:
+                 config = tomlkit.load(f)
+         except (OSError, TOMLKitError) as e:
+             raise CDPyprojectInvalidError(f"Failed to parse pyproject.toml at {self.path}: {e}") from e
+
+         if not config:
+             raise CDPyprojectInvalidError(f"pyproject.toml is empty at {self.path}")
+
+         # Cache the loaded config
+         self._config_cache = config
+         self._cache_mtime = current_mtime
+
+         # Calculate elapsed time
+         elapsed_ms = (time.perf_counter() - start_time) * 1000
+
+         # Log with detailed metrics
+         logger.debug(
+             f"[PYPROJECT LOAD #{self._instance_load_calls}/{PyprojectManager._total_load_calls}] "
+             f"Loaded pyproject.toml in {elapsed_ms:.2f}ms | "
+             f"Called from: {caller_info}"
+         )
+
+         return config
+
+
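The mtime-based cache above makes repeated load() calls cheap until the file actually changes on disk. A minimal usage sketch of the expected behavior, assuming a pyproject.toml exists at the given path:

    from pathlib import Path
    from comfygit_core.managers.pyproject_manager import PyprojectManager

    manager = PyprojectManager(Path("pyproject.toml"))
    manager.load()                                     # cache miss: parses from disk
    manager.load()                                     # cache hit: same mtime, cached dict
    print(manager.get_load_stats()["instance_loads"])  # 1

    Path("pyproject.toml").touch()                     # mtime bump invalidates the cache
    manager.load()
    print(manager.get_load_stats()["instance_loads"])  # 2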
+     def save(self, config: dict | None = None) -> None:
+         """Save the configuration to pyproject.toml.
+
+         Automatically invalidates the cache to ensure fresh reads after save.
+
+         Args:
+             config: Configuration to save (must not be None)
+
+         Raises:
+             CDPyprojectError: If no configuration to save or write fails
+         """
+         if config is None:
+             raise CDPyprojectError("No configuration to save")
+
+         # Clean up empty sections before saving
+         self._cleanup_empty_sections(config)
+
+         # Ensure proper spacing between major sections
+         self._ensure_section_spacing(config)
+
+         try:
+             # Ensure parent directory exists
+             self.path.parent.mkdir(parents=True, exist_ok=True)
+
+             with open(self.path, 'w', encoding='utf-8') as f:
+                 tomlkit.dump(config, f)
+         except OSError as e:
+             raise CDPyprojectError(f"Failed to write pyproject.toml to {self.path}: {e}") from e
+
+         # Invalidate cache after save to ensure fresh reads
+         self._config_cache = None
+         self._cache_mtime = None
+
+         logger.debug(f"Saved pyproject.toml to {self.path}")
+
+     def reset_lazy_handlers(self):
+         """Clear all cached properties to force re-initialization."""
+         cached_props = [
+             name for name in dir(type(self))
+             if isinstance(getattr(type(self), name, None), cached_property)
+         ]
+         for prop in cached_props:
+             if prop in self.__dict__:
+                 del self.__dict__[prop]
+
+         # Invalidate the config cache so the next load() re-reads from disk
+         self._config_cache = None
+         self._cache_mtime = None
+
+     def _cleanup_empty_sections(self, config: dict) -> None:
+         """Recursively remove empty sections from config."""
+         def _clean_dict(d: dict) -> bool:
+             """Recursively clean dict, return True if dict became empty."""
+             keys_to_remove = []
+             for key, value in list(d.items()):
+                 if isinstance(value, dict):
+                     if _clean_dict(value) or not value:
+                         keys_to_remove.append(key)
+             for key in keys_to_remove:
+                 del d[key]
+             return not d
+
+         _clean_dict(config)
+
+     def _ensure_section_spacing(self, config: dict) -> None:
+         """Ensure proper spacing between major sections in tool.comfygit.
+
+         This adds visual separation between:
+         - [tool.comfygit] metadata and workflows
+         - workflows section and models section
+         """
+         if 'tool' not in config or 'comfygit' not in config['tool']:
+             return
+
+         comfygit_table = config['tool']['comfygit']
+
+         # Track which sections exist
+         has_metadata = any(k in comfygit_table for k in ['comfyui_version', 'python_version', 'manifest_state'])
+         has_nodes = 'nodes' in comfygit_table
+         has_workflows = 'workflows' in comfygit_table
+         has_models = 'models' in comfygit_table
+
+         # Only rebuild if we have workflows or models (need spacing)
+         if not (has_workflows or has_models):
+             return
+
+         # Deep copy sections to strip any accumulated whitespace
+         def deep_copy_table(obj):
+             """Recursively copy tomlkit objects, preserving special types."""
+             if isinstance(obj, dict):
+                 # Determine if inline table or regular table
+                 is_inline = hasattr(obj, '__class__') and 'InlineTable' in obj.__class__.__name__
+                 new_dict = tomlkit.inline_table() if is_inline else tomlkit.table()
+                 for k, v in obj.items():
+                     # Skip whitespace items (empty keys)
+                     if k == '':
+                         continue
+                     new_dict[k] = deep_copy_table(v)
+                 return new_dict
+             elif isinstance(obj, list):
+                 # Check if this is a tomlkit array (preserve inline table items)
+                 is_tomlkit_array = hasattr(obj, '__class__') and 'Array' in obj.__class__.__name__
+                 if is_tomlkit_array:
+                     new_array = tomlkit.array()
+                     for item in obj:
+                         # Preserve inline tables inside arrays
+                         if hasattr(item, '__class__') and 'InlineTable' in item.__class__.__name__:
+                             new_inline = tomlkit.inline_table()
+                             for k, v in item.items():
+                                 new_inline[k] = deep_copy_table(v)
+                             new_array.append(new_inline)
+                         else:
+                             new_array.append(deep_copy_table(item))
+                     return new_array
+                 else:
+                     return [deep_copy_table(item) for item in obj]
+             else:
+                 return obj
+
+         # Create a new table with sections in the correct order
+         new_table = tomlkit.table()
+
+         # Add metadata fields first
+         for key in ['comfyui_version', 'python_version', 'manifest_state']:
+             if key in comfygit_table:
+                 new_table[key] = comfygit_table[key]
+
+         # Add nodes if it exists
+         if has_nodes:
+             new_table['nodes'] = deep_copy_table(comfygit_table['nodes'])
+
+         # Add workflows with preceding newline if needed
+         if has_workflows:
+             if has_metadata or has_nodes:
+                 new_table.add(tomlkit.nl())
+             new_table['workflows'] = deep_copy_table(comfygit_table['workflows'])
+
+         # Add models with preceding newline if needed
+         if has_models:
+             if has_metadata or has_nodes or has_workflows:
+                 new_table.add(tomlkit.nl())
+             new_table['models'] = deep_copy_table(comfygit_table['models'])
+
+         # Replace the comfygit table
+         config['tool']['comfygit'] = new_table
+
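For illustration, a standalone sketch (hypothetical data, not from this package) of the tomlkit pattern that _ensure_section_spacing relies on: rebuilding a table and inserting tomlkit.nl() so a blank line separates sub-tables when dumped:

    import tomlkit

    comfygit = tomlkit.table()
    comfygit["comfyui_version"] = "0.3.0"      # hypothetical metadata
    comfygit.add(tomlkit.nl())                 # blank line before the next section
    workflows = tomlkit.table()
    workflows["demo"] = tomlkit.inline_table()
    workflows["demo"]["path"] = "workflows/demo.json"
    comfygit["workflows"] = workflows

    doc = tomlkit.document()
    tool = tomlkit.table()
    tool["comfygit"] = comfygit
    doc["tool"] = tool
    print(tomlkit.dumps(doc))   # [tool.comfygit] metadata, blank line, then the workflows table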
+     def get_manifest_state(self) -> str:
+         """Get the current manifest state.
+
+         Returns:
+             'local' or 'exportable'
+         """
+         config = self.load()
+         if 'tool' in config and 'comfygit' in config['tool']:
+             return config['tool']['comfygit'].get('manifest_state', 'local')
+         return 'local'
+
+     def set_manifest_state(self, state: str) -> None:
+         """Set the manifest state.
+
+         Args:
+             state: 'local' or 'exportable'
+         """
+         if state not in ('local', 'exportable'):
+             raise ValueError(f"Invalid manifest state: {state}")
+
+         config = self.load()
+         if 'tool' not in config:
+             config['tool'] = {}
+         if 'comfygit' not in config['tool']:
+             config['tool']['comfygit'] = {}
+
+         config['tool']['comfygit']['manifest_state'] = state
+         self.save(config)
+         logger.info(f"Set manifest state to: {state}")
+
+     def snapshot(self) -> bytes:
+         """Capture current pyproject.toml file contents for rollback.
+
+         Returns:
+             Raw file bytes
+         """
+         return self.path.read_bytes()
+
+     def restore(self, snapshot: bytes) -> None:
+         """Restore pyproject.toml from a snapshot.
+
+         Args:
+             snapshot: Previously captured file bytes from snapshot()
+         """
+         self.path.write_bytes(snapshot)
+         # Reset lazy handlers so they reload from restored state
+         self.reset_lazy_handlers()
+         logger.debug("Restored pyproject.toml from snapshot")
+
+
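snapshot() and restore() support a simple rollback pattern around multi-step edits. A hedged sketch of how a caller might wrap mutations (the specific edits here are hypothetical):

    from pathlib import Path
    from comfygit_core.managers.pyproject_manager import PyprojectManager

    manager = PyprojectManager(Path("pyproject.toml"))
    backup = manager.snapshot()              # raw bytes of the current file
    try:
        manager.dependencies.add_to_group("example-group", ["requests>=2.31"])
        manager.uv_config.add_constraint("numpy<2.0")
    except Exception:
        manager.restore(backup)              # original bytes back, handlers reset
        raise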
+ class BaseHandler:
+     """Base handler providing common functionality."""
+
+     def __init__(self, manager: PyprojectManager):
+         self.manager = manager
+
+     def load(self) -> dict:
+         """Load configuration from manager."""
+         return self.manager.load()
+
+     def save(self, config: dict) -> None:
+         """Save configuration through manager.
+
+         Raises:
+             CDPyprojectError: If the underlying write fails
+         """
+         self.manager.save(config)
+
+     def ensure_section(self, config: dict, *path: str) -> dict:
+         """Ensure a nested section exists in config."""
+         current = config
+         for key in path:
+             if key not in current:
+                 current[key] = tomlkit.table()
+             current = current[key]
+         return current
+
+     def clean_empty_sections(self, config: dict, *path: str) -> None:
+         """Clean up empty sections by removing them from bottom up."""
+         if not path:
+             return
+
+         # Navigate to parent of the last key
+         current = config
+         for key in path[:-1]:
+             if key not in current:
+                 return
+             current = current[key]
+
+         # Check if the final key exists and is empty
+         final_key = path[-1]
+         if final_key in current and not current[final_key]:
+             del current[final_key]
+             # Recursively clean parent if it becomes empty (except top-level sections)
+             if len(path) > 2 and not current:
+                 self.clean_empty_sections(config, *path[:-1])
+
+
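ensure_section walks and creates nested tables, so handlers can write to deep paths such as tool.comfygit.nodes without guarding every level. A small sketch, assuming an empty document:

    from pathlib import Path
    import tomlkit

    manager = PyprojectManager(Path("pyproject.toml"))
    config = tomlkit.document()
    nodes = manager.nodes.ensure_section(config, "tool", "comfygit", "nodes")
    nodes["example-node"] = {"version": "1.0.0"}   # hypothetical entry
    assert "example-node" in config["tool"]["comfygit"]["nodes"]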
+ class DependencyHandler(BaseHandler):
+     """Handles dependency groups and analysis."""
+
+     def get_groups(self) -> dict[str, list[str]]:
+         """Get all dependency groups."""
+         try:
+             config = self.load()
+             return config.get('dependency-groups', {})
+         except Exception:
+             return {}
+
+     def add_to_group(self, group: str, packages: list[str]) -> None:
+         """Add packages to a dependency group."""
+         config = self.load()
+
+         if 'dependency-groups' not in config:
+             config['dependency-groups'] = {}
+
+         if group not in config['dependency-groups']:
+             config['dependency-groups'][group] = []
+
+         group_deps = config['dependency-groups'][group]
+         added_count = 0
+
+         for pkg in packages:
+             if pkg not in group_deps:
+                 group_deps.append(pkg)
+                 added_count += 1
+
+         logger.info(f"Added {added_count} packages to group '{group}'")
+         self.save(config)
+
+     def remove_group(self, group: str) -> None:
+         """Remove a dependency group."""
+         config = self.load()
+
+         if 'dependency-groups' not in config:
+             raise ValueError("No dependency groups found")
+
+         if group not in config['dependency-groups']:
+             raise ValueError(f"Group '{group}' not found")
+
+         del config['dependency-groups'][group]
+         logger.info(f"Removed dependency group: {group}")
+         self.save(config)
+
+     def remove_from_group(self, group: str, packages: list[str]) -> dict[str, list[str]]:
+         """Remove specific packages from a dependency group.
+
+         Matches packages case-insensitively by extracting package names from
+         dependency specifications (e.g., "pillow>=9.0.0" matches "pillow").
+
+         Args:
+             group: Dependency group name
+             packages: List of package names to remove (without version specs)
+
+         Returns:
+             Dict with 'removed' (list of packages removed) and 'skipped' (list not found)
+
+         Raises:
+             ValueError: If group doesn't exist
+         """
+         config = self.load()
+
+         if 'dependency-groups' not in config:
+             raise ValueError("No dependency groups found")
+
+         if group not in config['dependency-groups']:
+             raise ValueError(f"Group '{group}' not found")
+
+         group_deps = config['dependency-groups'][group]
+
+         # Normalize package names for case-insensitive comparison
+         packages_to_remove = {pkg.lower() for pkg in packages}
+
+         # Track what we remove and skip
+         removed = []
+         remaining = []
+
+         for dep in group_deps:
+             pkg_name, _ = parse_dependency_string(dep)
+             if pkg_name.lower() in packages_to_remove:
+                 removed.append(pkg_name)
+             else:
+                 remaining.append(dep)
+
+         # Update or delete the group
+         if remaining:
+             config['dependency-groups'][group] = remaining
+         else:
+             # If no packages left, delete the entire group
+             del config['dependency-groups'][group]
+             logger.info(f"Removed empty dependency group: {group}")
+
+         # Find skipped packages (requested but not found)
+         removed_lower = {pkg.lower() for pkg in removed}
+         skipped = [pkg for pkg in packages if pkg.lower() not in removed_lower]
+
+         if removed:
+             logger.info(f"Removed {len(removed)} package(s) from group '{group}'")
+
+         self.save(config)
+
+         return {
+             'removed': removed,
+             'skipped': skipped
+         }
+
+
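A quick sketch of the remove_from_group contract, with hypothetical group contents; matching is by extracted package name, case-insensitive, and the result separates hits from misses:

    manager = PyprojectManager(Path("pyproject.toml"))

    # Suppose group "image-tools" currently holds ["Pillow>=9.0.0", "numpy==1.26.4"]
    result = manager.dependencies.remove_from_group("image-tools", ["pillow", "scipy"])
    print(result["removed"])   # ["Pillow"] -- matched despite case and version spec
    print(result["skipped"])   # ["scipy"] -- requested but not in the group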
+ class UVConfigHandler(BaseHandler):
+     """Handles UV-specific configuration."""
+
+     # System-level sources that should never be auto-removed
+     PROTECTED_SOURCES = {'pytorch-cuda', 'pytorch-cpu', 'torch-cpu', 'torch-cuda'}
+
+     def add_constraint(self, package: str) -> None:
+         """Add a constraint dependency to [tool.uv]."""
+         config = self.load()
+         self.ensure_section(config, 'tool', 'uv')
+
+         constraints = config['tool']['uv'].get('constraint-dependencies', [])
+
+         # Extract package name for comparison
+         pkg_name = self._extract_package_name(package)
+
+         # Update existing or add new
+         for i, existing in enumerate(constraints):
+             if self._extract_package_name(existing) == pkg_name:
+                 logger.info(f"Updating constraint: {existing} -> {package}")
+                 constraints[i] = package
+                 break
+         else:
+             logger.info(f"Adding constraint: {package}")
+             constraints.append(package)
+
+         config['tool']['uv']['constraint-dependencies'] = constraints
+         self.save(config)
+
+     def remove_constraint(self, package_name: str) -> bool:
+         """Remove a constraint dependency from [tool.uv]."""
+         config = self.load()
+         constraints = config.get('tool', {}).get('uv', {}).get('constraint-dependencies', [])
+
+         if not constraints:
+             return False
+
+         # Find and remove constraint by package name
+         for i, existing in enumerate(constraints):
+             if self._extract_package_name(existing) == package_name.lower():
+                 removed = constraints.pop(i)
+                 logger.info(f"Removing constraint: {removed}")
+                 config['tool']['uv']['constraint-dependencies'] = constraints
+                 self.save(config)
+                 return True
+
+         return False
+
+     def add_index(self, name: str, url: str, explicit: bool = True) -> None:
+         """Add an index to [[tool.uv.index]]."""
+         config = self.load()
+         self.ensure_section(config, 'tool', 'uv')
+         indexes = config['tool']['uv'].get('index', [])
+
+         if not isinstance(indexes, list):
+             indexes = [indexes] if indexes else []
+
+         # Update existing or add new
+         for i, existing in enumerate(indexes):
+             if existing.get('name') == name:
+                 logger.info(f"Updating index '{name}'")
+                 indexes[i] = {'name': name, 'url': url, 'explicit': explicit}
+                 break
+         else:
+             logger.info(f"Creating index '{name}'")
+             indexes.append({'name': name, 'url': url, 'explicit': explicit})
+
+         config['tool']['uv']['index'] = indexes
+         self.save(config)
+
+     def add_source(self, package_name: str, source: dict) -> None:
+         """Add a source mapping to [tool.uv.sources]."""
+         config = self.load()
+         self.ensure_section(config, 'tool', 'uv')
+
+         if 'sources' not in config['tool']['uv']:
+             config['tool']['uv']['sources'] = {}
+
+         config['tool']['uv']['sources'][package_name] = source
+         logger.info(f"Added source for '{package_name}': {source}")
+         self.save(config)
+
+     def add_url_sources(self, package_name: str, urls_with_markers: list[dict], group: str | None = None) -> None:
+         """Add URL sources with markers to [tool.uv.sources]."""
+         config = self.load()
+         self.ensure_section(config, 'tool', 'uv')
+
+         if 'sources' not in config['tool']['uv']:
+             config['tool']['uv']['sources'] = {}
+
+         # Clean up markers
+         cleaned_sources = []
+         for source in urls_with_markers:
+             cleaned_source = {'url': source['url']}
+             if source.get('marker'):
+                 cleaned_marker = source['marker'].replace('\\"', '"').replace("\\'", "'")
+                 cleaned_source['marker'] = cleaned_marker
+             cleaned_sources.append(cleaned_source)
+
+         # Format sources
+         if len(cleaned_sources) > 1:
+             config['tool']['uv']['sources'][package_name] = cleaned_sources
+         else:
+             config['tool']['uv']['sources'][package_name] = cleaned_sources[0]
+
+         # Add to dependency group if specified
+         if group:
+             self._add_to_dependency_group(config, group, package_name, urls_with_markers)
+
+         self.save(config)
+
+     def get_constraints(self) -> list[str]:
+         """Get UV constraint dependencies."""
+         try:
+             config = self.load()
+             return config.get('tool', {}).get('uv', {}).get('constraint-dependencies', [])
+         except Exception:
+             return []
+
+     def get_indexes(self) -> list[dict]:
+         """Get UV indexes."""
+         try:
+             config = self.load()
+             indexes = config.get('tool', {}).get('uv', {}).get('index', [])
+             return indexes if isinstance(indexes, list) else [indexes] if indexes else []
+         except Exception:
+             return []
+
+     def get_sources(self) -> dict:
+         """Get UV source mappings."""
+         try:
+             config = self.load()
+             return config.get('tool', {}).get('uv', {}).get('sources', {})
+         except Exception:
+             return {}
+
+     def get_source_names(self) -> set[str]:
+         """Get all UV source package names."""
+         return set(self.get_sources().keys())
+
+     def cleanup_orphaned_sources(self, removed_node_sources: list[str]) -> None:
+         """Remove sources that are no longer referenced by any nodes."""
+         if not removed_node_sources:
+             return
+
+         config = self.load()
+
+         # Get all remaining nodes and their sources
+         remaining_sources = set()
+         if hasattr(self.manager, 'nodes'):
+             for node_info in self.manager.nodes.get_existing().values():
+                 if node_info.dependency_sources:
+                     remaining_sources.update(node_info.dependency_sources)
+
+         # Remove orphaned sources (not protected, not used by other nodes)
+         sources_removed = False
+         for source_name in removed_node_sources:
+             if (source_name not in remaining_sources and
+                     not self._is_protected_source(source_name)):
+                 self._remove_source(config, source_name)
+                 sources_removed = True
+
+         if sources_removed:
+             self.save(config)
+
+     def _is_protected_source(self, source_name: str) -> bool:
+         """Check if source should never be auto-removed."""
+         return any(protected in source_name.lower() for protected in self.PROTECTED_SOURCES)
+
+     def _remove_source(self, config: dict, source_name: str) -> None:
+         """Remove all source entries for a given package."""
+         if 'tool' not in config or 'uv' not in config['tool']:
+             return
+
+         sources = config['tool']['uv'].get('sources', {})
+         if source_name in sources:
+             del sources[source_name]
+             logger.info(f"Removed orphaned source: {source_name}")
+
+     def _extract_package_name(self, package_spec: str) -> str:
+         """Extract package name from a version specification."""
+         name, _ = parse_dependency_string(package_spec)
+         return name.lower()
+
+     def _add_to_dependency_group(self, config: dict, group: str, package: str, sources: list[dict]) -> None:
+         """Internal helper to add a package to a dependency group with markers."""
+         if 'dependency-groups' not in config:
+             config['dependency-groups'] = {}
+
+         if group not in config['dependency-groups']:
+             config['dependency-groups'][group] = []
+
+         group_deps = config['dependency-groups'][group]
+
+         # Check if package already exists
+         pkg_name = self._extract_package_name(package)
+         for dep in group_deps:
+             if self._extract_package_name(dep) == pkg_name:
+                 return  # Already exists
+
+         # Add with unique markers
+         unique_markers = set()
+         for source in sources:
+             if source.get('marker'):
+                 unique_markers.add(source['marker'])
+
+         if unique_markers:
+             for marker in unique_markers:
+                 entry = f"{package} ; {marker}"
+                 if entry not in group_deps:
+                     group_deps.append(entry)
+                     logger.info(f"Added '{entry}' to group '{group}'")
+         else:
+             group_deps.append(package)
+             logger.info(f"Added '{package}' to group '{group}'")
+
+
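For context, a hedged sketch of what add_index plus add_url_sources write, using hypothetical package names and URLs; the layout mirrors standard [tool.uv] configuration:

    manager = PyprojectManager(Path("pyproject.toml"))
    uv = manager.uv_config

    uv.add_index("pytorch-cu124", "https://download.pytorch.org/whl/cu124", explicit=True)
    uv.add_url_sources(
        "example-wheel",   # hypothetical package
        [
            {"url": "https://example.com/example_wheel-1.0-py3-none-any.whl",
             "marker": "sys_platform == 'linux'"},
        ],
        group="example-group",
    )
    # [tool.uv.sources] now maps example-wheel to the URL (with its marker), and
    # dependency-groups gains an "example-wheel ; sys_platform == 'linux'" entry.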
+ class NodeHandler(BaseHandler):
+     """Handles custom node management."""
+
+     def add(self, node_info: NodeInfo, node_identifier: str | None) -> None:
+         """Add a custom node to the pyproject.toml."""
+         config = self.load()
+         identifier = node_identifier or (node_info.registry_id if node_info.registry_id else node_info.name)
+
+         # Only create nodes section when actually adding a node
+         self.ensure_section(config, 'tool', 'comfygit', 'nodes')
+
+         # Build node data, excluding any None values (tomlkit requirement)
+         filtered_data = {k: v for k, v in node_info.__dict__.items() if v is not None}
+
+         # Create a proper tomlkit table for better formatting
+         node_table = tomlkit.table()
+         for key, value in filtered_data.items():
+             node_table[key] = value
+
+         # Add node to configuration
+         config['tool']['comfygit']['nodes'][identifier] = node_table
+
+         logger.info(f"Added custom node: {identifier}")
+         self.save(config)
+
+     def add_development(self, name: str) -> None:
+         """Add a development node (version='dev')."""
+         from ..models.shared import NodeInfo
+         node_info = NodeInfo(
+             name=name,
+             version='dev',
+             source='development'
+         )
+         self.add(node_info, name)
+
+     # def is_development(self, identifier: str) -> bool:
+     #     """Check if a node is a development node."""
+     #     nodes = self.get_existing()
+     #     node = nodes.get(identifier)
+     #     return node and hasattr(node, 'version') and node.version == 'dev'
+
+     def get_existing(self) -> dict[str, NodeInfo]:
+         """Get all existing custom nodes from pyproject.toml."""
+         from ..models.shared import NodeInfo
+         config = self.load()
+         nodes_data = config.get('tool', {}).get('comfygit', {}).get('nodes', {})
+
+         result = {}
+         for identifier, node_data in nodes_data.items():
+             result[identifier] = NodeInfo(
+                 name=node_data.get('name') or identifier,
+                 repository=node_data.get('repository'),
+                 registry_id=node_data.get('registry_id'),
+                 version=node_data.get('version'),
+                 source=node_data.get('source', 'unknown'),
+                 download_url=node_data.get('download_url'),
+                 dependency_sources=node_data.get('dependency_sources')
+             )
+
+         return result
+
+     def remove(self, node_identifier: str) -> bool:
+         """Remove a custom node and its associated dependency group."""
+         config = self.load()
+         removed = False
+
+         # Get existing nodes to find the one to remove
+         existing_nodes = self.get_existing()
+         if node_identifier not in existing_nodes:
+             return False
+
+         node_info = existing_nodes[node_identifier]
+
+         # Generate the hash-based group name that was used during add
+         fallback_identifier = node_info.registry_id if node_info.registry_id else node_info.name
+         group_name = self.generate_group_name(node_info, fallback_identifier)
+
+         # Remove from dependency-groups using the hash-based group name
+         if 'dependency-groups' in config and group_name in config['dependency-groups']:
+             del config['dependency-groups'][group_name]
+             removed = True
+             logger.debug(f"Removed dependency group: {group_name}")
+
+         # Remove from nodes using the original identifier
+         if ('tool' in config and 'comfygit' in config['tool'] and
+                 'nodes' in config['tool']['comfygit'] and
+                 node_identifier in config['tool']['comfygit']['nodes']):
+             del config['tool']['comfygit']['nodes'][node_identifier]
+             removed = True
+             logger.debug(f"Removed node info: {node_identifier}")
+
+         if removed:
+             # Clean up empty sections
+             self.clean_empty_sections(config, 'tool', 'comfygit', 'nodes')
+             self.save(config)
+             logger.info(f"Removed custom node: {node_identifier}")
+
+         return removed
+
+     @staticmethod
+     def generate_group_name(node_info: NodeInfo, fallback_identifier: str) -> str:
+         """Generate a collision-resistant group name for a custom node."""
+         # Use node name as base, fallback to identifier
+         base_name = node_info.name or fallback_identifier
+
+         # Normalize the base name (similar to what UV would do)
+         normalized = re.sub(r'[^a-z0-9]+', '-', base_name.lower()).strip('-')
+
+         # Generate hash from repository URL (most unique identifier) or fallback
+         hash_source = node_info.repository or fallback_identifier
+         hash_digest = hashlib.sha256(hash_source.encode()).hexdigest()[:8]
+
+         return f"{normalized}-{hash_digest}"
+
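The group name scheme is deterministic: normalize the display name, then append the first 8 hex chars of the SHA-256 of the repository URL (or the fallback identifier). A worked sketch with a hypothetical node:

    import hashlib
    import re

    name = "ComfyUI Example Pack"                              # hypothetical
    repo = "https://github.com/example/comfyui-example-pack"

    normalized = re.sub(r'[^a-z0-9]+', '-', name.lower()).strip('-')
    digest = hashlib.sha256(repo.encode()).hexdigest()[:8]
    print(f"{normalized}-{digest}")   # "comfyui-example-pack-<8 hex chars>"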
+
+ # DevNodeHandler removed - development nodes now handled by NodeHandler with version='dev'
+
+
+ class WorkflowHandler(BaseHandler):
+     """Handles workflow model resolutions and tracking."""
+
+     def get_workflow(self, name: str) -> dict | None:
+         """Get a workflow from pyproject.toml."""
+         try:
+             config = self.load()
+             return config.get('tool', {}).get('comfygit', {}).get('workflows', {}).get(name, None)
+         except Exception:
+             logger.error(f"Failed to load config for workflow: {name}")
+             return None
+
+     def add_workflow(self, name: str) -> None:
+         """Add a new workflow to the pyproject.toml."""
+         config = self.load()
+         self.ensure_section(config, 'tool', 'comfygit', 'workflows')
+         config['tool']['comfygit']['workflows'][name] = tomlkit.table()
+         config['tool']['comfygit']['workflows'][name]['path'] = f"workflows/{name}.json"
+         logger.info(f"Added new workflow: {name}")
+         self.save(config)
+
+     def get_workflow_models(
+         self,
+         workflow_name: str,
+         config: dict | None = None
+     ) -> list[ManifestWorkflowModel]:
+         """Get all models for a workflow.
+
+         Args:
+             workflow_name: Workflow name
+             config: Optional in-memory config for batched reads. If None, loads from disk.
+
+         Returns:
+             List of ManifestWorkflowModel objects (resolved and unresolved)
+         """
+         try:
+             if config is None:
+                 config = self.load()
+             workflow_data = config.get('tool', {}).get('comfygit', {}).get('workflows', {}).get(workflow_name, {})
+             models_data = workflow_data.get('models', [])
+
+             return [ManifestWorkflowModel.from_toml_dict(m) for m in models_data]
+         except Exception as e:
+             logger.debug(f"Error loading workflow models for '{workflow_name}': {e}")
+             return []
+
+     def set_workflow_models(
+         self,
+         workflow_name: str,
+         models: list[ManifestWorkflowModel],
+         config: dict | None = None
+     ) -> None:
+         """Set all models for a workflow (unified list).
+
+         Args:
+             workflow_name: Workflow name
+             models: List of ManifestWorkflowModel objects (resolved and unresolved)
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+         """
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         # Ensure sections exist
+         self.ensure_section(config, 'tool', 'comfygit', 'workflows')
+
+         # Ensure specific workflow exists
+         if workflow_name not in config['tool']['comfygit']['workflows']:
+             config['tool']['comfygit']['workflows'][workflow_name] = tomlkit.table()
+
+         # Set workflow path
+         if 'path' not in config['tool']['comfygit']['workflows'][workflow_name]:
+             config['tool']['comfygit']['workflows'][workflow_name]['path'] = f"workflows/{workflow_name}.json"
+
+         # Serialize to array of tables
+         models_array = []
+         for model in models:
+             model_dict = model.to_toml_dict()
+             # Convert to inline table for compact representation
+             models_array.append(model_dict)
+
+         config['tool']['comfygit']['workflows'][workflow_name]['models'] = models_array
+
+         if not is_batch:
+             self.save(config)
+
+         logger.debug(f"Set {len(models)} model(s) for workflow '{workflow_name}'")
+
+     def add_workflow_model(
+         self,
+         workflow_name: str,
+         model: ManifestWorkflowModel
+     ) -> None:
+         """Add or update a single model in workflow (progressive write).
+
+         Args:
+             workflow_name: Workflow name
+             model: ManifestWorkflowModel to add or update
+
+         Note:
+             - If same node reference exists, replaces/upgrades that entry
+             - If model with same hash exists, merges nodes
+             - Otherwise, appends as new model
+         """
+         existing = self.get_workflow_models(workflow_name)
+
+         # Build set of node references in new model
+         new_refs = {(n.node_id, n.widget_index) for n in model.nodes}
+
+         # Check for overlap with existing models
+         updated = False
+         for i, existing_model in enumerate(existing):
+             existing_refs = {(n.node_id, n.widget_index) for n in existing_model.nodes}
+
+             # If any node references overlap, this is a resolution of an existing entry
+             if new_refs & existing_refs:
+                 if model.hash:
+                     # Resolved version replaces unresolved
+                     existing[i] = model
+                     logger.debug(f"Replaced unresolved model '{existing_model.filename}' with resolved '{model.filename}'")
+                 else:
+                     # Both unresolved - merge nodes and update mutable fields
+                     non_overlapping = [n for n in model.nodes if (n.node_id, n.widget_index) not in existing_refs]
+                     existing_model.nodes.extend(non_overlapping)
+                     existing_model.criticality = model.criticality
+                     existing_model.status = model.status
+                     # Update download intent fields if present
+                     if model.sources:
+                         existing_model.sources = model.sources
+                     if model.relative_path:
+                         existing_model.relative_path = model.relative_path
+                     logger.debug(f"Updated unresolved model '{existing_model.filename}' with {len(non_overlapping)} new ref(s)")
+                 updated = True
+                 break
+
+             # Fallback: hash matching (for models resolved to same file from different nodes)
+             elif model.hash and existing_model.hash == model.hash:
+                 non_overlapping = [n for n in model.nodes if (n.node_id, n.widget_index) not in existing_refs]
+                 existing_model.nodes.extend(non_overlapping)
+                 logger.debug(f"Merged {len(non_overlapping)} new node(s) into existing model '{model.filename}'")
+                 updated = True
+                 break
+
+         if not updated:
+             # Completely new model
+             existing.append(model)
+             logger.debug(f"Added new model '{model.filename}' to workflow '{workflow_name}'")
+
+         self.set_workflow_models(workflow_name, existing)
+
+
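The merge logic in add_workflow_model reduces to three cases keyed on node-reference overlap and hash equality. A minimal stand-in sketch of the decision order (plain tuples here, since ManifestWorkflowModel's constructor isn't shown in this diff):

    incoming_hash = "deadbeef"                 # hypothetical resolved hash
    existing_hash = None                       # the stored entry is unresolved
    new_refs = {("12", 0), ("27", 1)}          # (node_id, widget_index) of incoming
    existing_refs = {("12", 0)}                # refs already recorded

    if new_refs & existing_refs:
        # Case 1: same widget slot -- a hashed model replaces the entry,
        # an unhashed one merges refs and refreshes mutable fields.
        print("merge/replace; extra refs:", new_refs - existing_refs)
    elif incoming_hash and incoming_hash == existing_hash:
        print("case 2: same file from different nodes; merge refs only")
    else:
        print("case 3: append as a brand-new model entry")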
+     def get_all_with_resolutions(self) -> dict:
+         """Get all workflows that have model resolutions."""
+         try:
+             config = self.load()
+             return config.get('tool', {}).get('comfygit', {}).get('workflows', {})
+         except Exception:
+             return {}
+
+     def set_node_packs(self, name: str, node_pack_ids: set[str] | None, config: dict | None = None) -> None:
+         """Set node pack references for a workflow.
+
+         Args:
+             name: Workflow name
+             node_pack_ids: Set of node pack identifiers (e.g., {"comfyui-akatz-nodes"}), or None to clear node packs
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+         """
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         self.ensure_section(config, 'tool', 'comfygit', 'workflows', name)
+         if not node_pack_ids:
+             if 'nodes' in config['tool']['comfygit']['workflows'][name]:
+                 logger.info(f"Clearing node packs for workflow: {name}")
+                 del config['tool']['comfygit']['workflows'][name]['nodes']
+         else:
+             logger.info(f"Set {len(node_pack_ids)} node pack(s) for workflow: {name}")
+             config['tool']['comfygit']['workflows'][name]['nodes'] = sorted(node_pack_ids)
+
+         if not is_batch:
+             self.save(config)
+
+     def clear_workflow_resolutions(self, name: str) -> bool:
+         """Clear model resolutions for a workflow."""
+         config = self.load()
+         workflows = config.get('tool', {}).get('comfygit', {}).get('workflows', {})
+
+         if name not in workflows:
+             return False
+
+         del workflows[name]
+         # Clean up empty sections
+         self.clean_empty_sections(config, 'tool', 'comfygit', 'workflows')
+         self.save(config)
+         logger.info(f"Cleared model resolutions for workflow: {name}")
+         return True
+
+     # === Per-workflow custom_node_map methods ===
+
+     def get_custom_node_map(self, workflow_name: str, config: dict | None = None) -> dict[str, str | bool]:
+         """Get custom_node_map for a specific workflow.
+
+         Args:
+             workflow_name: Name of workflow
+             config: Optional in-memory config for batched reads. If None, loads from disk.
+
+         Returns:
+             Dict mapping node_type -> package_id (or false for optional)
+         """
+         try:
+             if config is None:
+                 config = self.load()
+             workflow_data = config.get('tool', {}).get('comfygit', {}).get('workflows', {}).get(workflow_name, {})
+             return workflow_data.get('custom_node_map', {})
+         except Exception:
+             return {}
+
+     def set_custom_node_mapping(self, workflow_name: str, node_type: str, package_id: str | None) -> None:
+         """Set a single custom_node_map entry for a workflow (progressive write).
+
+         Args:
+             workflow_name: Name of workflow
+             node_type: Node type to map
+             package_id: Package ID (or None for optional = false)
+         """
+         config = self.load()
+         self.ensure_section(config, 'tool', 'comfygit', 'workflows', workflow_name)
+
+         # Ensure custom_node_map exists
+         if 'custom_node_map' not in config['tool']['comfygit']['workflows'][workflow_name]:
+             config['tool']['comfygit']['workflows'][workflow_name]['custom_node_map'] = {}
+
+         # Set mapping (false for optional, package_id string for resolved)
+         if package_id is None:
+             config['tool']['comfygit']['workflows'][workflow_name]['custom_node_map'][node_type] = False
+         else:
+             config['tool']['comfygit']['workflows'][workflow_name]['custom_node_map'][node_type] = package_id
+
+         self.save(config)
+         logger.debug(f"Set custom_node_map for workflow '{workflow_name}': {node_type} -> {package_id}")
+
+     def remove_custom_node_mapping(self, workflow_name: str, node_type: str, config: dict | None = None) -> bool:
+         """Remove a single custom_node_map entry for a workflow.
+
+         Args:
+             workflow_name: Name of workflow
+             node_type: Node type to remove
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+
+         Returns:
+             True if removed, False if not found
+         """
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         workflow_data = config.get('tool', {}).get('comfygit', {}).get('workflows', {}).get(workflow_name, {})
+
+         if 'custom_node_map' not in workflow_data or node_type not in workflow_data['custom_node_map']:
+             return False
+
+         del workflow_data['custom_node_map'][node_type]
+
+         # Clean up empty custom_node_map
+         if not workflow_data['custom_node_map']:
+             del workflow_data['custom_node_map']
+
+         if not is_batch:
+             self.save(config)
+
+         logger.debug(f"Removed custom_node_map entry for workflow '{workflow_name}': {node_type}")
+         return True
+
+     def remove_workflows(self, workflow_names: list[str], config: dict | None = None) -> int:
+         """Remove workflow sections from pyproject.toml.
+
+         Args:
+             workflow_names: List of workflow names to remove
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+
+         Returns:
+             Number of workflows removed
+         """
+         if not workflow_names:
+             return 0
+
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         workflows = config.get('tool', {}).get('comfygit', {}).get('workflows', {})
+
+         removed_count = 0
+         for name in workflow_names:
+             if name in workflows:
+                 del workflows[name]
+                 removed_count += 1
+                 logger.debug(f"Removed workflow section: {name}")
+
+         if removed_count > 0:
+             # Clean up empty workflows section
+             self.clean_empty_sections(config, 'tool', 'comfygit', 'workflows')
+             if not is_batch:
+                 self.save(config)
+             logger.info(f"Removed {removed_count} workflow section(s) from pyproject.toml")
+
+         return removed_count
+
+
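Most mutators above accept an optional config so callers can batch: load once, thread the same dict through several operations, save once (one disk write, one cache invalidation). A hedged sketch with hypothetical workflow names:

    manager = PyprojectManager(Path("pyproject.toml"))
    wf = manager.workflows

    config = manager.load()                                              # one read
    wf.set_node_packs("portrait", {"comfyui-akatz-nodes"}, config=config)
    wf.remove_custom_node_mapping("portrait", "OldNodeType", config=config)
    wf.remove_workflows(["scratch-test"], config=config)
    manager.save(config)                                                 # one write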
+ class ModelHandler(BaseHandler):
+     """Handles global model manifest in pyproject.toml.
+
+     Note: This stores ONLY resolved models with hashes for deduplication.
+     Unresolved models are stored per-workflow only.
+     """
+
+     def add_model(self, model: ManifestModel, config: dict | None = None) -> None:
+         """Add a model to the global manifest.
+
+         If model already exists, merges sources (union of old and new).
+
+         Args:
+             model: ManifestModel object with hash, filename, size, etc.
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+
+         Raises:
+             CDPyprojectError: If save fails
+         """
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         # Ensure sections exist
+         self.ensure_section(config, "tool", "comfygit", "models")
+
+         # Check if model already exists and merge sources
+         # In batch mode, check in-memory config instead of loading from disk
+         models_section = config.get("tool", {}).get("comfygit", {}).get("models", {})
+         if model.hash in models_section:
+             existing_dict = models_section[model.hash]
+             existing_sources = existing_dict.get('sources', [])
+             model.sources = list(set(existing_sources + model.sources))
+
+         # Serialize to inline table for compact representation
+         model_dict = model.to_toml_dict()
+         model_entry = tomlkit.inline_table()
+         for key, value in model_dict.items():
+             model_entry[key] = value
+
+         config["tool"]["comfygit"]["models"][model.hash] = model_entry
+
+         if not is_batch:
+             self.save(config)
+
+         logger.debug(f"Added model: {model.filename} ({model.hash[:8]}...)")
+
+     def get_all(self) -> list[ManifestModel]:
+         """Get all models in manifest.
+
+         Returns:
+             List of ManifestModel objects
+         """
+         try:
+             config = self.load()
+             models_data = config.get("tool", {}).get("comfygit", {}).get("models", {})
+
+             return [
+                 ManifestModel.from_toml_dict(hash_key, data)
+                 for hash_key, data in models_data.items()
+             ]
+         except Exception as e:
+             logger.debug(f"Error loading models: {e}")
+             return []
+
+     def get_by_hash(self, model_hash: str) -> ManifestModel | None:
+         """Get a specific model by hash.
+
+         Args:
+             model_hash: Model hash to look up
+
+         Returns:
+             ManifestModel if found, None otherwise
+         """
+         try:
+             config = self.load()
+             models_data = config.get("tool", {}).get("comfygit", {}).get("models", {})
+
+             if model_hash in models_data:
+                 return ManifestModel.from_toml_dict(model_hash, models_data[model_hash])
+             return None
+         except Exception as e:
+             logger.warning(f"Error getting model by hash {model_hash}: {e}")
+             return None
+
+     def remove_model(self, model_hash: str) -> bool:
+         """Remove a model from the manifest.
+
+         Args:
+             model_hash: Model hash to remove
+
+         Returns:
+             True if removed, False if not found
+         """
+         config = self.load()
+         models = config.get("tool", {}).get("comfygit", {}).get("models", {})
+
+         if model_hash in models:
+             del models[model_hash]
+             self.save(config)
+             logger.debug(f"Removed model: {model_hash[:8]}...")
+             return True
+
+         return False
+
+     def get_all_model_hashes(self) -> set[str]:
+         """Get all model hashes in manifest.
+
+         Returns:
+             Set of all model hashes
+         """
+         config = self.load()
+         models = config.get("tool", {}).get("comfygit", {}).get("models", {})
+         return set(models.keys())
+
+     def cleanup_orphans(self, config: dict | None = None) -> None:
+         """Remove models from global table that aren't referenced by any workflow.
+
+         This should be called after all workflows have been processed to clean up
+         models that were removed from all workflows.
+
+         Args:
+             config: Optional in-memory config for batched writes. If None, loads and saves immediately.
+         """
+         is_batch = config is not None
+         if not is_batch:
+             config = self.load()
+
+         # Collect all model hashes referenced by ANY workflow
+         # Read from in-memory config instead of loading from disk
+         referenced_hashes = set()
+         all_workflows = config.get('tool', {}).get('comfygit', {}).get('workflows', {})
+
+         for workflow_name, workflow_data in all_workflows.items():
+             workflow_models_data = workflow_data.get('models', [])
+             for model_data in workflow_models_data:
+                 # Only track resolved models (unresolved models aren't in global table)
+                 if model_data.get('hash') and model_data.get('status') == "resolved":
+                     referenced_hashes.add(model_data['hash'])
+
+         # Get all hashes in global models table (from in-memory config)
+         models_section = config.get("tool", {}).get("comfygit", {}).get("models", {})
+         global_hashes = set(models_section.keys())
+
+         # Remove orphans (in global but not referenced)
+         orphaned_hashes = global_hashes - referenced_hashes
+
+         if orphaned_hashes:
+             for model_hash in orphaned_hashes:
+                 if model_hash in models_section:
+                     del models_section[model_hash]
+                     logger.debug(f"Removed orphaned model: {model_hash[:8]}...")
+
+             if not is_batch:
+                 self.save(config)
+
+             logger.info(f"Cleaned up {len(orphaned_hashes)} orphaned model(s)")
+
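Tying it together, a hedged end-to-end sketch (hypothetical names) of the intended lifecycle: per-workflow model lists feed the global table, and cleanup_orphans drops hashes nothing references anymore:

    manager = PyprojectManager(Path("pyproject.toml"))

    config = manager.load()
    manager.workflows.remove_workflows(["old-workflow"], config=config)
    manager.models.cleanup_orphans(config=config)   # drops hashes only that workflow used
    manager.save(config)

    print(len(manager.models.get_all_model_hashes()), "model(s) still referenced")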