comfygit_core-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
@@ -0,0 +1,145 @@
+ """Input signature utilities for node version resolution."""
+ from __future__ import annotations
+
+ import hashlib
+ import json
+ from typing import TYPE_CHECKING, Dict, List
+
+ if TYPE_CHECKING:
+     from comfygit_core.models.workflow import NodeInput
+
+ from ..logging.logging_config import get_logger
+
+ logger = get_logger(__name__)
+
+
+ def normalize_registry_inputs(input_types_json: str) -> str:
+     """Normalize input types from registry metadata.
+
+     Args:
+         input_types_json: JSON string like '{"required":{"mask":["MASK"],"scale":["FLOAT",{"default":1}]}}'
+
+     Returns:
+         Canonical input signature string
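+
+     Example (illustrative; the result is the sorted canonical
+     "name:TYPE|..." form):
+         >>> normalize_registry_inputs('{"required": {"mask": ["MASK"], "scale": ["FLOAT", {"default": 1}]}}')
+         'mask:MASK|scale:FLOAT'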
+     """
+     try:
+         parsed = json.loads(input_types_json)
+         normalized = {}
+
+         # Process required and optional inputs
+         for category in ["required", "optional"]:
+             if category in parsed:
+                 for name, type_info in parsed[category].items():
+                     # type_info patterns:
+                     # - ["TYPE"] or ["TYPE", {...constraints}] for simple types
+                     # - [[...options]] or [[...options], {...}] for COMBO/choice fields
+                     if isinstance(type_info, list) and len(type_info) > 0:
+                         first_elem = type_info[0]
+                         # Check if first element is a list (COMBO type)
+                         if isinstance(first_elem, list):
+                             normalized[name] = "COMBO"
+                         elif isinstance(first_elem, str):
+                             normalized[name] = first_elem
+                         else:
+                             logger.warning(f"Unexpected first element type for {name}: {type(first_elem)}")
+                             continue
+                     elif isinstance(type_info, str):
+                         normalized[name] = type_info
+                     else:
+                         logger.warning(f"Unexpected type_info format for {name}: {type_info}")
+                         continue
+
+         return _create_canonical_signature(normalized)
+
+     except (json.JSONDecodeError, KeyError, TypeError) as e:
+         logger.warning(f"Failed to parse registry input types: {e}")
+         return ""
+
+
+ def normalize_workflow_inputs(inputs: List[NodeInput]) -> str:
+     """Normalize input types from workflow node definition.
+
+     Args:
+         inputs: List of NodeInput dataclass instances with name and type attributes
+
+     Returns:
+         Canonical input signature string
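+
+     Example (illustrative; uses the legacy dict form so the doctest needs
+     no NodeInput import):
+         >>> normalize_workflow_inputs([{"name": "image", "type": "IMAGE"}])
+         'image:IMAGE'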
+     """
+     normalized = {}
+
+     for input_def in inputs:
+         # Handle NodeInput dataclass (has attributes) or dict (has keys)
+         if hasattr(input_def, 'name') and hasattr(input_def, 'type'):
+             # NodeInput dataclass
+             name = input_def.name
+             input_type = input_def.type
+         elif isinstance(input_def, dict):
+             # Legacy dict format (for backwards compatibility)
+             name = input_def.get('name')
+             input_type = input_def.get('type')
+         else:
+             logger.warning(f"Unexpected input format: {type(input_def)}")
+             continue
+
+         if name and input_type:
+             normalized[name] = input_type
+
+     return _create_canonical_signature(normalized)
+
+
+ def _create_canonical_signature(inputs: Dict[str, str]) -> str:
+     """Create canonical signature from normalized inputs.
+
+     Args:
+         inputs: Dictionary of {input_name: input_type}
+
+     Returns:
+         Canonical signature string
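+
+     Example (illustrative; names are sorted for determinism):
+         >>> _create_canonical_signature({"scale": "FLOAT", "mask": "MASK"})
+         'mask:MASK|scale:FLOAT'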
+     """
+     if not inputs:
+         return ""
+
+     # Sort by name for deterministic ordering
+     sorted_inputs = sorted(inputs.items())
+
+     # Create canonical string: "name1:TYPE1|name2:TYPE2"
+     canonical = "|".join([f"{name}:{type_}" for name, type_ in sorted_inputs])
+
+     return canonical
+
+
+ def hash_signature(signature: str) -> str:
+     """Create short hash of input signature.
+
+     Args:
+         signature: Canonical signature string
+
+     Returns:
+         8-character hash
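+
+     Example (illustrative; empty signatures map to the "_" marker, others
+     to the first 8 hex digits of their SHA-1):
+         >>> hash_signature("")
+         '_'
+         >>> len(hash_signature("mask:MASK|scale:FLOAT"))
+         8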
+     """
+     if not signature:
+         return "_"  # Special marker for empty/unknown signatures
+
+     return hashlib.sha1(signature.encode()).hexdigest()[:8]
+
+
+ def create_node_key(node_type: str, inputs_signature: str) -> str:
+     """Create compound key for node lookup.
+
+     Args:
+         node_type: Node class type name
+         inputs_signature: Canonical input signature or hash
+
+     Returns:
+         Compound key like "NodeType::hash1234"
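+
+     Example (illustrative; "deadbeef" stands in for an already-computed
+     8-character hash):
+         >>> create_node_key("KSampler", "deadbeef")
+         'KSampler::deadbeef'
+         >>> create_node_key("KSampler", "")
+         'KSampler::_'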
+     """
+     if not inputs_signature or inputs_signature == "_":
+         return f"{node_type}::_"
+
+     # If signature is already a hash (8 chars), use it. Otherwise hash it.
+     if len(inputs_signature) == 8 and all(c in '0123456789abcdef' for c in inputs_signature):
+         hash_part = inputs_signature
+     else:
+         hash_part = hash_signature(inputs_signature)
+
+     return f"{node_type}::{hash_part}"
@@ -0,0 +1,52 @@
+ """Utility functions for determining model categories from filesystem paths."""
+
+ from pathlib import Path
+
+ from ..configs.comfyui_models import COMFYUI_MODELS_CONFIG
+
+
+ def get_model_category(relative_path: str) -> str:
+     """Determine model category from relative path.
+
+     Extracts the first directory component from the relative path and checks
+     if it matches a standard ComfyUI model directory. If not found in the
+     standard directories, returns 'unknown' to indicate a custom directory.
+
+     Args:
+         relative_path: Path relative to models directory (e.g., "checkpoints/sd_xl.safetensors")
+
+     Returns:
+         Category name (e.g., "checkpoints", "loras", "vae") or "unknown"
+
+     Examples:
+         >>> get_model_category("checkpoints/sd_xl_base.safetensors")
+         'checkpoints'
+         >>> get_model_category("loras/detail_tweaker.safetensors")
+         'loras'
+         >>> get_model_category("custom_nodes/my-node/models/special.pt")
+         'unknown'
+         >>> get_model_category("model.safetensors")
+         'unknown'
+     """
+     if not relative_path:
+         return "unknown"
+
+     # Normalize path and extract first component
+     normalized_path = Path(relative_path).as_posix()
+     parts = normalized_path.split('/')
+
+     if not parts or not parts[0]:
+         return "unknown"
+
+     # Get first directory component (lowercase for case-insensitive matching)
+     first_dir = parts[0].lower()
+
+     # Check against standard directories
+     standard_dirs = COMFYUI_MODELS_CONFIG.get('standard_directories', [])
+
+     # Case-insensitive match
+     for std_dir in standard_dirs:
+         if first_dir == std_dir.lower():
+             return std_dir
+
+     return "unknown"
@@ -0,0 +1,71 @@
+ """PyTorch-specific utilities for backend detection and index URL generation."""
+
+ import re
+
+ from ..constants import PYTORCH_INDEX_BASE_URL
+
+
+ def get_pytorch_index_url(backend: str) -> str:
+     """Generate PyTorch index URL for any backend.
+
+     PyTorch uses a consistent URL pattern for all backends:
+     https://download.pytorch.org/whl/{backend}
+
+     This works for:
+     - CPU: cpu
+     - CUDA: cu118, cu121, cu124, cu126, cu128, cu130, etc.
+     - ROCm: rocm6.2, rocm6.3, rocm6.4, etc.
+     - Intel XPU: xpu
+
+     Args:
+         backend: Backend identifier (e.g., 'cu128', 'rocm6.3', 'cpu')
+
+     Returns:
+         Full index URL for the backend
+
+     Examples:
+         >>> get_pytorch_index_url("cu128")
+         'https://download.pytorch.org/whl/cu128'
+         >>> get_pytorch_index_url("rocm6.3")
+         'https://download.pytorch.org/whl/rocm6.3'
+     """
+     return f"{PYTORCH_INDEX_BASE_URL}/{backend}"
+
+
+ def extract_backend_from_version(version: str) -> str | None:
+     """Extract backend from PyTorch version string.
+
+     PyTorch versions with specific backends use the format:
+     {version}+{backend} (e.g., '2.9.0+cu128')
+
+     CPU-only builds may omit the backend suffix on some platforms.
+
+     Args:
+         version: Version string (e.g., '2.9.0+cu128', '2.6.0')
+
+     Returns:
+         Backend string (e.g., 'cu128', 'rocm6.3') or None if no backend suffix
+
+     Examples:
+         >>> extract_backend_from_version("2.9.0+cu128")
+         'cu128'
+         >>> extract_backend_from_version("2.9.0")
+         None
+     """
+     if '+' in version:
+         return version.split('+')[1]
+     return None
+
+
+ def extract_pip_show_package_version(pip_show_output: str) -> str | None:
+     """Extract version from pip show output.
+
+     Args:
+         pip_show_output: Output from 'uv pip show package'
+
+     Returns:
+         Version string (e.g., '2.6.0+cu128') or None if not found
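+
+     Example (illustrative single-line snippet; real output has more fields):
+         >>> extract_pip_show_package_version("Version: 2.6.0+cu124")
+         '2.6.0+cu124'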
+     """
+     match = re.search(r'^Version:\s*(.+)$', pip_show_output, re.MULTILINE)
+     if match:
+         return match.group(1).strip()
+     return None
@@ -0,0 +1,211 @@
+ """Requirements parsing utilities."""
+
+ from pathlib import Path
+
+ import requirements
+ import tomlkit
+
+ from ..logging.logging_config import get_logger
+
+ logger = get_logger(__name__)
+
+
+ def parse_requirements_file(requirements_path: Path) -> dict[str, list[str]]:
+     """Parse a requirements.txt file and return package requirements.
+
+     Returns:
+         Mapping of lowercased package name to a list of version-constraint
+         strings; an empty string marks an unpinned requirement.
+     parsed_requirements = {}
+
+     if not requirements_path.exists():
+         return parsed_requirements
+
+     try:
+         with open(requirements_path, encoding='utf-8') as f:
+             original_lines = f.readlines()
+
+         # Try iterative parsing to isolate and remove problematic lines
+         valid_lines = _get_valid_requirements_lines(original_lines, requirements_path)
+
+         # Parse the valid lines
+         if valid_lines:
+             valid_content = '\n'.join(valid_lines)
+             try:
+                 for req in requirements.parse(valid_content):
+                     # Skip VCS requirements, local files, etc. - they'll be handled elsewhere
+                     if req.vcs or req.local_file or req.uri:
+                         logger.debug(f"Skipping non-standard requirement: {req.line}")
+                         continue
+
+                     # Only process regular, named package requirements
+                     if req.name:
+                         package_name = req.name.lower()
+
+                         if package_name not in parsed_requirements:
+                             parsed_requirements[package_name] = []
+
+                         if req.specs:
+                             # Join all version specs into a single constraint string
+                             version_spec = ",".join([f"{op}{ver}" for op, ver in req.specs])
+                             parsed_requirements[package_name].append(version_spec)
+                         else:
+                             # Package without version constraints
+                             parsed_requirements[package_name].append("")
+
+             except Exception as e:
+                 logger.error(f"Failed to parse even filtered requirements from {requirements_path}: {e}")
+
+     except Exception as e:
+         logger.error(f"Error reading {requirements_path}: {e}")
+
+     return parsed_requirements
+
+
+ def _get_valid_requirements_lines(original_lines: list[str], requirements_path: Path) -> list[str]:
+     """
+     Iteratively remove problematic lines from requirements until we can parse successfully.
+     Returns a list of valid requirement lines.
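+
+     Example (illustrative; assumes the middle line is one requirements-parser
+     rejects):
+         Given lines ["torch>=2.0", "!!! not a requirement", "numpy"], the
+         offending line is dropped and ["torch>=2.0", "numpy"] is returned.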
+     """
+     # Start with all non-empty, non-comment lines
+     candidate_lines = []
+     for line in original_lines:
+         line = line.strip()
+         if line and not line.startswith('#') and not line.startswith('-'):
+             candidate_lines.append(line)
+
+     if not candidate_lines:
+         return []
+
+     max_attempts = len(candidate_lines)
+     removed_lines = []
+
+     for _ in range(max_attempts):
+         try:
+             # Try to parse current candidate lines
+             test_content = '\n'.join(candidate_lines)
+             list(requirements.parse(test_content))  # This will raise if there's an error
+
+             # If we get here, parsing succeeded
+             if removed_lines:
+                 logger.info(f"Successfully parsed {requirements_path} after removing {len(removed_lines)} problematic lines")
+                 for removed in removed_lines:
+                     logger.debug(f"  Removed: {removed}")
+
+             return candidate_lines
+
+         except Exception:
+             # Parse failed, try to identify the problematic line
+             problematic_line = None
+
+             # Find the offending line by testing each candidate individually
+             for i, line in enumerate(candidate_lines):
+                 try:
+                     list(requirements.parse(line))
+                 except Exception:
+                     # This line causes an error
+                     problematic_line = line
+                     candidate_lines.pop(i)
+                     removed_lines.append(line)
+                     logger.warning(f"Removed problematic requirement line: {line}")
+                     break
+
+             if problematic_line is None:
+                 # Couldn't identify the specific line, remove the first line and try again
+                 if candidate_lines:
+                     removed_line = candidate_lines.pop(0)
+                     removed_lines.append(removed_line)
+                     logger.warning(f"Could not identify specific problematic line, removed: {removed_line}")
+                 else:
+                     break
+
+     # If we exhausted all attempts without a successful parse
+     if removed_lines:
+         logger.warning(f"Could not parse {requirements_path} even after removing problematic lines")
+
+     return candidate_lines
+
+
+ def parse_pyproject_toml(pyproject_path: Path) -> dict | None:
+     """Parse a pyproject.toml file and extract project information.
+
+     Checks the [project] table first, then falls back to [tool.poetry].
+
+     Returns:
+         Dict with name, version, description, authors (and, when present,
+         urls and dependencies), or None if no project name is found.
+     try:
+         with open(pyproject_path, encoding='utf-8') as f:
+             data = tomlkit.load(f)
+
+         # Extract project information
+         project_info = {}
+
+         # Check different possible locations for project metadata
+         if 'project' in data:
+             project = data['project']
+             project_info['name'] = project.get('name', '')
+             project_info['version'] = project.get('version', '')
+             project_info['description'] = project.get('description', '')
+             project_info['authors'] = project.get('authors', [])
+             project_info['urls'] = project.get('urls', {})
+
+         # Also check tool.poetry section (for Poetry projects)
+         if 'tool' in data and 'poetry' in data['tool']:
+             poetry = data['tool']['poetry']
+             if not project_info.get('name'):
+                 project_info['name'] = poetry.get('name', '')
+             if not project_info.get('version'):
+                 project_info['version'] = poetry.get('version', '')
+             if not project_info.get('description'):
+                 project_info['description'] = poetry.get('description', '')
+             if not project_info.get('authors'):
+                 project_info['authors'] = poetry.get('authors', [])
+
+         # Extract dependencies if present
+         if 'dependencies' in data.get('project', {}):
+             project_info['dependencies'] = data['project']['dependencies']
+         elif 'tool' in data and 'poetry' in data['tool'] and 'dependencies' in data['tool']['poetry']:
+             project_info['dependencies'] = data['tool']['poetry']['dependencies']
+
+         return project_info if project_info.get('name') else None
+
+     except Exception as e:
+         logger.error(f"Error parsing pyproject.toml: {e}")
+         return None
+
+
+ def save_requirements_txt(requirements: dict[str, str], system_info: dict, comfyui_path: Path):
+     """Save the resolved requirements to comfyui_requirements.txt in the
+     current working directory.
+     req_path = Path("comfyui_requirements.txt")
+
+     with open(req_path, 'w', encoding='utf-8') as f:
+         f.write("# ComfyUI Migration Requirements\n")
+         f.write(f"# Generated from: {comfyui_path}\n")
+         f.write(f"# Python version: {system_info.get('python_version')}\n")
+         if system_info.get('cuda_version'):
+             f.write(f"# CUDA version: {system_info.get('cuda_version')}\n")
+         if system_info.get('torch_version'):
+             f.write(f"# PyTorch version: {system_info.get('torch_version')}\n")
+         f.write("\n")
+         f.write("# NOTE: PyTorch packages are handled separately in comfyui_migration.json\n")
+         f.write("# Install with: pip install -r comfyui_requirements.txt\n")
+         f.write("\n")
+
+         # Sort packages for consistency
+         for package in sorted(requirements.keys()):
+             version = requirements[package]
+             if version:
+                 f.write(f"{package}=={version}\n")
+             else:
+                 f.write(f"{package}\n")
+
+         # Add editable and git requirements at the end
+         if system_info.get('editable_installs'):
+             f.write("\n# Editable installs\n")
+             for install in system_info['editable_installs']:
+                 f.write(f"{install}\n")
+
+         if system_info.get('git_requirements'):
+             f.write("\n# Git requirements\n")
+             for req in system_info['git_requirements']:
+                 f.write(f"{req}\n")
+
+     logger.info(f"Requirements saved to {req_path}")