comfygit-core 0.2.0 (comfygit_core-0.2.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfygit_core/analyzers/custom_node_scanner.py +109 -0
- comfygit_core/analyzers/git_change_parser.py +156 -0
- comfygit_core/analyzers/model_scanner.py +318 -0
- comfygit_core/analyzers/node_classifier.py +58 -0
- comfygit_core/analyzers/node_git_analyzer.py +77 -0
- comfygit_core/analyzers/status_scanner.py +362 -0
- comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
- comfygit_core/caching/__init__.py +16 -0
- comfygit_core/caching/api_cache.py +210 -0
- comfygit_core/caching/base.py +212 -0
- comfygit_core/caching/comfyui_cache.py +100 -0
- comfygit_core/caching/custom_node_cache.py +320 -0
- comfygit_core/caching/workflow_cache.py +797 -0
- comfygit_core/clients/__init__.py +4 -0
- comfygit_core/clients/civitai_client.py +412 -0
- comfygit_core/clients/github_client.py +349 -0
- comfygit_core/clients/registry_client.py +230 -0
- comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
- comfygit_core/configs/comfyui_models.py +62 -0
- comfygit_core/configs/model_config.py +151 -0
- comfygit_core/constants.py +82 -0
- comfygit_core/core/environment.py +1635 -0
- comfygit_core/core/workspace.py +898 -0
- comfygit_core/factories/environment_factory.py +419 -0
- comfygit_core/factories/uv_factory.py +61 -0
- comfygit_core/factories/workspace_factory.py +109 -0
- comfygit_core/infrastructure/sqlite_manager.py +156 -0
- comfygit_core/integrations/__init__.py +7 -0
- comfygit_core/integrations/uv_command.py +318 -0
- comfygit_core/logging/logging_config.py +15 -0
- comfygit_core/managers/environment_git_orchestrator.py +316 -0
- comfygit_core/managers/environment_model_manager.py +296 -0
- comfygit_core/managers/export_import_manager.py +116 -0
- comfygit_core/managers/git_manager.py +667 -0
- comfygit_core/managers/model_download_manager.py +252 -0
- comfygit_core/managers/model_symlink_manager.py +166 -0
- comfygit_core/managers/node_manager.py +1378 -0
- comfygit_core/managers/pyproject_manager.py +1321 -0
- comfygit_core/managers/user_content_symlink_manager.py +436 -0
- comfygit_core/managers/uv_project_manager.py +569 -0
- comfygit_core/managers/workflow_manager.py +1944 -0
- comfygit_core/models/civitai.py +432 -0
- comfygit_core/models/commit.py +18 -0
- comfygit_core/models/environment.py +293 -0
- comfygit_core/models/exceptions.py +378 -0
- comfygit_core/models/manifest.py +132 -0
- comfygit_core/models/node_mapping.py +201 -0
- comfygit_core/models/protocols.py +248 -0
- comfygit_core/models/registry.py +63 -0
- comfygit_core/models/shared.py +356 -0
- comfygit_core/models/sync.py +42 -0
- comfygit_core/models/system.py +204 -0
- comfygit_core/models/workflow.py +914 -0
- comfygit_core/models/workspace_config.py +71 -0
- comfygit_core/py.typed +0 -0
- comfygit_core/repositories/migrate_paths.py +49 -0
- comfygit_core/repositories/model_repository.py +958 -0
- comfygit_core/repositories/node_mappings_repository.py +246 -0
- comfygit_core/repositories/workflow_repository.py +57 -0
- comfygit_core/repositories/workspace_config_repository.py +121 -0
- comfygit_core/resolvers/global_node_resolver.py +459 -0
- comfygit_core/resolvers/model_resolver.py +250 -0
- comfygit_core/services/import_analyzer.py +218 -0
- comfygit_core/services/model_downloader.py +422 -0
- comfygit_core/services/node_lookup_service.py +251 -0
- comfygit_core/services/registry_data_manager.py +161 -0
- comfygit_core/strategies/__init__.py +4 -0
- comfygit_core/strategies/auto.py +72 -0
- comfygit_core/strategies/confirmation.py +69 -0
- comfygit_core/utils/comfyui_ops.py +125 -0
- comfygit_core/utils/common.py +164 -0
- comfygit_core/utils/conflict_parser.py +232 -0
- comfygit_core/utils/dependency_parser.py +231 -0
- comfygit_core/utils/download.py +216 -0
- comfygit_core/utils/environment_cleanup.py +111 -0
- comfygit_core/utils/filesystem.py +178 -0
- comfygit_core/utils/git.py +1184 -0
- comfygit_core/utils/input_signature.py +145 -0
- comfygit_core/utils/model_categories.py +52 -0
- comfygit_core/utils/pytorch.py +71 -0
- comfygit_core/utils/requirements.py +211 -0
- comfygit_core/utils/retry.py +242 -0
- comfygit_core/utils/symlink_utils.py +119 -0
- comfygit_core/utils/system_detector.py +258 -0
- comfygit_core/utils/uuid.py +28 -0
- comfygit_core/utils/uv_error_handler.py +158 -0
- comfygit_core/utils/version.py +73 -0
- comfygit_core/utils/workflow_hash.py +90 -0
- comfygit_core/validation/resolution_tester.py +297 -0
- comfygit_core-0.2.0.dist-info/METADATA +939 -0
- comfygit_core-0.2.0.dist-info/RECORD +93 -0
- comfygit_core-0.2.0.dist-info/WHEEL +4 -0
- comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
comfygit_core/analyzers/custom_node_scanner.py
@@ -0,0 +1,109 @@
+"""Simple custom node scanner for finding dependencies in nodes."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+
+@dataclass
+class NodeDependencies:
+    """Dependencies found in a custom node."""
+    requirements_file: Path | None = None
+    requirements: list[str] | None = None
+    pyproject_file: Path | None = None
+    has_install_script: bool = False
+
+
+class CustomNodeScanner:
+    """Scans custom nodes for dependencies and metadata."""
+
+    def scan_node(self, node_path: Path) -> NodeDependencies:
+        """Scan a custom node directory for dependencies.
+
+        Args:
+            node_path: Path to the custom node directory
+
+        Returns:
+            NodeDependencies with found requirements
+        """
+        result = NodeDependencies()
+
+        if not node_path.exists() or not node_path.is_dir():
+            return result
+
+        # Look for requirements.txt (primary source)
+        req_file = node_path / "requirements.txt"
+        if req_file.exists():
+            result.requirements_file = req_file
+            result.requirements = self._read_requirements(req_file)
+
+        # Check for pyproject.toml (secondary)
+        pyproject_file = node_path / "pyproject.toml"
+        if pyproject_file.exists():
+            result.pyproject_file = pyproject_file
+
+        # Check for install scripts
+        for script_name in ["install.py", "install.sh", "setup.py"]:
+            if (node_path / script_name).exists():
+                result.has_install_script = True
+                break
+
+        return result
+
+    def _read_requirements(self, req_file: Path) -> list[str]:
+        """Read and parse requirements.txt file.
+
+        Strips inline comments (e.g., 'gdown # comment') to ensure PEP 508 compliance.
+
+        Args:
+            req_file: Path to requirements.txt
+
+        Returns:
+            List of requirement strings with inline comments removed
+        """
+        requirements = []
+
+        try:
+            with open(req_file, encoding='utf-8') as f:
+                for line in f:
+                    line = line.strip()
+                    # Skip full-line comments and empty lines
+                    if line and not line.startswith('#'):
+                        # Handle -r includes (just note them for now)
+                        if line.startswith('-r '):
+                            # TODO: Handle recursive requirements
+                            continue
+
+                        # Strip inline comments (everything after #)
+                        # Example: "gdown # supports downloading" -> "gdown"
+                        if '#' in line:
+                            line = line.split('#', 1)[0].strip()
+
+                        # Only add if there's content after stripping
+                        if line:
+                            requirements.append(line)
+        except Exception:
+            # Return empty list on any read error
+            pass
+
+        return requirements
+
+    def find_all_requirements(self, node_path: Path) -> list[Path]:
+        """Find all requirements files in a node (including subdirectories).
+
+        Args:
+            node_path: Path to the custom node directory
+
+        Returns:
+            List of paths to requirements files
+        """
+        if not node_path.exists() or not node_path.is_dir():
+            return []
+
+        # Look for requirements files
+        patterns = ["requirements*.txt", "requirements/*.txt"]
+        req_files = []
+
+        for pattern in patterns:
+            req_files.extend(node_path.glob(pattern))
+
+        return req_files
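
The scanner's public surface is small: `scan_node()` inspects one custom node directory and `find_all_requirements()` globs for nested requirements files. A minimal usage sketch (the import path follows the file list above; the node directory below is hypothetical):

from pathlib import Path

from comfygit_core.analyzers.custom_node_scanner import CustomNodeScanner

# Hypothetical custom node checkout; substitute a real directory.
node_dir = Path("custom_nodes/ComfyUI-Example")

scanner = CustomNodeScanner()
deps = scanner.scan_node(node_dir)

if deps.requirements:
    print(f"{node_dir.name}: {len(deps.requirements)} requirements declared")
if deps.has_install_script:
    print("Node ships an install script; a plain requirements install may not be enough")
for extra in scanner.find_all_requirements(node_dir):
    print(f"requirements file: {extra}")
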
comfygit_core/analyzers/git_change_parser.py
@@ -0,0 +1,156 @@
+"""Parse git changes in pyproject.toml files to extract what was modified."""
+
+from pathlib import Path
+from typing import Any
+
+import tomlkit
+
+from ..logging.logging_config import get_logger
+from ..models.environment import GitStatus
+from ..utils.dependency_parser import compare_dependency_sets, extract_all_dependencies
+from ..utils.git import git_show
+
+logger = get_logger(__name__)
+
+
+class GitChangeParser:
+    """Parse git changes to pyproject.toml to identify what was modified."""
+
+    def __init__(self, repo_path: Path):
+        """Initialize the parser.
+
+        Args:
+            repo_path: Path to git repository (.cec directory)
+        """
+        self.repo_path = repo_path
+
+    def parse_changes(self, current_config: dict) -> dict[str, Any]:
+        """Parse git changes comparing HEAD to current.
+
+        Args:
+            current_config: Current pyproject.toml configuration
+
+        Returns:
+            Dict with changes categorized by type
+        """
+        changes = {
+            'nodes_added': [],
+            'nodes_removed': [],
+            'dependencies_added': [],
+            'dependencies_removed': [],
+            'dependencies_updated': [],
+            'constraints_added': [],
+            'constraints_removed': [],
+        }
+
+        try:
+            # Get the last committed version
+            committed_content = git_show(
+                self.repo_path,
+                "HEAD",
+                Path("pyproject.toml"),
+                is_text=True
+            )
+
+            if not committed_content:
+                return changes
+
+            committed_config = tomlkit.loads(committed_content)
+
+            # Compare each category
+            self._compare_nodes(committed_config, current_config, changes)
+            self._compare_dependencies(committed_config, current_config, changes)
+            self._compare_constraints(committed_config, current_config, changes)
+
+        except (ValueError, OSError) as e:
+            # No previous commit or file doesn't exist in HEAD
+            logger.debug(f"Could not get previous pyproject.toml: {e}")
+            # This is fine - might be the first commit
+
+        return changes
+
+
+    def update_git_status(self, status: GitStatus, current_config: dict) -> None:
+        """Update a GitStatus object with parsed changes.
+
+        Args:
+            status: GitStatus object to update
+            current_config: Current pyproject.toml configuration
+        """
+        changes = self.parse_changes(current_config)
+
+        status.nodes_added = changes['nodes_added']
+        status.nodes_removed = changes['nodes_removed']
+        status.dependencies_added = changes['dependencies_added']
+        status.dependencies_removed = changes['dependencies_removed']
+        status.dependencies_updated = changes['dependencies_updated']
+        status.constraints_added = changes['constraints_added']
+        status.constraints_removed = changes['constraints_removed']
+
+    def _compare_nodes(self, old_config: dict, new_config: dict, changes: dict) -> None:
+        """Compare custom nodes between configs."""
+        old_nodes = old_config.get('tool', {}).get('comfygit', {}).get('nodes', {})
+        new_nodes = new_config.get('tool', {}).get('comfygit', {}).get('nodes', {})
+
+        # Flatten old nodes (handle legacy 'development' section)
+        old_flat = self._flatten_nodes(old_nodes)
+        new_flat = self._flatten_nodes(new_nodes)
+
+        old_keys = set(old_flat.keys())
+        new_keys = set(new_flat.keys())
+
+        for key in new_keys - old_keys:
+            node_data = new_flat[key]
+            node_name = node_data.get('name', key)
+            is_development = node_data.get('version') == 'dev'
+            changes['nodes_added'].append({
+                'name': node_name,
+                'is_development': is_development
+            })
+
+        for key in old_keys - new_keys:
+            node_data = old_flat[key]
+            node_name = node_data.get('name', key)
+            is_development = node_data.get('version') == 'dev'
+            changes['nodes_removed'].append({
+                'name': node_name,
+                'is_development': is_development
+            })
+
+    def _flatten_nodes(self, nodes_config: dict) -> dict:
+        """Flatten nodes, handling legacy 'development' section."""
+        flat = {}
+        for key, value in nodes_config.items():
+            if key == 'development' and isinstance(value, dict):
+                # Legacy development section - flatten it
+                for dev_key, dev_value in value.items():
+                    if isinstance(dev_value, dict):
+                        flat[dev_key] = dev_value
+            elif isinstance(value, dict) and 'name' in value:
+                # Regular node
+                flat[key] = value
+        return flat
+
+    def _compare_dependencies(self, old_config: dict, new_config: dict, changes: dict) -> None:
+        """Compare Python dependencies using existing utilities."""
+        old_deps = extract_all_dependencies(old_config)
+        new_deps = extract_all_dependencies(new_config)
+
+        dep_changes = compare_dependency_sets(old_deps, new_deps)
+        changes['dependencies_added'] = dep_changes.get('added', [])
+        changes['dependencies_removed'] = dep_changes.get('removed', [])
+        changes['dependencies_updated'] = dep_changes.get('updated', [])
+
+    def _compare_constraints(self, old_config: dict, new_config: dict, changes: dict) -> None:
+        """Compare UV constraint dependencies."""
+        old_constraints = old_config.get('tool', {}).get('uv', {}).get('constraint-dependencies', [])
+        new_constraints = new_config.get('tool', {}).get('uv', {}).get('constraint-dependencies', [])
+
+        old_set = set(old_constraints)
+        new_set = set(new_constraints)
+
+        changes['constraints_added'] = list(new_set - old_set)
+        changes['constraints_removed'] = list(old_set - new_set)
+
+    # Workflow tracking comparison removed - all workflows are auto-managed
+
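
A rough usage sketch, assuming a comfygit-managed environment whose `.cec` directory is a git repository with a committed `pyproject.toml` (the path below is hypothetical; `git_show` and the comparison helpers come from the package modules imported above):

from pathlib import Path

import tomlkit

from comfygit_core.analyzers.git_change_parser import GitChangeParser

# Hypothetical environment location; .cec is the repo the parser diffs against HEAD.
repo_path = Path.home() / "comfy-envs" / "default" / ".cec"
current_config = tomlkit.loads((repo_path / "pyproject.toml").read_text(encoding="utf-8"))

parser = GitChangeParser(repo_path)
changes = parser.parse_changes(current_config)

print("nodes added:", [node["name"] for node in changes["nodes_added"]])
print("dependencies updated:", changes["dependencies_updated"])
print("constraints removed:", changes["constraints_removed"])
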
comfygit_core/analyzers/model_scanner.py
@@ -0,0 +1,318 @@
+"""ModelScanner - Model file discovery, hashing, and indexing operations."""
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+
+from ..configs.model_config import ModelConfig
+from ..logging.logging_config import get_logger
+from ..models.exceptions import ComfyDockError
+from ..models.shared import ModelInfo
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ..repositories.model_repository import ModelRepository
+
+logger = get_logger(__name__)
+
+
+class ModelProcessResult(Enum):
+    """Result of processing a single model file."""
+    ADDED = "added"
+    UPDATED_PATH = "updated_path"
+    SKIPPED_DUPLICATE = "skipped_duplicate"
+    SKIPPED_SAME_FILE = "skipped_same"
+    COLLISION_RESOLVED = "collision_resolved"
+
+
+# Extensions that are definitely not model files
+EXCLUDED_EXTENSIONS = {
+    '.txt', '.md',
+    '.lock', '.gitignore', '.gitattributes',
+    '.log', '.html', '.xml',
+    '.py', '.js', '.ts', '.sh', '.bat', '.ps1',
+    '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.svg',
+    '.mp4', '.avi', '.mov', '.webm', '.mp3', '.wav',
+    '.zip', '.tar', '.gz', '.rar', '.7z',
+    '.pdf', '.doc', '.docx', '.xls', '.xlsx',
+    '.DS_Store', '.env',
+}
+
+# Minimum file size in bytes
+MIN_MODEL_SIZE = 8
+
+# Minimum file size for model files (8 bytes)
+
+
+@dataclass
+class ScanResult:
+    """Result of model scanning operation."""
+    scanned_count: int
+    added_count: int
+    updated_count: int
+    skipped_count: int
+    error_count: int
+    errors: list[str]
+    removed_count: int = 0
+
+
+class ModelScanProgress:
+    """Callback protocol for model scan progress updates."""
+
+    def on_scan_start(self, total_files: int) -> None:
+        """Called when scan starts with total file count."""
+        pass
+
+    def on_file_processed(self, current: int, total: int, filename: str) -> None:
+        """Called after each file is processed."""
+        pass
+
+    def on_scan_complete(self, result: ScanResult) -> None:
+        """Called when scan completes."""
+        pass
+
+
+class ModelScanner:
+    """Model file discovery, hashing, and indexing operations."""
+
+    def __init__(self, index_manager: ModelRepository, model_config: ModelConfig | None = None):
+        """Initialize ModelScanner.
+
+        Args:
+            index_manager: ModelIndexManager for database operations
+            model_config: ModelConfig for extension filtering, loads default if None
+        """
+        self.index_manager = index_manager
+        self.model_config = model_config or ModelConfig.load()
+        self.quiet = False
+
+    def scan_directory(self, models_dir: Path, quiet: bool = False, progress: ModelScanProgress | None = None) -> ScanResult:
+        """Scan single models directory for all model files.
+
+        Args:
+            models_dir: Path to models directory to scan
+            quiet: Suppress logging
+            progress: Optional progress callback
+
+        Returns:
+            ScanResult with operation statistics
+        """
+        self.quiet = quiet
+
+        if not models_dir.exists():
+            raise ComfyDockError(f"Models directory does not exist: {models_dir}")
+        if not models_dir.is_dir():
+            raise ComfyDockError(f"Models path is not a directory: {models_dir}")
+
+        if not self.quiet:
+            logger.info(f"Scanning models directory: {models_dir}")
+
+        # Get existing locations from this directory to check for changes (mtime optimization)
+        existing_locations = {loc['relative_path']: loc for loc in self.index_manager.get_all_locations(models_dir)}
+
+        # Find all potential model files
+        model_files = self._find_model_files(models_dir) or []
+
+        result = ScanResult(len(model_files), 0, 0, 0, 0, [])
+
+        # Notify progress of scan start
+        if progress:
+            progress.on_scan_start(len(model_files))
+
+        # Process each model file
+        for idx, file_path in enumerate(model_files, 1):
+            try:
+                relative_path = str(file_path.relative_to(models_dir))
+                file_stat = file_path.stat()
+
+                # Check if file has changed
+                existing = existing_locations.get(relative_path)
+                if existing and existing['mtime'] == file_stat.st_mtime:
+                    result.skipped_count += 1
+                    if progress:
+                        progress.on_file_processed(idx, len(model_files), file_path.name)
+                    continue
+
+                # Process the model file
+                process_result = self._process_model_file(file_path, models_dir)
+                self._update_result_counters(result, process_result)
+
+                # Notify progress after processing
+                if progress:
+                    progress.on_file_processed(idx, len(model_files), file_path.name)
+
+            except Exception as e:
+                error_msg = f"Error processing {file_path}: {e}"
+                logger.error(error_msg)
+                result.errors.append(error_msg)
+                result.error_count += 1
+
+        # Clean up stale locations
+        removed_count = self.index_manager.clean_stale_locations(models_dir)
+        result.removed_count = removed_count
+
+        if removed_count > 0 and not self.quiet:
+            logger.info(f"Cleaned up {removed_count} stale locations")
+
+        if not self.quiet:
+            logger.info(f"Scan complete: {result.added_count} added, {result.updated_count} updated, {result.skipped_count} skipped")
+
+        # Notify progress of completion
+        if progress:
+            progress.on_scan_complete(result)
+
+        return result
+
+    def _process_model_file(self, file_path: Path, models_dir: Path) -> ModelProcessResult:
+        """Process a model file and add it to the index.
+
+        Args:
+            file_path: Path to the model file
+            models_dir: Base models directory
+
+        Returns:
+            Result of the processing operation
+        """
+        try:
+            # Get file info
+            file_stat = file_path.stat()
+            relative_path = str(file_path.relative_to(models_dir))
+            filename = file_path.name
+
+            # Calculate hash
+            short_hash = self.index_manager.calculate_short_hash(file_path)
+
+            # Check if model already exists
+            if self.index_manager.has_model(short_hash):
+                # Model exists, just add/update the location
+                self.index_manager.add_location(short_hash, models_dir, relative_path, filename, file_stat.st_mtime)
+                if not self.quiet:
+                    logger.debug(f"Updated location for existing model: {relative_path}")
+                return ModelProcessResult.UPDATED_PATH
+            else:
+                # New model - add to both tables
+                self.index_manager.ensure_model(short_hash, file_stat.st_size)
+                self.index_manager.add_location(short_hash, models_dir, relative_path, filename, file_stat.st_mtime)
+                if not self.quiet:
+                    logger.debug(f"Added new model: {relative_path}")
+                return ModelProcessResult.ADDED
+
+        except Exception as e:
+            logger.error(f"Error processing model file {file_path}: {e}")
+            raise
+
+    def _update_result_counters(self, result: ScanResult, process_result: ModelProcessResult) -> None:
+        """Update ScanResult counters based on processing result."""
+        match process_result:
+            case ModelProcessResult.ADDED:
+                result.added_count += 1
+            case ModelProcessResult.UPDATED_PATH:
+                result.updated_count += 1
+            case ModelProcessResult.SKIPPED_SAME_FILE:
+                result.skipped_count += 1
+
+    def _find_model_files(self, path: Path) -> list[Path]:
+        """Find and filter valid model files in directory."""
+        model_files = []
+        total_found = 0
+        skipped_hidden = 0
+        skipped_not_file = 0
+        skipped_small = 0
+        skipped_validation = 0
+        skipped_error = 0
+
+        for file_path in path.rglob("*"):
+            total_found += 1
+
+            # Skip hidden directories (check only relative path parts, not absolute)
+            try:
+                relative_path = file_path.relative_to(path)
+                if any(part.startswith('.') for part in relative_path.parts):
+                    skipped_hidden += 1
+                    continue
+            except ValueError:
+                # File not under base path - skip
+                skipped_error += 1
+                continue
+
+            try:
+                if not file_path.is_file() or file_path.is_symlink():
+                    skipped_not_file += 1
+                    continue
+
+                # Skip small files
+                file_size = file_path.stat().st_size
+                if file_size < MIN_MODEL_SIZE:
+                    skipped_small += 1
+                    if not self.quiet:
+                        logger.debug(f"Skipped (too small: {file_size} bytes): {file_path.name}")
+                    continue
+
+                # Apply config-based validation
+                if not self._is_valid_model_file(file_path, path):
+                    skipped_validation += 1
+                    if not self.quiet:
+                        logger.debug(f"Skipped (validation failed): {file_path.relative_to(path)}")
+                    continue
+
+                if not self.quiet:
+                    logger.debug(f"Found valid model: {file_path.relative_to(path)}")
+                model_files.append(file_path)
+
+            except (OSError, PermissionError) as e:
+                skipped_error += 1
+                if not self.quiet:
+                    logger.debug(f"Skipped (error: {e}): {file_path.name}")
+                continue
+
+        if not self.quiet:
+            logger.debug(
+                f"File scan summary: {total_found} total, {len(model_files)} valid, "
+                f"{skipped_hidden} hidden, {skipped_not_file} not-file, "
+                f"{skipped_small} small, {skipped_validation} validation-failed, {skipped_error} errors"
+            )
+        return model_files
+
+    def _is_valid_model_file(self, file_path: Path, base_dir: Path) -> bool:
+        """Check if file is valid based on directory-specific rules."""
+
+        # Always exclude obviously non-model files
+        if file_path.suffix.lower() in EXCLUDED_EXTENSIONS:
+            if not self.quiet:
+                logger.debug(f" Excluded extension {file_path.suffix}: {file_path.name}")
+            return False
+
+        # Get the relative path to determine directory structure
+        try:
+            relative_path = file_path.relative_to(base_dir)
+            if len(relative_path.parts) > 0:
+                # First directory after base is the model type directory
+                model_dir = relative_path.parts[0]
+
+                if self.model_config.is_standard_directory(model_dir):
+                    # Standard directory - use specific extensions
+                    valid_extensions = self.model_config.get_extensions_for_directory(model_dir)
+                    is_valid = file_path.suffix.lower() in valid_extensions
+                    if not is_valid and not self.quiet:
+                        logger.debug(
+                            f" Invalid extension for {model_dir}/: {file_path.suffix} "
+                            f"(valid: {valid_extensions}) - {file_path.name}"
+                        )
+                    return is_valid
+                else:
+                    # Non-standard directory - be permissive (already excluded obvious non-models)
+                    if not self.quiet:
+                        logger.debug(f" Non-standard directory {model_dir}/, allowing: {file_path.name}")
+                    return True
+        except ValueError as e:
+            # File not under base_dir? Shouldn't happen with rglob
+            if not self.quiet:
+                logger.debug(f" ValueError getting relative path: {e} - {file_path}")
+            return False
+
+        # Fallback: check against default extensions
+        is_valid = file_path.suffix.lower() in self.model_config.default_extensions
+        if not is_valid and not self.quiet:
+            logger.debug(f" Not in default extensions: {file_path.suffix} - {file_path.name}")
+        return is_valid
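
The scanner delegates hashing and persistence to the `ModelRepository` passed in as `index_manager` and reports progress through the duck-typed `ModelScanProgress` callbacks. A sketch of wiring a console progress reporter into a scan; the repository construction is left to the caller because its signature is not part of this hunk:

from pathlib import Path

from comfygit_core.analyzers.model_scanner import ModelScanner, ModelScanProgress, ScanResult

class ConsoleProgress(ModelScanProgress):
    """Minimal progress reporter that prints each processed file."""

    def on_scan_start(self, total_files: int) -> None:
        print(f"Scanning {total_files} candidate files")

    def on_file_processed(self, current: int, total: int, filename: str) -> None:
        print(f"[{current}/{total}] {filename}")

    def on_scan_complete(self, result: ScanResult) -> None:
        print(f"Done: {result.added_count} added, {result.updated_count} updated, {result.error_count} errors")

def scan_models(repo, models_dir: Path) -> ScanResult:
    """Run a scan with console progress.

    `repo` is assumed to be a ModelRepository built elsewhere (its constructor
    lives in comfygit_core/repositories/model_repository.py, not shown here).
    """
    scanner = ModelScanner(index_manager=repo)
    return scanner.scan_directory(models_dir, progress=ConsoleProgress())
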
comfygit_core/analyzers/node_classifier.py
@@ -0,0 +1,58 @@
+"""Node classification service for workflow analysis."""
+from __future__ import annotations
+
+from dataclasses import dataclass
+import json
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from ..logging.logging_config import get_logger
+from ..configs.comfyui_builtin_nodes import COMFYUI_BUILTIN_NODES
+
+if TYPE_CHECKING:
+    from ..configs.model_config import ModelConfig
+    from ..models.workflow import Workflow, WorkflowNode
+
+logger = get_logger(__name__)
+
+@dataclass
+class NodeClassifierResultMulti:
+    builtin_nodes: list[WorkflowNode]
+    custom_nodes: list[WorkflowNode]
+
+class NodeClassifier:
+    """Service for classifying and categorizing workflow nodes."""
+
+    def __init__(self):
+        self.builtin_nodes = set(COMFYUI_BUILTIN_NODES["all_builtin_nodes"])
+
+    def get_custom_node_types(self, workflow: Workflow) -> set[str]:
+        """Get custom node types from workflow."""
+        return workflow.node_types - self.builtin_nodes
+
+    def get_model_loader_nodes(self, workflow: Workflow, model_config: ModelConfig) -> list[WorkflowNode]:
+        """Get model loader nodes from workflow."""
+        return [node for node in workflow.nodes.values() if model_config.is_model_loader_node(node.type)]
+
+    @staticmethod
+    def classify_single_node(node: WorkflowNode) -> str:
+        """Classify a single node by type."""
+        all_builtin_nodes = set(COMFYUI_BUILTIN_NODES["all_builtin_nodes"])
+        if node.type in all_builtin_nodes:
+            return "builtin"
+        return "custom"
+
+    @staticmethod
+    def classify_nodes(workflow: Workflow) -> NodeClassifierResultMulti:
+        """Classify all nodes by type."""
+        all_builtin_nodes = set(COMFYUI_BUILTIN_NODES["all_builtin_nodes"])
+        builtin_nodes: list[WorkflowNode] = []
+        custom_nodes: list[WorkflowNode] = []
+
+        for node in workflow.nodes.values():
+            if node.type in all_builtin_nodes:
+                builtin_nodes.append(node)
+            else:
+                custom_nodes.append(node)
+
+        return NodeClassifierResultMulti(builtin_nodes, custom_nodes)
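
Classification is a plain set-membership test against `COMFYUI_BUILTIN_NODES`. A short sketch of how a caller might split a parsed workflow into builtin and custom node types; the `Workflow` object is assumed to be loaded elsewhere (e.g. by the workflow manager), since its constructor is not part of this hunk:

from comfygit_core.analyzers.node_classifier import NodeClassifier

def report_custom_nodes(workflow) -> list[str]:
    """Return the sorted custom node types used by `workflow`.

    `workflow` is a comfygit_core.models.workflow.Workflow instance created
    elsewhere; only the classification API shown above is exercised here.
    """
    result = NodeClassifier.classify_nodes(workflow)
    custom_types = sorted({node.type for node in result.custom_nodes})
    print(f"{len(result.builtin_nodes)} builtin nodes, {len(result.custom_nodes)} custom nodes")
    return custom_types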