comfygit_core-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfygit_core/analyzers/custom_node_scanner.py +109 -0
- comfygit_core/analyzers/git_change_parser.py +156 -0
- comfygit_core/analyzers/model_scanner.py +318 -0
- comfygit_core/analyzers/node_classifier.py +58 -0
- comfygit_core/analyzers/node_git_analyzer.py +77 -0
- comfygit_core/analyzers/status_scanner.py +362 -0
- comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
- comfygit_core/caching/__init__.py +16 -0
- comfygit_core/caching/api_cache.py +210 -0
- comfygit_core/caching/base.py +212 -0
- comfygit_core/caching/comfyui_cache.py +100 -0
- comfygit_core/caching/custom_node_cache.py +320 -0
- comfygit_core/caching/workflow_cache.py +797 -0
- comfygit_core/clients/__init__.py +4 -0
- comfygit_core/clients/civitai_client.py +412 -0
- comfygit_core/clients/github_client.py +349 -0
- comfygit_core/clients/registry_client.py +230 -0
- comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
- comfygit_core/configs/comfyui_models.py +62 -0
- comfygit_core/configs/model_config.py +151 -0
- comfygit_core/constants.py +82 -0
- comfygit_core/core/environment.py +1635 -0
- comfygit_core/core/workspace.py +898 -0
- comfygit_core/factories/environment_factory.py +419 -0
- comfygit_core/factories/uv_factory.py +61 -0
- comfygit_core/factories/workspace_factory.py +109 -0
- comfygit_core/infrastructure/sqlite_manager.py +156 -0
- comfygit_core/integrations/__init__.py +7 -0
- comfygit_core/integrations/uv_command.py +318 -0
- comfygit_core/logging/logging_config.py +15 -0
- comfygit_core/managers/environment_git_orchestrator.py +316 -0
- comfygit_core/managers/environment_model_manager.py +296 -0
- comfygit_core/managers/export_import_manager.py +116 -0
- comfygit_core/managers/git_manager.py +667 -0
- comfygit_core/managers/model_download_manager.py +252 -0
- comfygit_core/managers/model_symlink_manager.py +166 -0
- comfygit_core/managers/node_manager.py +1378 -0
- comfygit_core/managers/pyproject_manager.py +1321 -0
- comfygit_core/managers/user_content_symlink_manager.py +436 -0
- comfygit_core/managers/uv_project_manager.py +569 -0
- comfygit_core/managers/workflow_manager.py +1944 -0
- comfygit_core/models/civitai.py +432 -0
- comfygit_core/models/commit.py +18 -0
- comfygit_core/models/environment.py +293 -0
- comfygit_core/models/exceptions.py +378 -0
- comfygit_core/models/manifest.py +132 -0
- comfygit_core/models/node_mapping.py +201 -0
- comfygit_core/models/protocols.py +248 -0
- comfygit_core/models/registry.py +63 -0
- comfygit_core/models/shared.py +356 -0
- comfygit_core/models/sync.py +42 -0
- comfygit_core/models/system.py +204 -0
- comfygit_core/models/workflow.py +914 -0
- comfygit_core/models/workspace_config.py +71 -0
- comfygit_core/py.typed +0 -0
- comfygit_core/repositories/migrate_paths.py +49 -0
- comfygit_core/repositories/model_repository.py +958 -0
- comfygit_core/repositories/node_mappings_repository.py +246 -0
- comfygit_core/repositories/workflow_repository.py +57 -0
- comfygit_core/repositories/workspace_config_repository.py +121 -0
- comfygit_core/resolvers/global_node_resolver.py +459 -0
- comfygit_core/resolvers/model_resolver.py +250 -0
- comfygit_core/services/import_analyzer.py +218 -0
- comfygit_core/services/model_downloader.py +422 -0
- comfygit_core/services/node_lookup_service.py +251 -0
- comfygit_core/services/registry_data_manager.py +161 -0
- comfygit_core/strategies/__init__.py +4 -0
- comfygit_core/strategies/auto.py +72 -0
- comfygit_core/strategies/confirmation.py +69 -0
- comfygit_core/utils/comfyui_ops.py +125 -0
- comfygit_core/utils/common.py +164 -0
- comfygit_core/utils/conflict_parser.py +232 -0
- comfygit_core/utils/dependency_parser.py +231 -0
- comfygit_core/utils/download.py +216 -0
- comfygit_core/utils/environment_cleanup.py +111 -0
- comfygit_core/utils/filesystem.py +178 -0
- comfygit_core/utils/git.py +1184 -0
- comfygit_core/utils/input_signature.py +145 -0
- comfygit_core/utils/model_categories.py +52 -0
- comfygit_core/utils/pytorch.py +71 -0
- comfygit_core/utils/requirements.py +211 -0
- comfygit_core/utils/retry.py +242 -0
- comfygit_core/utils/symlink_utils.py +119 -0
- comfygit_core/utils/system_detector.py +258 -0
- comfygit_core/utils/uuid.py +28 -0
- comfygit_core/utils/uv_error_handler.py +158 -0
- comfygit_core/utils/version.py +73 -0
- comfygit_core/utils/workflow_hash.py +90 -0
- comfygit_core/validation/resolution_tester.py +297 -0
- comfygit_core-0.2.0.dist-info/METADATA +939 -0
- comfygit_core-0.2.0.dist-info/RECORD +93 -0
- comfygit_core-0.2.0.dist-info/WHEEL +4 -0
- comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
comfygit_core/models/manifest.py
@@ -0,0 +1,132 @@
+# models/manifest.py
+from dataclasses import dataclass, field
+
+from comfygit_core.models.shared import ModelWithLocation
+from comfygit_core.models.workflow import WorkflowNodeWidgetRef
+
+
+@dataclass
+class ManifestWorkflowModel:
+    """Workflow model entry as stored in pyproject.toml"""
+    filename: str
+    category: str # "checkpoints", "loras", etc.
+    criticality: str # "required", "flexible", "optional"
+    status: str # "resolved", "unresolved"
+    nodes: list[WorkflowNodeWidgetRef]
+    hash: str | None = None # Only present if resolved
+    sources: list[str] = field(default_factory=list) # Download URLs
+    relative_path: str | None = None # Target path for download intents
+
+    def to_toml_dict(self) -> dict:
+        """Serialize to TOML-compatible dict with inline table formatting."""
+        import tomlkit
+
+        # Build nodes as inline tables for clean TOML output
+        nodes_array = tomlkit.array()
+        for n in self.nodes:
+            node_entry = tomlkit.inline_table()
+            node_entry['node_id'] = n.node_id
+            node_entry['node_type'] = n.node_type
+            node_entry['widget_idx'] = n.widget_index
+            node_entry['widget_value'] = n.widget_value
+            nodes_array.append(node_entry)
+
+        result = {
+            "filename": self.filename,
+            "category": self.category,
+            "criticality": self.criticality,
+            "status": self.status,
+            "nodes": nodes_array
+        }
+
+        # Only include optional fields if present
+        if self.hash is not None:
+            result["hash"] = self.hash
+        if self.sources:
+            result["sources"] = self.sources
+        if self.relative_path is not None:
+            result["relative_path"] = self.relative_path
+
+        return result
+
+    @classmethod
+    def from_toml_dict(cls, data: dict) -> "ManifestWorkflowModel":
+        """Deserialize from TOML dict."""
+        nodes = [
+            WorkflowNodeWidgetRef(
+                node_id=n["node_id"],
+                node_type=n["node_type"],
+                widget_index=n["widget_idx"],
+                widget_value=n["widget_value"]
+            )
+            for n in data.get("nodes", [])
+        ]
+
+        return cls(
+            filename=data["filename"],
+            category=data["category"],
+            criticality=data.get("criticality", "flexible"),
+            status=data.get("status", "resolved"),
+            nodes=nodes,
+            hash=data.get("hash"),
+            sources=data.get("sources", []),
+            relative_path=data.get("relative_path")
+        )
+
+@dataclass
+class ManifestModel:
+    """Global model entry in [tool.comfygit.models]"""
+    hash: str # Primary key
+    filename: str
+    size: int
+    relative_path: str
+    category: str
+    sources: list[str] = field(default_factory=list)
+
+    def to_toml_dict(self) -> dict:
+        """Serialize to TOML-compatible dict."""
+        result = {
+            "filename": self.filename,
+            "size": self.size,
+            "relative_path": self.relative_path,
+            "category": self.category
+        }
+        if self.sources:
+            result["sources"] = self.sources
+        return result
+
+    @classmethod
+    def from_toml_dict(cls, hash_key: str, data: dict) -> "ManifestModel":
+        """Deserialize from TOML dict."""
+        return cls(
+            hash=hash_key,
+            filename=data["filename"],
+            size=data["size"],
+            relative_path=data["relative_path"],
+            category=data.get("category", "unknown"),
+            sources=data.get("sources", [])
+        )
+
+    @classmethod
+    def from_model_with_location(cls, model: "ModelWithLocation") -> "ManifestModel":
+        """Convert runtime model to manifest entry.
+
+        Note: Sources are intentionally empty here. They should be fetched from
+        the repository and provided when creating ManifestModel instances.
+
+        Args:
+            model: ModelWithLocation from model repository
+
+        Returns:
+            ManifestModel ready for TOML serialization
+        """
+        from comfygit_core.models.shared import ModelWithLocation
+
+        return cls(
+            hash=model.hash,
+            filename=model.filename,
+            size=model.file_size,
+            relative_path=model.relative_path,
+            category=model.category,
+            sources=[]
+        )
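The manifest types above are plain dataclasses plus a little tomlkit glue. As a rough usage sketch (not part of the package; it assumes WorkflowNodeWidgetRef can be constructed from exactly the four fields that to_toml_dict/from_toml_dict touch, and the node id value is illustrative):

# Hedged sketch: round-trip a ManifestWorkflowModel through its TOML form.
import tomlkit

from comfygit_core.models.manifest import ManifestWorkflowModel
from comfygit_core.models.workflow import WorkflowNodeWidgetRef

entry = ManifestWorkflowModel(
    filename="example.safetensors",
    category="checkpoints",
    criticality="required",
    status="resolved",
    nodes=[
        WorkflowNodeWidgetRef(
            node_id=4,  # illustrative values
            node_type="CheckpointLoaderSimple",
            widget_index=0,
            widget_value="example.safetensors",
        )
    ],
    hash="deadbeef",
    sources=["https://example.com/example.safetensors"],
)

doc = tomlkit.document()
doc["model"] = entry.to_toml_dict()  # each node renders as a single-line inline table
restored = ManifestWorkflowModel.from_toml_dict(tomlkit.parse(tomlkit.dumps(doc))["model"])
assert restored.filename == entry.filename and restored.hash == entry.hash

Building the nodes array from tomlkit inline tables keeps each node reference on one line in the emitted pyproject.toml, which is presumably why to_toml_dict constructs them explicitly instead of passing plain dicts.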
comfygit_core/models/node_mapping.py
@@ -0,0 +1,201 @@
+"""Global node mappings table dataclasses."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+""" example mappings:
+"mappings": {
+    "(Down)Load Hibiki Model::_": {
+        "package_id": "comfyui-hibiki",
+        "versions": [
+            "1.0.0"
+        ]
+    },
+    "(Down)Load Kokoro Model::_": {
+        "package_id": "comfyui-jhj-kokoro-onnx",
+        "versions": [],
+        "source": "manager"
+    },
+    "Test::_": [ # TODO: Multiple mappings possible for same node type, order by downloads/stars
+        {
+            "package_id": "comfyui-jhj-kokoro-onnx",
+            "versions": [...],
+            "source": "registry",
+            "rank": 1
+        },
+        {
+            "package_id": "comfyui-some-node",
+            "versions": [...],
+            "source": "manager",
+            "rank": 2
+        },
+        ...
+    ],
+"""
+
+
+@dataclass
+class PackageMapping:
+    """Single package mapping entry within a node key."""
+    package_id: str
+    versions: list[str]
+    rank: int # 1-based popularity ranking
+    source: str | None = None # "manager" or None (Registry default)
+
+
+@dataclass
+class GlobalNodeMapping:
+    """Mapping from node type to list of package options (ranked)."""
+
+    id: str # Compound key (e.g. "NodeType::<input list hash>")
+    packages: list[PackageMapping] # List of package options, ranked by popularity
+
+
+""" example package:
+"comfyui-hibiki": {
+    "display_name": "ComfyUI-hibiki",
+    "author": "",
+    "description": "ComfyUI wrapper for Speech-to-Speech translation, hibiki: https://github.com/kyutai-labs/hibiki",
+    "repository": "https://github.com/jhj0517/ComfyUI-hibiki.git",
+    "downloads": 909,
+    "github_stars": 0,
+    "rating": 0,
+    "license": "{\"file\": \"LICENSE\"}",
+    "category": "",
+    "tags": [],
+    "status": "NodeStatusActive",
+    "created_at": "2025-02-09T12:51:54.479852Z",
+    "versions": {
+        "1.0.0": {
+            "version": "1.0.0",
+            "changelog": "",
+            "release_date": "2025-02-09T12:51:54.912872Z",
+            "dependencies": [
+                "git+https://github.com/jhj0517/moshi_comfyui_wrapper.git@main#subdirectory=moshi"
+            ],
+            "deprecated": false,
+            "download_url": "https://cdn.comfy.org/jhj0517/comfyui-hibiki/1.0.0/node.zip",
+            "status": "NodeVersionStatusFlagged",
+            "supported_accelerators": null,
+            "supported_comfyui_version": "",
+            "supported_os": null
+        }
+    }
+},
+...
+"github_zzw5516_comfyui-zw-tools": {
+    "display_name": "comfyui-zw-tools",
+    "author": "zzw5516",
+    "description": "",
+    "repository": "https://github.com/zzw5516/ComfyUI-zw-tools",
+    "synthetic": true,
+    "source": "manager",
+    "versions": {}
+}
+"""
+
+@dataclass
+class GlobalNodePackageVersion:
+    """Package version data."""
+    version: str # Version (required)
+    # Core fields used by CLI
+    download_url: str | None = None # Download URL
+    deprecated: bool | None = None # Deprecated
+    dependencies: list[str] | None = None # Dependencies
+    # Unused fields (kept for potential future use, omitted from minimal schema)
+    changelog: str | None = None # Changelog
+    release_date: str | None = None # Release date
+    status: str | None = None # Status
+    supported_accelerators: list[str] | None = None # Supported accelerators
+    supported_comfyui_version: str | None = None # Supported ComfyUI version
+    supported_os: list[str] | None = None # Supported OS
+
+    def __repr__(self) -> str:
+        """Concise representation showing version and key flags."""
+        parts = [f"v{self.version}"]
+        if self.deprecated:
+            parts.append("deprecated")
+        if self.dependencies:
+            parts.append(f"{len(self.dependencies)} deps")
+        return f"GlobalNodePackageVersion({', '.join(parts)})"
+
+@dataclass
+class GlobalNodePackage:
+    """Global standard package data."""
+
+    id: str # Package ID (required)
+    # Core fields used by CLI
+    display_name: str | None = None # Display name
+    description: str | None = None # Description
+    repository: str | None = None # Repository
+    github_stars: int | None = None # GitHub stars
+    versions: dict[str, GlobalNodePackageVersion] | None = None # Versions
+    source: str | None = None # Source of the package (None = Registry, "manager" = Manager-only)
+    # Unused fields (kept for potential future use, omitted from minimal schema)
+    author: str | None = None # Author
+    downloads: int | None = None # Downloads
+    rating: int | None = None # Rating
+    license: str | None = None # License
+    category: str | None = None # Category
+    icon: str | None = None # Icon URL
+    tags: list[str] | None = None # Tags
+    status: str | None = None # Status
+    created_at: str | None = None # Created at
+
+    def __repr__(self) -> str:
+        """Concise representation showing key package info and version list."""
+        version_str = ""
+        if self.versions:
+            version_list = list(self.versions.keys())
+            if len(version_list) <= 3:
+                version_str = f", versions=[{', '.join(version_list)}]"
+            else:
+                version_str = f", versions=[{', '.join(version_list[:3])}, ... +{len(version_list) - 3} more]"
+
+        repo_short = ""
+        if self.repository:
+            # Extract just the repo name from URL
+            repo_parts = self.repository.rstrip('/').split('/')
+            repo_short = f", repo={repo_parts[-1] if repo_parts else self.repository}"
+
+        return f"GlobalNodePackage(id={self.id!r}{repo_short}{version_str})"
+
+
+""" example full mappings file:
+"version": "2025.09.19",
+"generated_at": "2025-09-19T18:25:18.347947",
+"stats": {
+    "packages": 3398,
+    "signatures": 34049,
+    "total_nodes": 15280,
+    "augmented": true,
+    "augmentation_date": "2025-09-19T18:26:03.820776",
+    "nodes_from_manager": 19402,
+    "synthetic_packages": 485
+},
+"mappings": {...},
+"packages": {...},
+"""
+
+
+@dataclass
+class GlobalNodeMappingsStats:
+    packages: int | None = None
+    signatures: int | None = None
+    total_nodes: int | None = None
+    augmented: bool | None = None
+    augmentation_date: str | None = None
+    nodes_from_manager: int | None = None
+    manager_packages: int | None = None
+
+
+@dataclass
+class GlobalNodeMappings:
+    """Global node mappings table."""
+
+    version: str
+    generated_at: str
+    stats: GlobalNodeMappingsStats | None
+    mappings: dict[str, GlobalNodeMapping] = field(default_factory=dict)
+    packages: dict[str, GlobalNodePackage] = field(default_factory=dict)
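The docstring examples above document the JSON shape of the bundled node-mappings table. comfygit-core ships its own loader (node_mappings_repository.py in the file list above); the following is only an illustrative sketch of how that JSON lines up with these dataclasses, and its handling of single-entry versus ranked-list keys, defaulted ranks, and unknown stats keys is an assumption rather than the project's logic:

# Hedged sketch, not the package's loader.
import json

from comfygit_core.models.node_mapping import (
    GlobalNodeMapping,
    GlobalNodeMappings,
    GlobalNodeMappingsStats,
    GlobalNodePackage,
    GlobalNodePackageVersion,
    PackageMapping,
)


def load_mappings(raw: str) -> GlobalNodeMappings:
    data = json.loads(raw)

    mappings: dict[str, GlobalNodeMapping] = {}
    for key, value in data.get("mappings", {}).items():
        # A key may hold a single mapping dict or a ranked list of them.
        options = value if isinstance(value, list) else [value]
        mappings[key] = GlobalNodeMapping(
            id=key,
            packages=[
                PackageMapping(
                    package_id=o["package_id"],
                    versions=o.get("versions", []),
                    rank=o.get("rank", i + 1),
                    source=o.get("source"),
                )
                for i, o in enumerate(options)
            ],
        )

    packages: dict[str, GlobalNodePackage] = {}
    for pkg_id, p in data.get("packages", {}).items():
        packages[pkg_id] = GlobalNodePackage(
            id=pkg_id,
            display_name=p.get("display_name"),
            repository=p.get("repository"),
            source=p.get("source"),
            versions={
                v: GlobalNodePackageVersion(
                    version=v,
                    download_url=vd.get("download_url"),
                    deprecated=vd.get("deprecated"),
                    dependencies=vd.get("dependencies"),
                )
                for v, vd in p.get("versions", {}).items()
            },
        )

    # Ignore stats keys the dataclass does not declare (e.g. "synthetic_packages").
    stats_raw = data.get("stats") or {}
    known = GlobalNodeMappingsStats.__dataclass_fields__
    stats = GlobalNodeMappingsStats(**{k: v for k, v in stats_raw.items() if k in known})

    return GlobalNodeMappings(
        version=data["version"],
        generated_at=data["generated_at"],
        stats=stats,
        mappings=mappings,
        packages=packages,
    )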
comfygit_core/models/protocols.py
@@ -0,0 +1,248 @@
+"""Resolution strategy protocols for dependency injection."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Protocol
+
+from .workflow import (
+    ModelResolutionContext,
+    NodeResolutionContext,
+    ResolvedModel,
+    WorkflowNodeWidgetRef,
+)
+
+if TYPE_CHECKING:
+    from ..models.workflow import ResolvedNodePackage
+
+class NodeResolutionStrategy(Protocol):
+    """Protocol for resolving unknown custom nodes."""
+
+    def resolve_unknown_node(
+        self,
+        node_type: str,
+        possible: list[ResolvedNodePackage],
+        context: NodeResolutionContext
+    ) -> ResolvedNodePackage | None:
+        """Given the node type and registry suggestions, return the package to install, or None.
+
+        Args:
+            node_type: The unknown node type (e.g. "MyCustomNode")
+            possible: List of registry suggestions with package_id, confidence
+            context: Resolution context with search function and installed packages
+
+        Returns:
+            ResolvedNodePackage to install or None to skip
+        """
+        ...
+
+    def confirm_node_install(self, package: ResolvedNodePackage) -> bool:
+        """Confirm whether to install a node package.
+
+        Args:
+            package: Resolved node package to confirm
+
+        Returns:
+            True to install, False to skip
+        """
+        ...
+
+
+class ModelResolutionStrategy(Protocol):
+    """Protocol for resolving model references."""
+
+    def resolve_model(
+        self,
+        reference: WorkflowNodeWidgetRef,
+        candidates: list[ResolvedModel],
+        context: ModelResolutionContext,
+    ) -> ResolvedModel | None:
+        """Resolve a model reference (ambiguous or missing).
+
+        Args:
+            reference: The model reference from workflow
+            candidates: List of potential matches (may be empty for missing models)
+            context: Resolution context with search function and workflow info
+
+        Returns:
+            ResolvedModel with resolved_model set (resolved_model=None for optional unresolved),
+            or None to skip resolution
+
+        Note:
+            - For resolved models: Return ResolvedModel with resolved_model set
+            - For optional unresolved: Return ResolvedModel with resolved_model=None, is_optional=True
+            - To skip: Return None
+        """
+        ...
+
+
+class RollbackStrategy(Protocol):
+    """Protocol for confirming destructive rollback operations."""
+
+    def confirm_destructive_rollback(
+        self,
+        git_changes: bool,
+        workflow_changes: bool,
+    ) -> bool:
+        """Confirm rollback that will discard uncommitted changes.
+
+        Args:
+            git_changes: Whether there are uncommitted git changes in .cec/
+            workflow_changes: Whether there are modified/new/deleted workflows
+
+        Returns:
+            True to proceed with rollback, False to cancel
+        """
+        ...
+
+
+class SyncCallbacks(Protocol):
+    """Protocol for sync operation callbacks."""
+
+    def on_dependency_group_start(self, group_name: str, is_optional: bool) -> None:
+        """Called when starting to install a dependency group.
+
+        Args:
+            group_name: Name of the dependency group
+            is_optional: Whether this is an optional group
+        """
+        ...
+
+    def on_dependency_group_complete(self, group_name: str, success: bool, error: str | None = None) -> None:
+        """Called when dependency group installation completes.
+
+        Args:
+            group_name: Name of the dependency group
+            success: Whether the installation succeeded
+            error: Error message if failed (None if succeeded)
+        """
+        ...
+
+
+class ImportCallbacks(Protocol):
+    """Protocol for import operation callbacks."""
+
+    def on_phase(self, phase: str, description: str) -> None:
+        """Called when entering a new import phase.
+
+        Args:
+            phase: Phase identifier (e.g., "extract", "install_deps", "sync_nodes")
+            description: Human-readable phase description
+        """
+        ...
+
+    def on_dependency_group_start(self, group_name: str, is_optional: bool) -> None:
+        """Called when starting to install a dependency group.
+
+        Args:
+            group_name: Name of the dependency group
+            is_optional: Whether this is an optional group
+        """
+        ...
+
+    def on_dependency_group_complete(self, group_name: str, success: bool, error: str | None = None) -> None:
+        """Called when dependency group installation completes.
+
+        Args:
+            group_name: Name of the dependency group
+            success: Whether the installation succeeded
+            error: Error message if failed (None if succeeded)
+        """
+        ...
+
+    def on_workflow_copied(self, workflow_name: str) -> None:
+        """Called when a workflow file is copied.
+
+        Args:
+            workflow_name: Name of the workflow file
+        """
+        ...
+
+    def on_node_installed(self, node_name: str) -> None:
+        """Called when a custom node is installed.
+
+        Args:
+            node_name: Name of the installed node
+        """
+        ...
+
+    def on_workflow_resolved(self, workflow_name: str, downloads: int) -> None:
+        """Called when a workflow is resolved.
+
+        Args:
+            workflow_name: Name of the workflow
+            downloads: Number of models downloaded for this workflow
+        """
+        ...
+
+    def on_error(self, error: str) -> None:
+        """Called when a non-fatal error occurs.
+
+        Args:
+            error: Error message
+        """
+        ...
+
+    def on_download_failures(self, failures: list[tuple[str, str]]) -> None:
+        """Called when model downloads fail during import.
+
+        Args:
+            failures: List of (workflow_name, model_filename) tuples
+        """
+        ...
+
+    def on_download_batch_start(self, count: int) -> None:
+        """Called when batch model downloads start.
+
+        Args:
+            count: Number of models to download
+        """
+        ...
+
+    def on_download_file_start(self, name: str, idx: int, total: int) -> None:
+        """Called when individual model download starts.
+
+        Args:
+            name: Model filename
+            idx: Current file index (1-based)
+            total: Total number of files
+        """
+        ...
+
+    def on_download_file_progress(self, downloaded: int, total: int | None) -> None:
+        """Called during model download progress.
+
+        Args:
+            downloaded: Bytes downloaded so far
+            total: Total bytes (None if unknown)
+        """
+        ...
+
+    def on_download_file_complete(self, name: str, success: bool, error: str | None) -> None:
+        """Called when model download completes.
+
+        Args:
+            name: Model filename
+            success: Whether download succeeded
+            error: Error message if failed
+        """
+        ...
+
+    def on_download_batch_complete(self, success: int, total: int) -> None:
+        """Called when all downloads complete.
+
+        Args:
+            success: Number of successful downloads
+            total: Total number of downloads attempted
+        """
+        ...
+
+
+class ExportCallbacks(Protocol):
+    """Protocol for export operation callbacks."""
+
+    def on_models_without_sources(self, models: list) -> None:
+        """Called when models are missing source URLs.
+
+        Args:
+            models: List of ModelWithoutSourceInfo instances
+        """
+        ...
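Because these are typing.Protocol classes, strategies and callback sinks conform structurally rather than by inheritance, which is how the CLI can inject interactive behaviour while tests or automation inject silent defaults. The packaged implementations live in strategies/auto.py and strategies/confirmation.py; the sketch below is not one of them, just a minimal conforming NodeResolutionStrategy under the assumption that the argument types behave as documented above:

# Hedged sketch: a structural (duck-typed) NodeResolutionStrategy.
from __future__ import annotations

from comfygit_core.models.protocols import NodeResolutionStrategy
from comfygit_core.models.workflow import NodeResolutionContext, ResolvedNodePackage


class FirstCandidateStrategy:
    """Accept the top-ranked suggestion and never prompt."""

    def resolve_unknown_node(
        self,
        node_type: str,
        possible: list[ResolvedNodePackage],
        context: NodeResolutionContext,
    ) -> ResolvedNodePackage | None:
        # Take the first suggestion if any exist; otherwise skip this node type.
        return possible[0] if possible else None

    def confirm_node_install(self, package: ResolvedNodePackage) -> bool:
        return True


# No inheritance needed: a structural match satisfies the Protocol annotation.
strategy: NodeResolutionStrategy = FirstCandidateStrategy()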
comfygit_core/models/registry.py
@@ -0,0 +1,63 @@
+import re
+from dataclasses import asdict, dataclass, field
+from pathlib import Path
+from typing import Any
+from urllib.parse import urlparse
+
+from .exceptions import ComfyDockError
+
+@dataclass
+class RegistryNodeVersion:
+    """Version information for a node."""
+    changelog: str
+    dependencies: list[str]
+    deprecated: bool
+    id: str
+    version: str
+    download_url: str
+
+    @classmethod
+    def from_api_data(cls, api_data: dict) -> "RegistryNodeVersion | None":
+        if not api_data:
+            return None
+        return cls(
+            changelog=api_data.get("changelog", ""),
+            dependencies=api_data.get("dependencies", []),
+            deprecated=api_data.get("deprecated", False),
+            id=api_data.get("id", ""),
+            version=api_data.get("version", ""),
+            download_url=api_data.get("downloadUrl", ""),
+        )
+
+@dataclass
+class RegistryNodeInfo:
+    """Information about a custom node."""
+    id: str
+    name: str
+    description: str
+    author: str | None = None
+    license: str | None = None
+    icon: str | None = None
+    repository: str | None = None
+    tags: list[str] = field(default_factory=list)
+    latest_version: RegistryNodeVersion | None = None
+
+    @classmethod
+    def from_api_data(cls, api_data: dict) -> "RegistryNodeInfo | None":
+        # Ensure dict has id, name and description keys:
+        id = api_data.get("id")
+        name = api_data.get("name")
+        description = api_data.get("description")
+        if not id or not name or not description:
+            return None
+        return cls(
+            id=id,
+            name=name,
+            description=description,
+            author=api_data.get("author"),
+            license=api_data.get("license"),
+            icon=api_data.get("icon"),
+            repository=api_data.get("repository"),
+            tags=api_data.get("tags", []),
+            latest_version=RegistryNodeVersion.from_api_data(api_data.get("latest_version", {})),
+        )
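A short sketch of how the from_api_data guards behave; the payload below is illustrative, not a captured Comfy Registry response:

# Hedged sketch: parsing a partial, Registry-shaped payload.
from comfygit_core.models.registry import RegistryNodeInfo

payload = {
    "id": "comfyui-hibiki",
    "name": "ComfyUI-hibiki",
    "description": "Speech-to-speech translation wrapper",
    "repository": "https://github.com/jhj0517/ComfyUI-hibiki.git",
    "latest_version": {"version": "1.0.0", "downloadUrl": "https://example.com/node.zip"},
}

info = RegistryNodeInfo.from_api_data(payload)
assert info is not None and info.latest_version is not None
assert info.latest_version.download_url == "https://example.com/node.zip"

# Any record missing id, name, or description is dropped rather than half-built.
assert RegistryNodeInfo.from_api_data({"id": "incomplete"}) is None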