comfygit-core 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- comfygit_core/analyzers/custom_node_scanner.py +109 -0
- comfygit_core/analyzers/git_change_parser.py +156 -0
- comfygit_core/analyzers/model_scanner.py +318 -0
- comfygit_core/analyzers/node_classifier.py +58 -0
- comfygit_core/analyzers/node_git_analyzer.py +77 -0
- comfygit_core/analyzers/status_scanner.py +362 -0
- comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
- comfygit_core/caching/__init__.py +16 -0
- comfygit_core/caching/api_cache.py +210 -0
- comfygit_core/caching/base.py +212 -0
- comfygit_core/caching/comfyui_cache.py +100 -0
- comfygit_core/caching/custom_node_cache.py +320 -0
- comfygit_core/caching/workflow_cache.py +797 -0
- comfygit_core/clients/__init__.py +4 -0
- comfygit_core/clients/civitai_client.py +412 -0
- comfygit_core/clients/github_client.py +349 -0
- comfygit_core/clients/registry_client.py +230 -0
- comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
- comfygit_core/configs/comfyui_models.py +62 -0
- comfygit_core/configs/model_config.py +151 -0
- comfygit_core/constants.py +82 -0
- comfygit_core/core/environment.py +1635 -0
- comfygit_core/core/workspace.py +898 -0
- comfygit_core/factories/environment_factory.py +419 -0
- comfygit_core/factories/uv_factory.py +61 -0
- comfygit_core/factories/workspace_factory.py +109 -0
- comfygit_core/infrastructure/sqlite_manager.py +156 -0
- comfygit_core/integrations/__init__.py +7 -0
- comfygit_core/integrations/uv_command.py +318 -0
- comfygit_core/logging/logging_config.py +15 -0
- comfygit_core/managers/environment_git_orchestrator.py +316 -0
- comfygit_core/managers/environment_model_manager.py +296 -0
- comfygit_core/managers/export_import_manager.py +116 -0
- comfygit_core/managers/git_manager.py +667 -0
- comfygit_core/managers/model_download_manager.py +252 -0
- comfygit_core/managers/model_symlink_manager.py +166 -0
- comfygit_core/managers/node_manager.py +1378 -0
- comfygit_core/managers/pyproject_manager.py +1321 -0
- comfygit_core/managers/user_content_symlink_manager.py +436 -0
- comfygit_core/managers/uv_project_manager.py +569 -0
- comfygit_core/managers/workflow_manager.py +1944 -0
- comfygit_core/models/civitai.py +432 -0
- comfygit_core/models/commit.py +18 -0
- comfygit_core/models/environment.py +293 -0
- comfygit_core/models/exceptions.py +378 -0
- comfygit_core/models/manifest.py +132 -0
- comfygit_core/models/node_mapping.py +201 -0
- comfygit_core/models/protocols.py +248 -0
- comfygit_core/models/registry.py +63 -0
- comfygit_core/models/shared.py +356 -0
- comfygit_core/models/sync.py +42 -0
- comfygit_core/models/system.py +204 -0
- comfygit_core/models/workflow.py +914 -0
- comfygit_core/models/workspace_config.py +71 -0
- comfygit_core/py.typed +0 -0
- comfygit_core/repositories/migrate_paths.py +49 -0
- comfygit_core/repositories/model_repository.py +958 -0
- comfygit_core/repositories/node_mappings_repository.py +246 -0
- comfygit_core/repositories/workflow_repository.py +57 -0
- comfygit_core/repositories/workspace_config_repository.py +121 -0
- comfygit_core/resolvers/global_node_resolver.py +459 -0
- comfygit_core/resolvers/model_resolver.py +250 -0
- comfygit_core/services/import_analyzer.py +218 -0
- comfygit_core/services/model_downloader.py +422 -0
- comfygit_core/services/node_lookup_service.py +251 -0
- comfygit_core/services/registry_data_manager.py +161 -0
- comfygit_core/strategies/__init__.py +4 -0
- comfygit_core/strategies/auto.py +72 -0
- comfygit_core/strategies/confirmation.py +69 -0
- comfygit_core/utils/comfyui_ops.py +125 -0
- comfygit_core/utils/common.py +164 -0
- comfygit_core/utils/conflict_parser.py +232 -0
- comfygit_core/utils/dependency_parser.py +231 -0
- comfygit_core/utils/download.py +216 -0
- comfygit_core/utils/environment_cleanup.py +111 -0
- comfygit_core/utils/filesystem.py +178 -0
- comfygit_core/utils/git.py +1184 -0
- comfygit_core/utils/input_signature.py +145 -0
- comfygit_core/utils/model_categories.py +52 -0
- comfygit_core/utils/pytorch.py +71 -0
- comfygit_core/utils/requirements.py +211 -0
- comfygit_core/utils/retry.py +242 -0
- comfygit_core/utils/symlink_utils.py +119 -0
- comfygit_core/utils/system_detector.py +258 -0
- comfygit_core/utils/uuid.py +28 -0
- comfygit_core/utils/uv_error_handler.py +158 -0
- comfygit_core/utils/version.py +73 -0
- comfygit_core/utils/workflow_hash.py +90 -0
- comfygit_core/validation/resolution_tester.py +297 -0
- comfygit_core-0.2.0.dist-info/METADATA +939 -0
- comfygit_core-0.2.0.dist-info/RECORD +93 -0
- comfygit_core-0.2.0.dist-info/WHEEL +4 -0
- comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
|
@@ -0,0 +1,246 @@
|
|
|
1
|
+
"""Repository for node mappings data access - loads and provides query interface for global node mappings."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from functools import cached_property
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
from ..logging.logging_config import get_logger
|
|
10
|
+
from ..models.node_mapping import (
|
|
11
|
+
GlobalNodeMapping,
|
|
12
|
+
GlobalNodeMappings,
|
|
13
|
+
GlobalNodeMappingsStats,
|
|
14
|
+
GlobalNodePackage,
|
|
15
|
+
GlobalNodePackageVersion,
|
|
16
|
+
PackageMapping,
|
|
17
|
+
)
|
|
18
|
+
from ..utils.git import normalize_github_url
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from ..services.registry_data_manager import RegistryDataManager
|
|
22
|
+
|
|
23
|
+
logger = get_logger(__name__)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class NodeMappingsRepository:
    """Repository for accessing global node mappings data.

    Responsible for:
    - Loading mappings JSON file
    - Building indexes (GitHub URL -> package)
    - Providing simple query interface
    """

    def __init__(self, data_manager: RegistryDataManager):
        """Initialize repository with data manager.

        Args:
            data_manager: RegistryDataManager that handles freshness and caching

        Raises:
            CDRegistryDataError: If mappings file doesn't exist after freshness check
        """
        # Imported locally to avoid importing the exceptions module at class
        # definition time.
        from ..models.exceptions import CDRegistryDataError

        self.data_manager = data_manager
        # Staleness check happens here - data_manager ensures file is fresh
        self.mappings_path = data_manager.get_mappings_path()

        if not self.mappings_path.exists():
            raise CDRegistryDataError(
                message="Registry node mappings not available. The mappings file was not found after attempting to fetch it.",
                cache_path=str(self.mappings_path.parent),
                can_retry=True
            )

    @cached_property
    def global_mappings(self) -> GlobalNodeMappings:
        """Get cached global mappings (loads on first access)."""
        return self._load_mappings()

    @cached_property
    def github_to_registry(self) -> dict[str, GlobalNodePackage]:
        """Get cached GitHub URL to package mapping."""
        return self._build_github_to_registry_map(self.global_mappings)

    @staticmethod
    def _parse_stats(data: dict) -> GlobalNodeMappingsStats:
        """Parse the optional "stats" section of the mappings JSON."""
        stats_data = data.get("stats", {})
        return GlobalNodeMappingsStats(
            packages=stats_data.get("packages"),
            signatures=stats_data.get("signatures"),
            total_nodes=stats_data.get("total_nodes"),
            augmented=stats_data.get("augmented"),
            augmentation_date=stats_data.get("augmentation_date"),
            nodes_from_manager=stats_data.get("nodes_from_manager"),
            manager_packages=stats_data.get("manager_packages"),
        )

    @staticmethod
    def _parse_node_mappings(data: dict) -> dict[str, GlobalNodeMapping]:
        """Parse the "mappings" section into GlobalNodeMapping objects."""
        mappings: dict[str, GlobalNodeMapping] = {}
        for key, mapping_data in data.get("mappings", {}).items():
            # mapping_data is an array of PackageMapping dicts
            package_mappings = [
                PackageMapping(
                    package_id=pkg_mapping["package_id"],
                    versions=pkg_mapping.get("versions", []),
                    rank=pkg_mapping["rank"],
                    source=pkg_mapping.get("source"),
                )
                for pkg_mapping in mapping_data
            ]
            mappings[key] = GlobalNodeMapping(id=key, packages=package_mappings)
        return mappings

    @staticmethod
    def _parse_packages(data: dict) -> dict[str, GlobalNodePackage]:
        """Parse the "packages" section into GlobalNodePackage objects."""
        packages: dict[str, GlobalNodePackage] = {}
        for pkg_id, pkg_data in data.get("packages", {}).items():
            # Loop over versions and create global node package version objects
            versions: dict[str, GlobalNodePackageVersion] = {}
            for version_id, version_data in pkg_data.get("versions", {}).items():
                versions[version_id] = GlobalNodePackageVersion(
                    version=version_id,
                    changelog=version_data.get("changelog"),
                    release_date=version_data.get("release_date"),
                    dependencies=version_data.get("dependencies"),
                    deprecated=version_data.get("deprecated"),
                    download_url=version_data.get("download_url"),
                    status=version_data.get("status"),
                    supported_accelerators=version_data.get("supported_accelerators"),
                    supported_comfyui_version=version_data.get("supported_comfyui_version"),
                    supported_os=version_data.get("supported_os"),
                )

            packages[pkg_id] = GlobalNodePackage(
                id=pkg_id,
                display_name=pkg_data.get("display_name"),
                author=pkg_data.get("author"),
                description=pkg_data.get("description"),
                repository=pkg_data.get("repository"),
                downloads=pkg_data.get("downloads"),
                github_stars=pkg_data.get("github_stars"),
                rating=pkg_data.get("rating"),
                license=pkg_data.get("license"),
                category=pkg_data.get("category"),
                icon=pkg_data.get("icon"),
                tags=pkg_data.get("tags"),
                status=pkg_data.get("status"),
                created_at=pkg_data.get("created_at"),
                versions=versions,
                source=pkg_data.get("source"),
            )
        return packages

    def _load_mappings(self) -> GlobalNodeMappings:
        """Load global mappings from JSON file.

        Returns:
            GlobalNodeMappings with parsed data structures

        Raises:
            json.JSONDecodeError: If file contains invalid JSON
            OSError: If file cannot be read
        """
        try:
            with open(self.mappings_path, encoding='utf-8') as f:
                data = json.load(f)

            stats = self._parse_stats(data)

            global_mappings = GlobalNodeMappings(
                version=data.get("version", "unknown"),
                generated_at=data.get("generated_at", ""),
                stats=stats,
                mappings=self._parse_node_mappings(data),
                packages=self._parse_packages(data),
            )

            if stats:
                logger.info(
                    f"Loaded global mappings: {stats.signatures} signatures "
                    f"from {stats.packages} packages"
                )

            return global_mappings

        except (json.JSONDecodeError, OSError) as e:
            logger.error(f"Failed to load global mappings: {e}")
            raise

    def _build_github_to_registry_map(self, global_mappings: GlobalNodeMappings) -> dict[str, GlobalNodePackage]:
        """Build reverse mapping from GitHub URLs to registry packages.

        Args:
            global_mappings: Loaded mappings data

        Returns:
            Dict mapping normalized GitHub URLs to packages
        """
        github_to_registry = {}

        # Iterate values directly; the package-id key is not needed here.
        for package in global_mappings.packages.values():
            if package.repository:
                normalized_url = normalize_github_url(package.repository)
                if normalized_url:
                    github_to_registry[normalized_url] = package

        logger.debug(f"Built GitHub to registry map with {len(github_to_registry)} entries")
        return github_to_registry

    # Query Methods

    def get_package(self, package_id: str) -> GlobalNodePackage | None:
        """Get package by ID.

        Args:
            package_id: Package identifier

        Returns:
            GlobalNodePackage or None if not found
        """
        return self.global_mappings.packages.get(package_id)

    def get_mapping(self, node_key: str) -> GlobalNodeMapping | None:
        """Get mapping by node key (e.g., "NodeType::input_hash").

        Args:
            node_key: Node mapping key

        Returns:
            GlobalNodeMapping or None if not found
        """
        return self.global_mappings.mappings.get(node_key)

    def get_all_packages(self) -> dict[str, GlobalNodePackage]:
        """Get all packages.

        Returns:
            Dict of package_id -> GlobalNodePackage
        """
        return self.global_mappings.packages

    def resolve_github_url(self, github_url: str) -> GlobalNodePackage | None:
        """Resolve GitHub URL to registry package.

        Args:
            github_url: GitHub repository URL (any format)

        Returns:
            GlobalNodePackage if URL maps to registry package, None otherwise
        """
        normalized_url = normalize_github_url(github_url)
        return self.github_to_registry.get(normalized_url)

    def get_github_url_for_package(self, package_id: str) -> str | None:
        """Get GitHub URL for a package ID.

        Args:
            package_id: Package identifier

        Returns:
            GitHub URL or None if package not found or has no repository
        """
        package = self.get_package(package_id)
        return package.repository if package else None
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
"""Repository for workflow file operations."""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
from ..logging.logging_config import get_logger
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from ..models.workflow import Workflow
|
|
12
|
+
|
|
13
|
+
logger = get_logger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class WorkflowRepository:
    """Repository for workflow file operations."""

    @staticmethod
    def load(path: Path) -> Workflow:
        """Load workflow from file."""
        from ..models.workflow import Workflow

        try:
            raw_text = path.read_text(encoding='utf-8')
            return Workflow.from_json(json.loads(raw_text))
        except (json.JSONDecodeError, FileNotFoundError, UnicodeDecodeError) as e:
            raise ValueError(f"Failed to load workflow {path}: {e}") from e

    @staticmethod
    def load_raw_text(path: Path) -> str:
        """Load raw workflow text for string matching."""
        try:
            return path.read_text(encoding='utf-8')
        except (FileNotFoundError, UnicodeDecodeError) as e:
            raise ValueError(f"Failed to load workflow text {path}: {e}") from e

    @staticmethod
    def load_raw_json(path: Path) -> dict:
        """Load raw workflow JSON."""
        try:
            return json.loads(path.read_text(encoding='utf-8'))
        except (json.JSONDecodeError, FileNotFoundError, UnicodeDecodeError) as e:
            raise ValueError(f"Failed to load workflow JSON {path}: {e}") from e

    @staticmethod
    def save(workflow: Workflow, path: Path) -> None:
        """Save workflow to file."""
        try:
            path.parent.mkdir(parents=True, exist_ok=True)
            serialized = json.dumps(workflow.to_json(), indent=2)
            path.write_text(serialized, encoding='utf-8')
        except (OSError, UnicodeEncodeError) as e:
            raise ValueError(f"Failed to save workflow {path}: {e}") from e
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from functools import cached_property
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
|
|
7
|
+
from ..models.workspace_config import WorkspaceConfig, ModelDirectory, APICredentials
|
|
8
|
+
from comfygit_core.models.exceptions import ComfyDockError
|
|
9
|
+
from ..logging.logging_config import get_logger
|
|
10
|
+
|
|
11
|
+
logger = get_logger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class WorkspaceConfigRepository:
    """Repository for loading and persisting the workspace configuration file.

    NOTE: ``config_file`` is cached after first load. The mutator methods
    below update the cached object in place and immediately re-save, so the
    cache and the on-disk file stay consistent within this instance.
    """

    def __init__(self, config_file: Path):
        """Initialize repository.

        Args:
            config_file: Path to the workspace config JSON file.
        """
        self.config_file_path = config_file

    @cached_property
    def config_file(self) -> WorkspaceConfig:
        """Cached workspace config (loaded from disk on first access)."""
        data = self.load()
        # Defensive: load() currently always returns a config (it creates a
        # default when the file is missing/corrupt), but guard anyway.
        if data is None:
            raise ComfyDockError("No workspace config found")
        return data

    def load(self) -> WorkspaceConfig:
        """Load config from disk, creating and saving a default if missing or unreadable.

        Returns:
            The loaded (or newly created) WorkspaceConfig.
        """
        result = None
        try:
            with self.config_file_path.open("r") as f:
                result = WorkspaceConfig.from_dict(json.load(f))
        except Exception as e:
            # Deliberate best-effort load: any read/parse failure falls
            # through to creating a fresh default config below.
            logger.warning(f"Failed to load workspace config: {e}")

        logger.debug(f"Loaded workspace config: {result}")

        if result is None:
            logger.info("No workspace config found, creating a new one")
            result = WorkspaceConfig(
                version=1,
                active_environment="",
                created_at=str(datetime.now().isoformat()),
                global_model_directory=None
            )
            self.save(result)
        return result

    def save(self, data: WorkspaceConfig):
        """Persist config to disk.

        Serializes BEFORE opening the file: open("w") truncates immediately,
        so serializing first ensures a to_dict() failure cannot destroy the
        existing config on disk.
        """
        # First serialize to JSON
        data_dict = WorkspaceConfig.to_dict(data)
        with self.config_file_path.open("w") as f:
            json.dump(data_dict, f, indent=2)

    def set_models_directory(self, path: Path):
        """Set the tracked global models directory and persist the change."""
        logger.info(f"Setting models directory to {path}")
        data = self.config_file
        logger.debug(f"Loaded data: {data}")
        model_dir = ModelDirectory(
            path=str(path),
            added_at=str(datetime.now().isoformat()),
            last_sync=str(datetime.now().isoformat()),
        )
        data.global_model_directory = model_dir
        logger.debug(f"Updated data: {data}, saving...")
        self.save(data)
        logger.info(f"Models directory set to {path}")

    def get_models_directory(self) -> Path:
        """Get path to tracked model directory.

        Raises:
            ComfyDockError: If no models directory has been configured.
        """
        data = self.config_file
        if data.global_model_directory is None:
            raise ComfyDockError("No models directory set")
        return Path(data.global_model_directory.path)

    def update_models_sync_time(self):
        """Stamp the models directory with the current time as last_sync.

        Raises:
            ComfyDockError: If no models directory has been configured.
        """
        data = self.config_file
        if data.global_model_directory is None:
            raise ComfyDockError("No models directory set")
        data.global_model_directory.last_sync = str(datetime.now().isoformat())
        self.save(data)

    def set_civitai_token(self, token: str | None):
        """Set or clear CivitAI API token."""
        data = self.config_file
        if token:
            if not data.api_credentials:
                data.api_credentials = APICredentials(civitai_token=token)
            else:
                data.api_credentials.civitai_token = token
            logger.info("CivitAI API token configured")
        else:
            # Clearing: only touch credentials if they exist.
            if data.api_credentials:
                data.api_credentials.civitai_token = None
                logger.info("CivitAI API token cleared")
        self.save(data)

    def get_civitai_token(self) -> str | None:
        """Get CivitAI API token from config or environment."""
        # Priority: environment variable > config file
        env_token = os.environ.get("CIVITAI_API_TOKEN")
        if env_token:
            logger.debug("Using CivitAI token from environment")
            return env_token

        data = self.config_file
        if data.api_credentials and data.api_credentials.civitai_token:
            logger.debug("Using CivitAI token from config")
            return data.api_credentials.civitai_token

        return None

    def get_prefer_registry_cache(self) -> bool:
        """Get prefer_registry_cache setting (defaults to True)."""
        data = self.config_file
        return data.prefer_registry_cache

    def set_prefer_registry_cache(self, enabled: bool):
        """Set prefer_registry_cache setting."""
        data = self.config_file
        data.prefer_registry_cache = enabled
        self.save(data)
        logger.info(f"Registry cache preference set to: {enabled}")
|