comfygit_core-0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- comfygit_core/analyzers/custom_node_scanner.py +109 -0
- comfygit_core/analyzers/git_change_parser.py +156 -0
- comfygit_core/analyzers/model_scanner.py +318 -0
- comfygit_core/analyzers/node_classifier.py +58 -0
- comfygit_core/analyzers/node_git_analyzer.py +77 -0
- comfygit_core/analyzers/status_scanner.py +362 -0
- comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
- comfygit_core/caching/__init__.py +16 -0
- comfygit_core/caching/api_cache.py +210 -0
- comfygit_core/caching/base.py +212 -0
- comfygit_core/caching/comfyui_cache.py +100 -0
- comfygit_core/caching/custom_node_cache.py +320 -0
- comfygit_core/caching/workflow_cache.py +797 -0
- comfygit_core/clients/__init__.py +4 -0
- comfygit_core/clients/civitai_client.py +412 -0
- comfygit_core/clients/github_client.py +349 -0
- comfygit_core/clients/registry_client.py +230 -0
- comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
- comfygit_core/configs/comfyui_models.py +62 -0
- comfygit_core/configs/model_config.py +151 -0
- comfygit_core/constants.py +82 -0
- comfygit_core/core/environment.py +1635 -0
- comfygit_core/core/workspace.py +898 -0
- comfygit_core/factories/environment_factory.py +419 -0
- comfygit_core/factories/uv_factory.py +61 -0
- comfygit_core/factories/workspace_factory.py +109 -0
- comfygit_core/infrastructure/sqlite_manager.py +156 -0
- comfygit_core/integrations/__init__.py +7 -0
- comfygit_core/integrations/uv_command.py +318 -0
- comfygit_core/logging/logging_config.py +15 -0
- comfygit_core/managers/environment_git_orchestrator.py +316 -0
- comfygit_core/managers/environment_model_manager.py +296 -0
- comfygit_core/managers/export_import_manager.py +116 -0
- comfygit_core/managers/git_manager.py +667 -0
- comfygit_core/managers/model_download_manager.py +252 -0
- comfygit_core/managers/model_symlink_manager.py +166 -0
- comfygit_core/managers/node_manager.py +1378 -0
- comfygit_core/managers/pyproject_manager.py +1321 -0
- comfygit_core/managers/user_content_symlink_manager.py +436 -0
- comfygit_core/managers/uv_project_manager.py +569 -0
- comfygit_core/managers/workflow_manager.py +1944 -0
- comfygit_core/models/civitai.py +432 -0
- comfygit_core/models/commit.py +18 -0
- comfygit_core/models/environment.py +293 -0
- comfygit_core/models/exceptions.py +378 -0
- comfygit_core/models/manifest.py +132 -0
- comfygit_core/models/node_mapping.py +201 -0
- comfygit_core/models/protocols.py +248 -0
- comfygit_core/models/registry.py +63 -0
- comfygit_core/models/shared.py +356 -0
- comfygit_core/models/sync.py +42 -0
- comfygit_core/models/system.py +204 -0
- comfygit_core/models/workflow.py +914 -0
- comfygit_core/models/workspace_config.py +71 -0
- comfygit_core/py.typed +0 -0
- comfygit_core/repositories/migrate_paths.py +49 -0
- comfygit_core/repositories/model_repository.py +958 -0
- comfygit_core/repositories/node_mappings_repository.py +246 -0
- comfygit_core/repositories/workflow_repository.py +57 -0
- comfygit_core/repositories/workspace_config_repository.py +121 -0
- comfygit_core/resolvers/global_node_resolver.py +459 -0
- comfygit_core/resolvers/model_resolver.py +250 -0
- comfygit_core/services/import_analyzer.py +218 -0
- comfygit_core/services/model_downloader.py +422 -0
- comfygit_core/services/node_lookup_service.py +251 -0
- comfygit_core/services/registry_data_manager.py +161 -0
- comfygit_core/strategies/__init__.py +4 -0
- comfygit_core/strategies/auto.py +72 -0
- comfygit_core/strategies/confirmation.py +69 -0
- comfygit_core/utils/comfyui_ops.py +125 -0
- comfygit_core/utils/common.py +164 -0
- comfygit_core/utils/conflict_parser.py +232 -0
- comfygit_core/utils/dependency_parser.py +231 -0
- comfygit_core/utils/download.py +216 -0
- comfygit_core/utils/environment_cleanup.py +111 -0
- comfygit_core/utils/filesystem.py +178 -0
- comfygit_core/utils/git.py +1184 -0
- comfygit_core/utils/input_signature.py +145 -0
- comfygit_core/utils/model_categories.py +52 -0
- comfygit_core/utils/pytorch.py +71 -0
- comfygit_core/utils/requirements.py +211 -0
- comfygit_core/utils/retry.py +242 -0
- comfygit_core/utils/symlink_utils.py +119 -0
- comfygit_core/utils/system_detector.py +258 -0
- comfygit_core/utils/uuid.py +28 -0
- comfygit_core/utils/uv_error_handler.py +158 -0
- comfygit_core/utils/version.py +73 -0
- comfygit_core/utils/workflow_hash.py +90 -0
- comfygit_core/validation/resolution_tester.py +297 -0
- comfygit_core-0.2.0.dist-info/METADATA +939 -0
- comfygit_core-0.2.0.dist-info/RECORD +93 -0
- comfygit_core-0.2.0.dist-info/WHEEL +4 -0
- comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
comfygit_core/caching/api_cache.py
@@ -0,0 +1,210 @@
+"""Unified cache manager for API responses with expiration support."""
+
+import hashlib
+import json
+import time
+from pathlib import Path
+from typing import Any
+
+from ..logging.logging_config import get_logger
+from ..models.exceptions import ComfyDockError
+
+logger = get_logger(__name__)
+
+
+class APICacheManager:
+    """Manages persistent caching of API responses with expiration."""
+
+    def __init__(self, cache_name: str = "api",
+                 default_ttl_hours: int = 24,
+                 cache_base_path: Path | None = None):
+        """Initialize cache manager.
+
+        Args:
+            cache_name: Name of the cache subdirectory
+            default_ttl_hours: Default time-to-live in hours for cache entries
+            cache_base_path: Required cache base path (workspace cache directory)
+
+        Raises:
+            ValueError: If cache_base_path is None
+        """
+        if cache_base_path is None:
+            raise ValueError(
+                "cache_base_path is required. All caches must be workspace-relative."
+            )
+        self.cache_name = cache_name
+        self.default_ttl_seconds = default_ttl_hours * 3600
+        self.cache_dir = cache_base_path / cache_name
+        self._ensure_cache_directory()
+
+        logger.debug(f"Initialized API cache at: {self.cache_dir}")
+
+    def _ensure_cache_directory(self):
+        """Ensure cache directory exists.
+
+        Raises:
+            ComfyDockError: If cache directory creation fails
+        """
+        try:
+            self.cache_dir.mkdir(parents=True, exist_ok=True)
+            logger.debug(f"Cache directory: {self.cache_dir}")
+        except Exception as e:
+            raise ComfyDockError(
+                f"Failed to create cache directory {self.cache_dir}. "
+                f"Workspace cache should exist before cache initialization: {e}"
+            )
+
+    def _get_cache_file_path(self, cache_type: str) -> Path:
+        """Get path for a specific cache file."""
+        return self.cache_dir / f"{cache_type}_cache.json"
+
+    def _sanitize_key(self, key: str) -> str:
+        """Sanitize cache key to be filesystem-safe."""
+        # Create a hash of the key to avoid filesystem issues
+        key_hash = hashlib.md5(key.encode()).hexdigest()[:8]
+        # Keep some readable part of the key
+        safe_key = "".join(c if c.isalnum() or c in '-_' else '_' for c in key)
+        return f"{safe_key[:50]}_{key_hash}"
+
+    def get(self, cache_type: str, key: str, ttl_seconds: int | None = None) -> Any | None:
+        """Get a value from cache if it exists and hasn't expired.
+
+        Args:
+            cache_type: Type of cache (e.g., 'github', 'registry')
+            key: Cache key
+            ttl_seconds: Time-to-live in seconds (overrides default)
+
+        Returns:
+            Cached value if valid, None if expired or not found
+        """
+        ttl = ttl_seconds if ttl_seconds is not None else self.default_ttl_seconds
+        cache_file = self._get_cache_file_path(cache_type)
+
+        if not cache_file.exists():
+            return None
+
+        try:
+            logger.debug(f"Reading cache for {cache_type}:{key}")
+            with open(cache_file, encoding='utf-8') as f:
+                cache_data = json.load(f)
+
+            logger.debug(f"Found {len(cache_data)} entries in cache for {cache_type}")
+            sanitized_key = self._sanitize_key(key)
+            if sanitized_key not in cache_data:
+                logger.debug(f"Cache miss for {cache_type}:{key}")
+                return None
+
+            entry = cache_data[sanitized_key]
+            timestamp = entry.get('timestamp', 0)
+
+            logger.debug(f"Cache timestamp for {cache_type}:{key}: {timestamp}")
+
+            # Check if entry has expired
+            if time.time() - timestamp > ttl:
+                logger.debug(f"Cache expired for {cache_type}:{key}")
+                return None
+
+            logger.debug(f"Cache hit for {cache_type}:{key}")
+            return entry.get('data')
+
+        except Exception as e:
+            logger.warning(f"Error reading cache for {cache_type}: {e}")
+            return None
+
+    def set(self, cache_type: str, key: str, value: Any) -> bool:
+        """Store a value in cache with current timestamp.
+
+        Args:
+            cache_type: Type of cache (e.g., 'github', 'registry')
+            key: Cache key
+            value: Value to cache
+
+        Returns:
+            True if successfully cached, False otherwise
+        """
+        cache_file = self._get_cache_file_path(cache_type)
+
+        try:
+            # Load existing cache
+            if cache_file.exists():
+                with open(cache_file, encoding='utf-8') as f:
+                    cache_data = json.load(f)
+            else:
+                cache_data = {}
+
+            # Add new entry
+            sanitized_key = self._sanitize_key(key)
+            cache_data[sanitized_key] = {
+                'timestamp': time.time(),
+                'data': value,
+                'original_key': key  # Store original key for debugging
+            }
+
+            # Write updated cache
+            with open(cache_file, 'w', encoding='utf-8') as f:
+                json.dump(cache_data, f, indent=2)
+
+            logger.debug(f"Cached {cache_type}:{key}")
+            return True
+
+        except Exception as e:
+            logger.warning(f"Error writing cache for {cache_type}: {e}")
+            return False
+
+    def clear(self, cache_type: str | None = None):
+        """Clear cache entries.
+
+        Args:
+            cache_type: Specific cache type to clear, or None to clear all
+        """
+        try:
+            if cache_type:
+                cache_file = self._get_cache_file_path(cache_type)
+                if cache_file.exists():
+                    cache_file.unlink()
+                    logger.info(f"Cleared {cache_type} cache")
+            else:
+                # Clear all cache files
+                for cache_file in self.cache_dir.glob("*_cache.json"):
+                    cache_file.unlink()
+                logger.info("Cleared all caches")
+        except Exception as e:
+            logger.warning(f"Error clearing cache: {e}")
+
+    def cleanup_expired(self, cache_type: str | None = None):
+        """Remove expired entries from cache.
+
+        Args:
+            cache_type: Specific cache type to clean, or None to clean all
+        """
+        cache_types = [cache_type] if cache_type else ['github', 'registry']
+
+        for ct in cache_types:
+            cache_file = self._get_cache_file_path(ct)
+            if not cache_file.exists():
+                continue
+
+            try:
+                with open(cache_file, encoding='utf-8') as f:
+                    cache_data = json.load(f)
+
+                # Filter out expired entries
+                current_time = time.time()
+                cleaned_data = {}
+                expired_count = 0
+
+                for key, entry in cache_data.items():
+                    timestamp = entry.get('timestamp', 0)
+                    if current_time - timestamp <= self.default_ttl_seconds:
+                        cleaned_data[key] = entry
+                    else:
+                        expired_count += 1
+
+                if expired_count > 0:
+                    with open(cache_file, 'w', encoding='utf-8') as f:
+                        json.dump(cleaned_data, f, indent=2)
+                    logger.info(f"Removed {expired_count} expired entries from {ct} cache")
+
+            except Exception as e:
+                logger.warning(f"Error cleaning up {ct} cache: {e}")
+
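A minimal usage sketch of APICacheManager as defined above, assuming a temporary directory stands in for the workspace cache; the 'github' bucket and payload are illustrative only.

import tempfile
from pathlib import Path

from comfygit_core.caching.api_cache import APICacheManager

# Hypothetical workspace cache directory; comfygit normally supplies this path.
workspace_cache = Path(tempfile.mkdtemp())
cache = APICacheManager(default_ttl_hours=24, cache_base_path=workspace_cache)

# Entries are bucketed per cache_type into <cache_dir>/<cache_type>_cache.json.
cache.set("github", "https://github.com/example/repo", {"stars": 42})
print(cache.get("github", "https://github.com/example/repo"))  # {'stars': 42}

# A zero TTL treats any stored entry as already expired.
print(cache.get("github", "https://github.com/example/repo", ttl_seconds=0))  # None

cache.cleanup_expired("github")  # drop entries older than the default TTL
cache.clear()                    # remove all *_cache.json buckets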
comfygit_core/caching/base.py
@@ -0,0 +1,212 @@
+"""Base classes for caching infrastructure.
+
+Provides workspace-relative cache path management and content caching infrastructure
+that can be extended for specific cache types (ComfyUI, custom nodes, models, etc).
+"""
+
+import hashlib
+import json
+import shutil
+from datetime import datetime, timezone
+from pathlib import Path
+
+from ..logging.logging_config import get_logger
+from ..models.exceptions import ComfyDockError
+
+logger = get_logger(__name__)
+
+
+class CacheBase:
+    """Minimal base providing workspace-relative cache management.
+
+    All caches must be workspace-relative. No platform-specific defaults
+    or environment variable overrides.
+    """
+
+    def __init__(self, cache_name: str = "comfygit",
+                 cache_base_path: Path | None = None):
+        """Initialize cache base.
+
+        Args:
+            cache_name: Name of the cache subdirectory
+            cache_base_path: Required cache base path (workspace cache directory)
+
+        Raises:
+            ValueError: If cache_base_path is None
+        """
+        if cache_base_path is None:
+            raise ValueError(
+                "cache_base_path is required. All caches must be workspace-relative."
+            )
+        self.cache_name = cache_name
+        self.cache_base = cache_base_path
+
+    def _ensure_cache_dirs(self, *subdirs: str):
+        """Ensure cache subdirectories exist.
+
+        Args:
+            *subdirs: Subdirectory paths to create under cache_base
+
+        Raises:
+            ComfyDockError: If cache directory creation fails
+        """
+        try:
+            for subdir in subdirs:
+                (self.cache_base / subdir).mkdir(parents=True, exist_ok=True)
+        except Exception as e:
+            raise ComfyDockError(
+                f"Failed to create cache directory under {self.cache_base}. "
+                f"Workspace cache should exist before cache initialization: {e}"
+            )
+
+
+class ContentCacheBase(CacheBase):
+    """Base for content-based caching (files, directories, large data).
+
+    Provides infrastructure for caching content with:
+    - Directory-based storage (cache_key/content/)
+    - Metadata tracking (size, hash, timestamps)
+    - Index file for fast lookup
+    - Content hashing for integrity
+    """
+
+    def __init__(self, content_type: str, cache_base_path: Path | None = None):
+        """Initialize content cache.
+
+        Args:
+            content_type: Type of content being cached (e.g., "comfyui", "custom_nodes")
+            cache_base_path: Override cache base path (for testing)
+        """
+        super().__init__("comfygit", cache_base_path)
+        self.content_type = content_type
+        self.cache_dir = self.cache_base / content_type
+        self.store_dir = self.cache_dir / "store"
+        self.index_file = self.cache_dir / "index.json"
+
+        # Ensure directories exist
+        self._ensure_cache_dirs(content_type, f"{content_type}/store")
+
+        # Load index
+        self.index = self._load_index()
+
+    def _load_index(self) -> dict:
+        """Load cache index from disk.
+
+        Returns:
+            Index dictionary mapping cache keys to metadata
+        """
+        if not self.index_file.exists():
+            return {}
+
+        try:
+            with open(self.index_file, encoding='utf-8') as f:
+                data = json.load(f)
+            return data.get("items", {})
+        except Exception as e:
+            logger.error(f"Failed to load cache index: {e}")
+            return {}
+
+    def _save_index(self):
+        """Save cache index to disk atomically."""
+        try:
+            data = {
+                "version": "1.0",
+                "content_type": self.content_type,
+                "updated_at": datetime.now(timezone.utc).isoformat(),
+                "items": self.index
+            }
+
+            # Atomic write: temp file then replace
+            temp_file = self.index_file.with_suffix(".tmp")
+            with open(temp_file, "w", encoding='utf-8') as f:
+                json.dump(data, f, indent=2)
+            temp_file.replace(self.index_file)
+
+        except Exception as e:
+            logger.error(f"Failed to save cache index: {e}")
+
+    def _calculate_content_hash(self, content_dir: Path) -> str:
+        """Calculate SHA256 hash of directory content for integrity checking.
+
+        Args:
+            content_dir: Directory to hash
+
+        Returns:
+            SHA256 hexdigest of all files in directory
+        """
+        hasher = hashlib.sha256()
+
+        # Sort files for deterministic hashing
+        for file_path in sorted(content_dir.rglob("*")):
+            if file_path.is_file():
+                # Include relative path in hash
+                rel_path = file_path.relative_to(content_dir)
+                hasher.update(str(rel_path).encode())
+
+                # Include file content
+                with open(file_path, "rb") as f:
+                    for chunk in iter(lambda: f.read(65536), b""):
+                        hasher.update(chunk)
+
+        return hasher.hexdigest()
+
+    def cache_content(self, cache_key: str, source_path: Path,
+                      metadata: dict | None = None) -> Path:
+        """Cache content from source directory.
+
+        Args:
+            cache_key: Unique cache key
+            source_path: Path to source content directory
+            metadata: Optional additional metadata to store
+
+        Returns:
+            Path to cached content directory
+        """
+        cache_dir = self.store_dir / cache_key
+        content_dir = cache_dir / "content"
+
+        # Clean up existing cache entry
+        if cache_dir.exists():
+            shutil.rmtree(cache_dir)
+
+        cache_dir.mkdir(parents=True)
+
+        # Copy content
+        shutil.copytree(source_path, content_dir)
+
+        # Calculate metadata
+        size_bytes = sum(
+            f.stat().st_size for f in content_dir.rglob("*") if f.is_file()
+        )
+        content_hash = self._calculate_content_hash(content_dir)
+
+        # Store metadata
+        full_metadata = {
+            "cache_key": cache_key,
+            "cached_at": datetime.now(timezone.utc).isoformat(),
+            "size_bytes": size_bytes,
+            "content_hash": content_hash,
+            **(metadata or {})
+        }
+
+        with open(cache_dir / "metadata.json", "w", encoding='utf-8') as f:
+            json.dump(full_metadata, f, indent=2)
+
+        # Update index
+        self.index[cache_key] = full_metadata
+        self._save_index()
+
+        logger.debug(f"Cached {self.content_type} with key: {cache_key}")
+        return content_dir
+
+    def get_cached_path(self, cache_key: str) -> Path | None:
+        """Get path to cached content if it exists.
+
+        Args:
+            cache_key: Cache key to look up
+
+        Returns:
+            Path to content directory, or None if not cached
+        """
+        content_path = self.store_dir / cache_key / "content"
+        return content_path if content_path.exists() else None
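A sketch of the ContentCacheBase flow, again assuming throwaway temporary directories; the cache key and file content are illustrative, and real callers normally go through a subclass such as ComfyUICacheManager below.

import tempfile
from pathlib import Path

from comfygit_core.caching.base import ContentCacheBase

workspace_cache = Path(tempfile.mkdtemp())  # stand-in for the workspace cache directory
nodes = ContentCacheBase("custom_nodes", cache_base_path=workspace_cache)

# Build a throwaway source directory to cache.
src = Path(tempfile.mkdtemp())
(src / "node.py").write_text("NODE_CLASS_MAPPINGS = {}\n")

# Content lands in <cache>/custom_nodes/store/<key>/content, with metadata.json beside it.
content_dir = nodes.cache_content("my_node_v1", src, metadata={"origin": "local"})
assert nodes.get_cached_path("my_node_v1") == content_dir
print(nodes.index["my_node_v1"]["content_hash"])  # SHA256 over relative paths + file bytes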
comfygit_core/caching/comfyui_cache.py
@@ -0,0 +1,100 @@
+"""ComfyUI version cache manager.
+
+Caches ComfyUI installations by version to avoid re-downloading and re-cloning.
+Supports releases, commits, and branches.
+"""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+from .base import ContentCacheBase
+
+
+@dataclass
+class ComfyUISpec:
+    """Specification for a ComfyUI version."""
+    version: str  # "v0.3.20", "abc123", "main"
+    version_type: str  # "release", "commit", "branch"
+    commit_sha: str | None = None  # Actual commit SHA (for branches)
+
+
+class ComfyUICacheManager(ContentCacheBase):
+    """Cache manager for ComfyUI versions.
+
+    Caches ComfyUI installations by version, including the .git directory
+    for faster cloning and git operations.
+
+    Cache structure:
+        store/
+            release_v0.3.20/
+                content/       # Full ComfyUI directory with .git
+                metadata.json  # version, type, commit_sha, size, hash
+            commit_abc123/
+                content/
+                metadata.json
+    """
+
+    def __init__(self, cache_base_path: Path | None = None):
+        """Initialize ComfyUI cache manager.
+
+        Args:
+            cache_base_path: Override cache base path (for testing)
+        """
+        super().__init__("comfyui", cache_base_path)
+
+    def generate_cache_key(self, spec: ComfyUISpec | str) -> str:
+        """Generate cache key from version specification.
+
+        For releases: "release_v0.3.20"
+        For commits: "commit_abc123"
+        For branches: Use commit SHA for exact caching (branches can change)
+        For simple strings: "version_{version}"
+
+        Args:
+            spec: ComfyUISpec or simple version string
+
+        Returns:
+            Cache key string
+        """
+        if isinstance(spec, str):
+            # Simple string version
+            return f"version_{spec}"
+
+        # Use commit SHA for branches (they can change)
+        if spec.version_type == "branch" and spec.commit_sha:
+            return f"commit_{spec.commit_sha}"
+
+        # For releases and commits, use the version
+        return f"{spec.version_type}_{spec.version}"
+
+    def cache_comfyui(self, spec: ComfyUISpec, source_path: Path) -> Path:
+        """Cache a ComfyUI installation.
+
+        Args:
+            spec: ComfyUI version specification
+            source_path: Path to ComfyUI installation (with .git)
+
+        Returns:
+            Path to cached content
+        """
+        cache_key = self.generate_cache_key(spec)
+
+        metadata = {
+            "version": spec.version,
+            "version_type": spec.version_type,
+            "commit_sha": spec.commit_sha
+        }
+
+        return self.cache_content(cache_key, source_path, metadata)
+
+    def get_cached_comfyui(self, spec: ComfyUISpec | str) -> Path | None:
+        """Get cached ComfyUI path if it exists.
+
+        Args:
+            spec: ComfyUISpec or simple version string
+
+        Returns:
+            Path to cached ComfyUI content, or None if not cached
+        """
+        cache_key = self.generate_cache_key(spec)
+        return self.get_cached_path(cache_key)
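The key-generation rules in generate_cache_key are easiest to see by example; a sketch, with a temporary directory standing in for the workspace cache and hypothetical version strings:

import tempfile
from pathlib import Path

from comfygit_core.caching.comfyui_cache import ComfyUICacheManager, ComfyUISpec

cache = ComfyUICacheManager(cache_base_path=Path(tempfile.mkdtemp()))

print(cache.generate_cache_key(ComfyUISpec("v0.3.20", "release")))  # release_v0.3.20
print(cache.generate_cache_key(ComfyUISpec("abc123", "commit")))    # commit_abc123
# A branch with a resolved SHA is keyed by the SHA, since branch tips move.
print(cache.generate_cache_key(ComfyUISpec("main", "branch", "abc123")))  # commit_abc123
print(cache.generate_cache_key("v0.3.20"))  # version_v0.3.20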