comfygit_core-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. comfygit_core/analyzers/custom_node_scanner.py +109 -0
  2. comfygit_core/analyzers/git_change_parser.py +156 -0
  3. comfygit_core/analyzers/model_scanner.py +318 -0
  4. comfygit_core/analyzers/node_classifier.py +58 -0
  5. comfygit_core/analyzers/node_git_analyzer.py +77 -0
  6. comfygit_core/analyzers/status_scanner.py +362 -0
  7. comfygit_core/analyzers/workflow_dependency_parser.py +143 -0
  8. comfygit_core/caching/__init__.py +16 -0
  9. comfygit_core/caching/api_cache.py +210 -0
  10. comfygit_core/caching/base.py +212 -0
  11. comfygit_core/caching/comfyui_cache.py +100 -0
  12. comfygit_core/caching/custom_node_cache.py +320 -0
  13. comfygit_core/caching/workflow_cache.py +797 -0
  14. comfygit_core/clients/__init__.py +4 -0
  15. comfygit_core/clients/civitai_client.py +412 -0
  16. comfygit_core/clients/github_client.py +349 -0
  17. comfygit_core/clients/registry_client.py +230 -0
  18. comfygit_core/configs/comfyui_builtin_nodes.py +1614 -0
  19. comfygit_core/configs/comfyui_models.py +62 -0
  20. comfygit_core/configs/model_config.py +151 -0
  21. comfygit_core/constants.py +82 -0
  22. comfygit_core/core/environment.py +1635 -0
  23. comfygit_core/core/workspace.py +898 -0
  24. comfygit_core/factories/environment_factory.py +419 -0
  25. comfygit_core/factories/uv_factory.py +61 -0
  26. comfygit_core/factories/workspace_factory.py +109 -0
  27. comfygit_core/infrastructure/sqlite_manager.py +156 -0
  28. comfygit_core/integrations/__init__.py +7 -0
  29. comfygit_core/integrations/uv_command.py +318 -0
  30. comfygit_core/logging/logging_config.py +15 -0
  31. comfygit_core/managers/environment_git_orchestrator.py +316 -0
  32. comfygit_core/managers/environment_model_manager.py +296 -0
  33. comfygit_core/managers/export_import_manager.py +116 -0
  34. comfygit_core/managers/git_manager.py +667 -0
  35. comfygit_core/managers/model_download_manager.py +252 -0
  36. comfygit_core/managers/model_symlink_manager.py +166 -0
  37. comfygit_core/managers/node_manager.py +1378 -0
  38. comfygit_core/managers/pyproject_manager.py +1321 -0
  39. comfygit_core/managers/user_content_symlink_manager.py +436 -0
  40. comfygit_core/managers/uv_project_manager.py +569 -0
  41. comfygit_core/managers/workflow_manager.py +1944 -0
  42. comfygit_core/models/civitai.py +432 -0
  43. comfygit_core/models/commit.py +18 -0
  44. comfygit_core/models/environment.py +293 -0
  45. comfygit_core/models/exceptions.py +378 -0
  46. comfygit_core/models/manifest.py +132 -0
  47. comfygit_core/models/node_mapping.py +201 -0
  48. comfygit_core/models/protocols.py +248 -0
  49. comfygit_core/models/registry.py +63 -0
  50. comfygit_core/models/shared.py +356 -0
  51. comfygit_core/models/sync.py +42 -0
  52. comfygit_core/models/system.py +204 -0
  53. comfygit_core/models/workflow.py +914 -0
  54. comfygit_core/models/workspace_config.py +71 -0
  55. comfygit_core/py.typed +0 -0
  56. comfygit_core/repositories/migrate_paths.py +49 -0
  57. comfygit_core/repositories/model_repository.py +958 -0
  58. comfygit_core/repositories/node_mappings_repository.py +246 -0
  59. comfygit_core/repositories/workflow_repository.py +57 -0
  60. comfygit_core/repositories/workspace_config_repository.py +121 -0
  61. comfygit_core/resolvers/global_node_resolver.py +459 -0
  62. comfygit_core/resolvers/model_resolver.py +250 -0
  63. comfygit_core/services/import_analyzer.py +218 -0
  64. comfygit_core/services/model_downloader.py +422 -0
  65. comfygit_core/services/node_lookup_service.py +251 -0
  66. comfygit_core/services/registry_data_manager.py +161 -0
  67. comfygit_core/strategies/__init__.py +4 -0
  68. comfygit_core/strategies/auto.py +72 -0
  69. comfygit_core/strategies/confirmation.py +69 -0
  70. comfygit_core/utils/comfyui_ops.py +125 -0
  71. comfygit_core/utils/common.py +164 -0
  72. comfygit_core/utils/conflict_parser.py +232 -0
  73. comfygit_core/utils/dependency_parser.py +231 -0
  74. comfygit_core/utils/download.py +216 -0
  75. comfygit_core/utils/environment_cleanup.py +111 -0
  76. comfygit_core/utils/filesystem.py +178 -0
  77. comfygit_core/utils/git.py +1184 -0
  78. comfygit_core/utils/input_signature.py +145 -0
  79. comfygit_core/utils/model_categories.py +52 -0
  80. comfygit_core/utils/pytorch.py +71 -0
  81. comfygit_core/utils/requirements.py +211 -0
  82. comfygit_core/utils/retry.py +242 -0
  83. comfygit_core/utils/symlink_utils.py +119 -0
  84. comfygit_core/utils/system_detector.py +258 -0
  85. comfygit_core/utils/uuid.py +28 -0
  86. comfygit_core/utils/uv_error_handler.py +158 -0
  87. comfygit_core/utils/version.py +73 -0
  88. comfygit_core/utils/workflow_hash.py +90 -0
  89. comfygit_core/validation/resolution_tester.py +297 -0
  90. comfygit_core-0.2.0.dist-info/METADATA +939 -0
  91. comfygit_core-0.2.0.dist-info/RECORD +93 -0
  92. comfygit_core-0.2.0.dist-info/WHEEL +4 -0
  93. comfygit_core-0.2.0.dist-info/licenses/LICENSE.txt +661 -0
comfygit_core/caching/workflow_cache.py
@@ -0,0 +1,797 @@
+ """Persistent cache for workflow analysis AND resolution results.
+ 
+ Provides SQLite-backed caching with session optimization and smart
+ invalidation based on resolution context changes.
+ """
+ import json
+ import time
+ from dataclasses import asdict
+ from importlib.metadata import version
+ from pathlib import Path
+ from typing import TYPE_CHECKING
+ 
+ from ..infrastructure.sqlite_manager import SQLiteManager
+ from ..logging.logging_config import get_logger
+ from ..models.workflow import WorkflowDependencies, ResolutionResult
+ from ..utils.workflow_hash import compute_workflow_hash
+ 
+ def _get_version() -> str:
+     """Get comfygit_core version."""
+     try:
+         return version('comfygit-core')
+     except Exception:
+         return "0.0.0"  # Fallback for development
+ 
+ if TYPE_CHECKING:
+     from ..repositories.model_repository import ModelRepository
+     from ..managers.pyproject_manager import PyprojectManager
+     from ..repositories.workspace_config_repository import WorkspaceConfigRepository
+ 
+ logger = get_logger(__name__)
+ 
+ # Bump when DB schema OR resolution format changes
+ # Breaking changes requiring version bump:
+ #   - Database: Add/remove/rename columns
+ #   - Resolution: Change node ID format (e.g., subgraph scoping), WorkflowNodeWidgetRef structure, etc.
+ # Migration: Wipes cache and rebuilds (cache is ephemeral)
+ SCHEMA_VERSION = 4  # Bumped for models_sync_time column (cache invalidation on model index changes)
+ 
+ 
+ class CachedWorkflowAnalysis:
+     """Container for cached workflow data."""
+     def __init__(
+         self,
+         dependencies: WorkflowDependencies,
+         resolution: ResolutionResult | None = None,
+         needs_reresolution: bool = False
+     ):
+         self.dependencies = dependencies
+         self.resolution = resolution
+         self.needs_reresolution = needs_reresolution
+ 
+ 
+ class WorkflowCacheRepository:
+     """Workflow analysis and resolution cache with smart invalidation.
+ 
+     Lookup phases:
+     1. Session cache (in-memory, same CLI invocation)
+     2. Workflow mtime + size fast path (~1µs)
+     3. Content hash fallback (~20ms)
+     4. Pyproject mtime fast-reject path (~1µs)
+     5. Resolution context hash check (~7ms)
+     """
+ 
+     def __init__(
+         self,
+         db_path: Path,
+         pyproject_manager: "PyprojectManager | None" = None,
+         model_repository: "ModelRepository | None" = None,
+         workspace_config_manager: "WorkspaceConfigRepository | None" = None
+     ):
+         """Initialize workflow cache repository.
+ 
+         Args:
+             db_path: Path to SQLite database file
+             pyproject_manager: Manager for pyproject.toml access (for context hashing)
+             model_repository: Model repository (for context hashing)
+             workspace_config_manager: Workspace config for model sync timestamp (for context hashing)
+         """
+         self.db_path = db_path
+         self.sqlite = SQLiteManager(db_path)
+         self.pyproject_manager = pyproject_manager
+         self.model_repository = model_repository
+         self.workspace_config_manager = workspace_config_manager
+         self._session_cache: dict[str, CachedWorkflowAnalysis] = {}
+ 
+         # Ensure schema exists
+         self._ensure_schema()
+ 
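For orientation, a minimal sketch of the intended call pattern around get()/set() (the repository API is as defined in this file; the db/workflow paths and the analyze_workflow/resolve_workflow helpers are hypothetical stand-ins):

from pathlib import Path

cache = WorkflowCacheRepository(db_path=Path(".comfygit/workflow_cache.db"))  # hypothetical path
wf_path = Path("workflows/portrait.json")                                     # hypothetical workflow
pyproject = Path("pyproject.toml")

cached = cache.get("default", "portrait", wf_path, pyproject_path=pyproject)
if cached is None:
    # Full miss: re-analyze and re-resolve, then cache both
    deps = analyze_workflow(wf_path)         # hypothetical helper
    resolution = resolve_workflow(deps)      # hypothetical helper
    cache.set("default", "portrait", wf_path, deps, resolution, pyproject_path=pyproject)
elif cached.needs_reresolution:
    # Partial hit: the analysis is reusable, the resolution is stale
    resolution = resolve_workflow(cached.dependencies)
    cache.set("default", "portrait", wf_path, cached.dependencies, resolution, pyproject_path=pyproject)
else:
    # Full hit: both halves served from cache
    resolution = cached.resolution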
+     def _ensure_schema(self) -> None:
+         """Create database schema if needed."""
+         # Create schema info table
+         self.sqlite.create_table("""
+             CREATE TABLE IF NOT EXISTS schema_info (
+                 version INTEGER PRIMARY KEY
+             )
+         """)
+ 
+         # Check version and migrate if needed
+         current_version = self._get_schema_version()
+         if current_version != SCHEMA_VERSION:
+             self._migrate_schema(current_version, SCHEMA_VERSION)
+         else:
+             # Version matches - still ensure tables exist (idempotent)
+             self._create_v2_schema()
+ 
+     def _get_schema_version(self) -> int:
+         """Get current schema version.
+ 
+         Returns:
+             Schema version (0 if not initialized)
+         """
+         results = self.sqlite.execute_query("SELECT version FROM schema_info")
+         if not results:
+             return 0
+         return results[0]['version']
+ 
+     def _create_v2_schema(self) -> None:
+         """Create v2 schema tables and indices."""
+         self.sqlite.create_table("""
+             CREATE TABLE IF NOT EXISTS workflow_cache (
+                 workflow_name TEXT NOT NULL,
+                 environment_name TEXT NOT NULL,
+                 workflow_hash TEXT NOT NULL,
+                 workflow_mtime REAL NOT NULL,
+                 workflow_size INTEGER NOT NULL,
+                 resolution_context_hash TEXT NOT NULL,
+                 pyproject_mtime REAL NOT NULL,
+                 models_sync_time TEXT,
+                 comfygit_version TEXT NOT NULL,
+                 dependencies_json TEXT NOT NULL,
+                 resolution_json TEXT,
+                 cached_at INTEGER NOT NULL,
+                 PRIMARY KEY (environment_name, workflow_name)
+             )
+         """)
+ 
+         self.sqlite.create_table("""
+             CREATE INDEX IF NOT EXISTS idx_workflow_hash
+             ON workflow_cache(environment_name, workflow_hash)
+         """)
+ 
+         self.sqlite.create_table("""
+             CREATE INDEX IF NOT EXISTS idx_resolution_context
+             ON workflow_cache(environment_name, resolution_context_hash)
+         """)
+ 
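To make the fast-path semantics concrete, a standalone sqlite3 sketch against a trimmed copy of this table (only the columns the mtime+size path touches; all values invented):

import sqlite3
import time

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE workflow_cache (
        workflow_name TEXT NOT NULL,
        environment_name TEXT NOT NULL,
        workflow_hash TEXT NOT NULL,
        workflow_mtime REAL NOT NULL,
        workflow_size INTEGER NOT NULL,
        cached_at INTEGER NOT NULL,
        PRIMARY KEY (environment_name, workflow_name)
    )
""")

# INSERT OR REPLACE keyed on the primary key: re-caching a workflow
# overwrites its previous row rather than accumulating stale entries.
conn.execute(
    "INSERT OR REPLACE INTO workflow_cache VALUES (?, ?, ?, ?, ?, ?)",
    ("portrait", "default", "abc123", 1700000000.0, 4096, int(time.time())),
)

# Fast path: a primary-key lookup plus mtime/size equality - no hashing needed.
hit = conn.execute(
    "SELECT workflow_hash FROM workflow_cache "
    "WHERE environment_name = ? AND workflow_name = ? "
    "AND workflow_mtime = ? AND workflow_size = ?",
    ("default", "portrait", 1700000000.0, 4096),
).fetchone()
print(hit)  # ('abc123',)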
+     def _migrate_schema(self, from_version: int, to_version: int) -> None:
+         """Migrate database schema between versions.
+ 
+         Args:
+             from_version: Current schema version
+             to_version: Target schema version
+         """
+         if from_version == to_version:
+             return
+ 
+         logger.info(f"Migrating workflow cache schema v{from_version} → v{to_version}")
+ 
+         # Drop and recreate (cache is ephemeral)
+         self.sqlite.execute_write("DROP TABLE IF EXISTS workflow_cache")
+         self.sqlite.execute_write("DROP INDEX IF EXISTS idx_workflow_hash")
+         self.sqlite.execute_write("DROP INDEX IF EXISTS idx_resolution_context")
+ 
+         # Create v2 schema
+         self._create_v2_schema()
+ 
+         # Update version
+         self.sqlite.execute_write("DELETE FROM schema_info")
+         self.sqlite.execute_write("INSERT INTO schema_info (version) VALUES (?)", (to_version,))
+ 
+         logger.info("Schema migration complete")
+ 
+     def get(
+         self,
+         env_name: str,
+         workflow_name: str,
+         workflow_path: Path,
+         pyproject_path: Path | None = None
+     ) -> CachedWorkflowAnalysis | None:
+         """Get cached workflow analysis + resolution with smart invalidation.
+ 
+         Uses multi-phase lookup:
+         1. Session cache (instant)
+         2. Workflow mtime + size match (fast)
+         3. Content hash fallback (slow)
+         4. Pyproject mtime fast-reject (instant)
+         5. Resolution context hash check (moderate)
+ 
+         Args:
+             env_name: Environment name
+             workflow_name: Workflow name
+             workflow_path: Path to workflow file
+             pyproject_path: Path to pyproject.toml (for context checking)
+ 
+         Returns:
+             CachedWorkflowAnalysis with dependencies and resolution, or None on a cache miss
+         """
+         import time
+         start_time = time.perf_counter()
+ 
+         # TODO: will not work for workflows in subdirectories
+         session_key = f"{env_name}:{workflow_name}"
+ 
+         # Phase 1: Check session cache
+         if session_key in self._session_cache:
+             elapsed = (time.perf_counter() - start_time) * 1000
+             logger.debug(f"[CACHE] Session HIT for '{workflow_name}' ({elapsed:.2f}ms)")
+             return self._session_cache[session_key]
+ 
+         # Get workflow file stats
+         try:
+             stat = workflow_path.stat()
+             mtime = stat.st_mtime
+             size = stat.st_size
+         except OSError as e:
+             logger.warning(f"Failed to stat workflow file {workflow_path}: {e}")
+             return None
+ 
+         # Phase 2: Fast path - mtime + size match
+         query_start = time.perf_counter()
+         query = """
+             SELECT workflow_hash, dependencies_json, resolution_json,
+                    resolution_context_hash, pyproject_mtime, models_sync_time, comfygit_version
+             FROM workflow_cache
+             WHERE environment_name = ? AND workflow_name = ?
+               AND workflow_mtime = ? AND workflow_size = ?
+         """
+         results = self.sqlite.execute_query(query, (env_name, workflow_name, mtime, size))
+         query_elapsed = (time.perf_counter() - query_start) * 1000
+ 
+         cached_row = None
+         if results:
+             cached_row = results[0]
+             logger.debug(f"[CACHE] DB query (mtime+size) HIT for '{workflow_name}' ({query_elapsed:.2f}ms)")
+         else:
+             logger.debug(f"[CACHE] DB query (mtime+size) MISS for '{workflow_name}' ({query_elapsed:.2f}ms)")
+             # Phase 3: Content hash fallback
+             hash_start = time.perf_counter()
+             try:
+                 current_hash = compute_workflow_hash(workflow_path)
+             except Exception as e:
+                 logger.warning(f"Failed to compute workflow hash for {workflow_path}: {e}")
+                 return None
+             hash_elapsed = (time.perf_counter() - hash_start) * 1000
+             logger.debug(f"[CACHE] Hash computation took {hash_elapsed:.2f}ms")
+ 
+             query_start = time.perf_counter()
+             query = """
+                 SELECT workflow_hash, dependencies_json, resolution_json,
+                        resolution_context_hash, pyproject_mtime, models_sync_time, comfygit_version
+                 FROM workflow_cache
+                 WHERE environment_name = ? AND workflow_name = ?
+                   AND workflow_hash = ?
+             """
+             results = self.sqlite.execute_query(query, (env_name, workflow_name, current_hash))
+             query_elapsed = (time.perf_counter() - query_start) * 1000
+ 
+             if results:
+                 cached_row = results[0]
+                 logger.debug(f"[CACHE] DB query (content hash) HIT for '{workflow_name}' ({query_elapsed:.2f}ms)")
+ 
+         if not cached_row:
+             elapsed = (time.perf_counter() - start_time) * 1000
+             logger.debug(f"[CACHE] MISS (workflow content changed) for '{workflow_name}' ({elapsed:.2f}ms total)")
+             return None
+ 
+         # Deserialize dependencies (always valid if workflow content matches)
+         deser_start = time.perf_counter()
+         dependencies = self._deserialize_dependencies(cached_row['dependencies_json'])
+         deser_elapsed = (time.perf_counter() - deser_start) * 1000
+         logger.debug(f"[CACHE] Deserialization took {deser_elapsed:.2f}ms")
+ 
+         # Check version match
+         if cached_row['comfygit_version'] != _get_version():
+             elapsed = (time.perf_counter() - start_time) * 1000
+             logger.debug(f"[CACHE] PARTIAL HIT (version mismatch) for '{workflow_name}' ({elapsed:.2f}ms total)")
+             cached = CachedWorkflowAnalysis(
+                 dependencies=dependencies,
+                 resolution=None,
+                 needs_reresolution=True
+             )
+             self._session_cache[session_key] = cached
+             return cached
+ 
+         # Phase 4: Check resolution context
+         if pyproject_path and pyproject_path.exists():
+             pyproject_mtime = pyproject_path.stat().st_mtime
+             cached_pyproject_mtime = cached_row['pyproject_mtime']
+             mtime_diff = abs(pyproject_mtime - cached_pyproject_mtime)
+ 
+             logger.debug(f"[CACHE] Pyproject mtime check for '{workflow_name}': current={pyproject_mtime:.6f}, cached={cached_pyproject_mtime:.6f}, diff={mtime_diff:.6f}s")
+ 
+             # Fast reject: if pyproject hasn't been touched, context can't have changed
+             # UNLESS the model index has changed (checked via models_sync_time)
+             if pyproject_mtime == cached_pyproject_mtime:
+                 # Check if model index has changed since cache was created
+                 cached_sync_time = cached_row.get('models_sync_time')
+                 current_sync_time = None
+ 
+                 if self.workspace_config_manager:
+                     try:
+                         config = self.workspace_config_manager.load()
+                         if config.global_model_directory and config.global_model_directory.last_sync:
+                             current_sync_time = config.global_model_directory.last_sync
+                     except Exception as e:
+                         logger.warning(f"Failed to check current model sync time: {e}")
+ 
+                 # Compare sync times (both might be None, which is fine)
+                 if cached_sync_time != current_sync_time:
+                     # Model index changed - invalidate cache
+                     logger.debug(
+                         f"[CACHE] Model index changed for '{workflow_name}': "
+                         f"cached_sync={cached_sync_time}, current_sync={current_sync_time}"
+                     )
+                     cached = CachedWorkflowAnalysis(
+                         dependencies=dependencies,
+                         resolution=None,
+                         needs_reresolution=True
+                     )
+                     self._session_cache[session_key] = cached
+                     elapsed = (time.perf_counter() - start_time) * 1000
+                     logger.debug(f"[CACHE] PARTIAL HIT (model index changed) for '{workflow_name}' ({elapsed:.2f}ms total)")
+                     return cached
+ 
+                 # Nothing changed - full cache hit
+                 resolution = self._deserialize_resolution(cached_row['resolution_json']) if cached_row['resolution_json'] else None
+                 cached = CachedWorkflowAnalysis(
+                     dependencies=dependencies,
+                     resolution=resolution,
+                     needs_reresolution=False
+                 )
+                 self._session_cache[session_key] = cached
+                 elapsed = (time.perf_counter() - start_time) * 1000
+                 logger.debug(f"[CACHE] FULL HIT (pyproject unchanged, model index unchanged) for '{workflow_name}' ({elapsed:.2f}ms total)")
+                 return cached
+ 
+             logger.debug(f"[CACHE] Pyproject mtime changed for '{workflow_name}', computing context hash...")
+ 
+             # Pyproject changed - check whether the change affects THIS workflow
+             if self.pyproject_manager and self.model_repository:
+                 context_start = time.perf_counter()
+                 current_context_hash = self._compute_resolution_context_hash(
+                     dependencies,
+                     workflow_name
+                 )
+                 context_elapsed = (time.perf_counter() - context_start) * 1000
+                 logger.debug(f"[CACHE] Context hash computation took {context_elapsed:.2f}ms for '{workflow_name}'")
+ 
+                 if current_context_hash == cached_row['resolution_context_hash']:
+                     # Pyproject changed but not for THIS workflow - still valid
+                     # Update pyproject_mtime to avoid recomputing context hash next time
+                     self._update_pyproject_mtime(env_name, workflow_name, pyproject_mtime)
+ 
+                     resolution = self._deserialize_resolution(cached_row['resolution_json']) if cached_row['resolution_json'] else None
+                     cached = CachedWorkflowAnalysis(
+                         dependencies=dependencies,
+                         resolution=resolution,
+                         needs_reresolution=False
+                     )
+                     self._session_cache[session_key] = cached
+                     elapsed = (time.perf_counter() - start_time) * 1000
+                     logger.debug(f"[CACHE] FULL HIT (context unchanged) for '{workflow_name}' ({elapsed:.2f}ms total)")
+                     return cached
+                 else:
+                     elapsed = (time.perf_counter() - start_time) * 1000
+                     logger.debug(f"[CACHE] PARTIAL HIT (context changed) for '{workflow_name}' - need re-resolution ({elapsed:.2f}ms total)")
+ 
+         # Context changed or can't verify - return dependencies but signal re-resolution needed
+         cached = CachedWorkflowAnalysis(
+             dependencies=dependencies,
+             resolution=None,
+             needs_reresolution=True
+         )
+         self._session_cache[session_key] = cached
+         elapsed = (time.perf_counter() - start_time) * 1000
+         logger.debug(f"[CACHE] PARTIAL HIT (context verification failed) for '{workflow_name}' ({elapsed:.2f}ms total)")
+         return cached
+ 
+     def set(
+         self,
+         env_name: str,
+         workflow_name: str,
+         workflow_path: Path,
+         dependencies: WorkflowDependencies,
+         resolution: ResolutionResult | None = None,
+         pyproject_path: Path | None = None
+     ) -> None:
+         """Store workflow analysis and resolution in cache.
+ 
+         Args:
+             env_name: Environment name
+             workflow_name: Workflow name
+             workflow_path: Path to workflow file
+             dependencies: Analysis result to cache
+             resolution: Resolution result to cache (optional)
+             pyproject_path: Path to pyproject.toml (for context hash)
+         """
+         # Compute workflow hash
+         try:
+             workflow_hash = compute_workflow_hash(workflow_path)
+         except Exception as e:
+             logger.warning(f"Failed to compute workflow hash, skipping cache: {e}")
+             return
+ 
+         # Get workflow file stats
+         try:
+             stat = workflow_path.stat()
+             workflow_mtime = stat.st_mtime
+             workflow_size = stat.st_size
+         except OSError as e:
+             logger.warning(f"Failed to stat workflow file, skipping cache: {e}")
+             return
+ 
+         # Get pyproject mtime
+         pyproject_mtime = 0.0
+         if pyproject_path and pyproject_path.exists():
+             try:
+                 pyproject_mtime = pyproject_path.stat().st_mtime
+             except OSError:
+                 pass
+ 
+         # Compute resolution context hash
+         resolution_context_hash = ""
+         if self.pyproject_manager and self.model_repository:
+             resolution_context_hash = self._compute_resolution_context_hash(
+                 dependencies,
+                 workflow_name
+             )
+ 
+         # Get models_sync_time for cache invalidation check
+         models_sync_time = None
+         if self.workspace_config_manager:
+             try:
+                 config = self.workspace_config_manager.load()
+                 if config.global_model_directory and config.global_model_directory.last_sync:
+                     models_sync_time = config.global_model_directory.last_sync
+             except Exception:
+                 pass
+ 
+         # Serialize data
+         dependencies_json = self._serialize_dependencies(dependencies)
+         resolution_json = self._serialize_resolution(resolution) if resolution else None
+         comfygit_version = _get_version()
+ 
+         # Store in SQLite
+         query = """
+             INSERT OR REPLACE INTO workflow_cache
+             (environment_name, workflow_name, workflow_hash, workflow_mtime,
+              workflow_size, resolution_context_hash, pyproject_mtime, models_sync_time,
+              comfygit_version, dependencies_json, resolution_json, cached_at)
+             VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+         """
+         cached_at = int(time.time())
+         self.sqlite.execute_write(
+             query,
+             (env_name, workflow_name, workflow_hash, workflow_mtime, workflow_size,
+              resolution_context_hash, pyproject_mtime, models_sync_time, comfygit_version,
+              dependencies_json, resolution_json, cached_at)
+         )
+ 
+         # Update session cache
+         session_key = f"{env_name}:{workflow_name}"
+         self._session_cache[session_key] = CachedWorkflowAnalysis(
+             dependencies=dependencies,
+             resolution=resolution,
+             needs_reresolution=False
+         )
+ 
+         logger.debug(f"Cached workflow '{workflow_name}' (hash={workflow_hash}, context={resolution_context_hash})")
+ 
+     def invalidate(self, env_name: str, workflow_name: str | None = None) -> None:
+         """Invalidate cache entries.
+ 
+         Args:
+             env_name: Environment name
+             workflow_name: Optional workflow name (if None, invalidate entire environment)
+         """
+         if workflow_name:
+             # Invalidate specific workflow
+             query = "DELETE FROM workflow_cache WHERE environment_name = ? AND workflow_name = ?"
+             self.sqlite.execute_write(query, (env_name, workflow_name))
+ 
+             # Clear from session cache
+             session_key = f"{env_name}:{workflow_name}"
+             self._session_cache.pop(session_key, None)
+ 
+             logger.debug(f"Invalidated cache for workflow '{workflow_name}'")
+         else:
+             # Invalidate entire environment
+             query = "DELETE FROM workflow_cache WHERE environment_name = ?"
+             self.sqlite.execute_write(query, (env_name,))
+ 
+             # Clear matching session cache entries
+             keys_to_remove = [k for k in self._session_cache if k.startswith(f"{env_name}:")]
+             for key in keys_to_remove:
+                 del self._session_cache[key]
+ 
+             logger.debug(f"Invalidated cache for environment '{env_name}'")
+ 
+     def _update_pyproject_mtime(self, env_name: str, workflow_name: str, new_mtime: float) -> None:
+         """Update pyproject_mtime in cache after successful context check.
+ 
+         This optimization avoids recomputing the context hash on subsequent runs
+         when pyproject hasn't changed.
+ 
+         Args:
+             env_name: Environment name
+             workflow_name: Workflow name
+             new_mtime: New pyproject mtime to store
+         """
+         query = """
+             UPDATE workflow_cache
+             SET pyproject_mtime = ?
+             WHERE environment_name = ? AND workflow_name = ?
+         """
+         self.sqlite.execute_write(query, (new_mtime, env_name, workflow_name))
+         logger.debug(f"Updated pyproject_mtime for '{workflow_name}' to {new_mtime}")
+ 
+     def _serialize_dependencies(self, dependencies: WorkflowDependencies) -> str:
+         """Serialize WorkflowDependencies to JSON string.
+ 
+         Args:
+             dependencies: Dependencies object
+ 
+         Returns:
+             JSON string
+         """
+         # Convert to dict and serialize
+         deps_dict = asdict(dependencies)
+         return json.dumps(deps_dict)
+ 
+     def _deserialize_dependencies(self, dependencies_json: str) -> WorkflowDependencies:
+         """Deserialize JSON string to WorkflowDependencies.
+ 
+         Args:
+             dependencies_json: JSON string
+ 
+         Returns:
+             WorkflowDependencies object
+         """
+         from ..models.workflow import WorkflowNode, WorkflowNodeWidgetRef
+ 
+         deps_dict = json.loads(dependencies_json)
+ 
+         # Reconstruct nested dataclasses
+         builtin_nodes = [WorkflowNode(**node) for node in deps_dict.get('builtin_nodes', [])]
+         non_builtin_nodes = [WorkflowNode(**node) for node in deps_dict.get('non_builtin_nodes', [])]
+         found_models = [WorkflowNodeWidgetRef(**ref) for ref in deps_dict.get('found_models', [])]
+ 
+         return WorkflowDependencies(
+             workflow_name=deps_dict['workflow_name'],
+             builtin_nodes=builtin_nodes,
+             non_builtin_nodes=non_builtin_nodes,
+             found_models=found_models
+         )
+ 
+     def _serialize_resolution(self, resolution: ResolutionResult) -> str:
+         """Serialize ResolutionResult to JSON string.
+ 
+         Args:
+             resolution: Resolution result object
+ 
+         Returns:
+             JSON string
+         """
+         from pathlib import Path
+ 
+         def convert_paths(obj):
+             """Recursively convert Path objects to strings for JSON serialization."""
+             if isinstance(obj, Path):
+                 return str(obj)
+             elif isinstance(obj, dict):
+                 return {k: convert_paths(v) for k, v in obj.items()}
+             elif isinstance(obj, (list, tuple)):
+                 return [convert_paths(item) for item in obj]
+             return obj
+ 
+         res_dict = asdict(resolution)
+         res_dict = convert_paths(res_dict)
+         return json.dumps(res_dict)
+ 
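A toy round-trip illustrating why these helpers exist: dataclasses.asdict flattens nested dataclasses into plain dicts (so loading requires manual reconstruction, as _deserialize_dependencies and _deserialize_resolution do), and Path objects are not JSON-serializable without conversion. ToyRef/ToyResult are stand-ins, not types from this package:

import json
from dataclasses import dataclass, asdict
from pathlib import Path

@dataclass
class ToyRef:
    node_id: str
    widget_index: int

@dataclass
class ToyResult:
    reference: ToyRef
    target_path: Path | None

result = ToyResult(ToyRef("4", 0), Path("models/checkpoints/sd15.safetensors"))

d = asdict(result)                        # nested dataclass -> plain dict
# json.dumps(d) would raise TypeError here: Path is not JSON serializable
d["target_path"] = str(d["target_path"])
blob = json.dumps(d)

loaded = json.loads(blob)
restored = ToyResult(
    reference=ToyRef(**loaded["reference"]),   # rebuild the nested dataclass by hand
    target_path=Path(loaded["target_path"]) if loaded["target_path"] else None,
)
assert restored == result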
+     def _deserialize_resolution(self, resolution_json: str) -> ResolutionResult:
+         """Deserialize JSON string to ResolutionResult.
+ 
+         Args:
+             resolution_json: JSON string
+ 
+         Returns:
+             ResolutionResult object
+         """
+         from pathlib import Path
+         from ..models.workflow import (
+             ResolvedNodePackage, ResolvedModel, DownloadResult,
+             WorkflowNode, WorkflowNodeWidgetRef
+         )
+         from ..models.shared import ModelWithLocation
+         from ..models.node_mapping import GlobalNodePackage, GlobalNodePackageVersion
+ 
+         res_dict = json.loads(resolution_json)
+ 
+         # Helper to reconstruct GlobalNodePackage with nested versions
+         def reconstruct_package_data(pkg_data: dict | None) -> GlobalNodePackage | None:
+             if pkg_data is None:
+                 return None
+             # Reconstruct nested GlobalNodePackageVersion objects
+             versions = pkg_data.get('versions', {})
+             if versions:
+                 versions = {
+                     k: GlobalNodePackageVersion(**v) if isinstance(v, dict) else v
+                     for k, v in versions.items()
+                 }
+             return GlobalNodePackage(
+                 **{**pkg_data, 'versions': versions}
+             )
+ 
+         # Reconstruct ResolvedNodePackage with nested package_data
+         def reconstruct_node_package(node_dict: dict) -> ResolvedNodePackage:
+             pkg_data = node_dict.get('package_data')
+             return ResolvedNodePackage(
+                 **{**node_dict, 'package_data': reconstruct_package_data(pkg_data)}
+             )
+ 
+         # Reconstruct ResolvedModel with nested ModelWithLocation
+         def reconstruct_resolved_model(model_dict: dict) -> ResolvedModel:
+             reference = WorkflowNodeWidgetRef(**model_dict['reference'])
+             resolved_model = None
+             if model_dict.get('resolved_model'):
+                 resolved_model = ModelWithLocation(**model_dict['resolved_model'])
+             target_path = None
+             if model_dict.get('target_path'):
+                 target_path = Path(model_dict['target_path'])
+ 
+             return ResolvedModel(
+                 workflow=model_dict['workflow'],
+                 reference=reference,
+                 resolved_model=resolved_model,
+                 model_source=model_dict.get('model_source'),
+                 is_optional=model_dict.get('is_optional', False),
+                 match_type=model_dict.get('match_type'),
+                 match_confidence=model_dict.get('match_confidence', 1.0),
+                 target_path=target_path,
+                 needs_path_sync=model_dict.get('needs_path_sync', False)
+             )
+ 
+         # Reconstruct nested dataclasses
+         nodes_resolved = [reconstruct_node_package(node) for node in res_dict.get('nodes_resolved', [])]
+         nodes_unresolved = [WorkflowNode(**node) for node in res_dict.get('nodes_unresolved', [])]
+         nodes_ambiguous = [
+             [reconstruct_node_package(pkg) for pkg in group]
+             for group in res_dict.get('nodes_ambiguous', [])
+         ]
+ 
+         models_resolved = [reconstruct_resolved_model(model) for model in res_dict.get('models_resolved', [])]
+         models_unresolved = [WorkflowNodeWidgetRef(**ref) for ref in res_dict.get('models_unresolved', [])]
+         models_ambiguous = [
+             [reconstruct_resolved_model(model) for model in group]
+             for group in res_dict.get('models_ambiguous', [])
+         ]
+ 
+         download_results = [DownloadResult(**dl) for dl in res_dict.get('download_results', [])]
+ 
+         return ResolutionResult(
+             workflow_name=res_dict['workflow_name'],
+             nodes_resolved=nodes_resolved,
+             nodes_unresolved=nodes_unresolved,
+             nodes_ambiguous=nodes_ambiguous,
+             models_resolved=models_resolved,
+             models_unresolved=models_unresolved,
+             models_ambiguous=models_ambiguous,
+             download_results=download_results
+         )
+ 
+     def _compute_resolution_context_hash(
+         self,
+         dependencies: WorkflowDependencies,
+         workflow_name: str
+     ) -> str:
+         """Compute workflow-specific resolution context hash.
+ 
+         Only includes pyproject/model data that affects THIS workflow's resolution.
+ 
+         Args:
+             dependencies: Workflow dependencies
+             workflow_name: Workflow name
+ 
+         Returns:
+             16-character hex hash of resolution context
+         """
+         if not self.pyproject_manager or not self.model_repository:
+             return ""
+ 
+         import blake3
+         import time
+ 
+         context_start = time.perf_counter()
+         context = {}
+ 
+         # 1. Custom node mappings for nodes in THIS workflow
+         step_start = time.perf_counter()
+         node_types = {n.type for n in dependencies.non_builtin_nodes}
+         custom_map = self.pyproject_manager.workflows.get_custom_node_map(workflow_name)
+         context["custom_mappings"] = {
+             node_type: custom_map[node_type]
+             for node_type in node_types
+             if node_type in custom_map
+         }
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 1 (custom mappings) took {step_elapsed:.2f}ms")
+ 
+         # 2. Declared packages for nodes THIS workflow uses
+         # Use authoritative workflow.nodes list instead of inferring from workflow content
+         step_start = time.perf_counter()
+ 
+         # Read nodes list from workflow config (written by apply_resolution)
+         workflow_config = self.pyproject_manager.workflows.get_all_with_resolutions().get(workflow_name, {})
+         relevant_packages = set(workflow_config.get('nodes', []))
+ 
+         # Get global package metadata
+         declared_packages = self.pyproject_manager.nodes.get_existing()
+ 
+         context["declared_packages"] = {
+             pkg: {
+                 "version": declared_packages[pkg].version,
+                 "repository": declared_packages[pkg].repository,
+                 "source": declared_packages[pkg].source
+             }
+             for pkg in relevant_packages
+             if pkg in declared_packages
+         }
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 2 (declared packages) took {step_elapsed:.2f}ms")
+ 
+         # 3. Model entries from pyproject for THIS workflow
+         step_start = time.perf_counter()
+         workflow_models = self.pyproject_manager.workflows.get_workflow_models(workflow_name)
+         model_pyproject_data = {}
+         for manifest_model in workflow_models:
+             for ref in manifest_model.nodes:
+                 ref_key = f"{ref.node_id}_{ref.widget_index}"
+                 model_pyproject_data[ref_key] = {
+                     "hash": manifest_model.hash,
+                     "status": manifest_model.status,
+                     "criticality": manifest_model.criticality,
+                     "sources": manifest_model.sources,
+                     "relative_path": manifest_model.relative_path,
+                 }
+ 
+         context["workflow_models_pyproject"] = model_pyproject_data
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 3 (workflow models) took {step_elapsed:.2f}ms")
+ 
+         # 4. Model index subset (only models THIS workflow references)
+         step_start = time.perf_counter()
+         model_index_subset = {}
+         for model_ref in dependencies.found_models:
+             filename = Path(model_ref.widget_value).name
+             models = self.model_repository.find_by_filename(filename)
+             if models:
+                 model_index_subset[filename] = [m.hash for m in models]
+ 
+         context["model_index_subset"] = model_index_subset
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 4 (model index queries, {len(dependencies.found_models)} models) took {step_elapsed:.2f}ms")
+ 
+         # 5. Model index sync time (invalidate when model index changes)
+         step_start = time.perf_counter()
+         if self.workspace_config_manager:
+             try:
+                 config = self.workspace_config_manager.load()
+                 if config.global_model_directory and config.global_model_directory.last_sync:
+                     context["models_sync_time"] = config.global_model_directory.last_sync
+                 else:
+                     context["models_sync_time"] = None
+             except Exception as e:
+                 logger.warning(f"Failed to get model sync time: {e}")
+                 context["models_sync_time"] = None
+         else:
+             context["models_sync_time"] = None
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 5 (model sync time) took {step_elapsed:.2f}ms")
+ 
+         # 6. Comfygit version (global invalidator)
+         context["comfygit_version"] = _get_version()
+ 
+         # Hash the normalized context
+         step_start = time.perf_counter()
+         context_json = json.dumps(context, sort_keys=True)
+         hasher = blake3.blake3()
+         hasher.update(context_json.encode('utf-8'))
+         hash_result = hasher.hexdigest()[:16]
+         step_elapsed = (time.perf_counter() - step_start) * 1000
+         logger.debug(f"[CONTEXT] Step 6 (JSON + hash) took {step_elapsed:.2f}ms")
+ 
+         total_elapsed = (time.perf_counter() - context_start) * 1000
+         logger.debug(f"[CONTEXT] Total context hash computation: {total_elapsed:.2f}ms")
+ 
+         return hash_result
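The gist of the context hash above, reduced to a standalone sketch: serialize the workflow-relevant context with sorted keys so the JSON is deterministic, then take the first 16 hex characters of a BLAKE3 digest. Requires the third-party blake3 package; the context values here are invented:

import json
import blake3

context = {
    "custom_mappings": {"MyNode": "my-node-pack"},          # invented example data
    "declared_packages": {"my-node-pack": {"version": "1.2.0"}},
    "models_sync_time": None,
    "comfygit_version": "0.2.0",
}

context_json = json.dumps(context, sort_keys=True)   # canonical form: key order is fixed
digest = blake3.blake3(context_json.encode("utf-8")).hexdigest()[:16]
print(digest)  # stable for as long as the context is unchanged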