crackerjack 0.31.18__py3-none-any.whl → 0.33.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic. Click here for more details.
- crackerjack/CLAUDE.md +71 -452
- crackerjack/__main__.py +1 -1
- crackerjack/agents/refactoring_agent.py +67 -46
- crackerjack/cli/handlers.py +7 -7
- crackerjack/config/hooks.py +36 -6
- crackerjack/core/async_workflow_orchestrator.py +2 -2
- crackerjack/core/enhanced_container.py +67 -0
- crackerjack/core/phase_coordinator.py +211 -44
- crackerjack/core/workflow_orchestrator.py +723 -72
- crackerjack/dynamic_config.py +1 -25
- crackerjack/managers/publish_manager.py +22 -5
- crackerjack/managers/test_command_builder.py +19 -13
- crackerjack/managers/test_manager.py +15 -4
- crackerjack/mcp/server_core.py +162 -34
- crackerjack/mcp/tools/core_tools.py +1 -1
- crackerjack/mcp/tools/execution_tools.py +16 -3
- crackerjack/mcp/tools/workflow_executor.py +130 -40
- crackerjack/mixins/__init__.py +5 -0
- crackerjack/mixins/error_handling.py +214 -0
- crackerjack/models/config.py +9 -0
- crackerjack/models/protocols.py +114 -0
- crackerjack/models/task.py +3 -0
- crackerjack/security/__init__.py +1 -0
- crackerjack/security/audit.py +226 -0
- crackerjack/services/config.py +3 -2
- crackerjack/services/config_merge.py +11 -5
- crackerjack/services/coverage_ratchet.py +22 -0
- crackerjack/services/git.py +121 -22
- crackerjack/services/initialization.py +25 -9
- crackerjack/services/memory_optimizer.py +477 -0
- crackerjack/services/parallel_executor.py +474 -0
- crackerjack/services/performance_benchmarks.py +292 -577
- crackerjack/services/performance_cache.py +443 -0
- crackerjack/services/performance_monitor.py +633 -0
- crackerjack/services/security.py +63 -0
- crackerjack/services/security_logger.py +9 -1
- crackerjack/services/terminal_utils.py +0 -0
- crackerjack/tools/validate_regex_patterns.py +14 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/METADATA +2 -2
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/RECORD +43 -34
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/WHEEL +0 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.31.18.dist-info → crackerjack-0.33.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,443 @@
|
|
|
1
|
+
"""Performance-optimized caching layer for expensive operations.
|
|
2
|
+
|
|
3
|
+
This module provides intelligent caching for git operations, file system checks,
|
|
4
|
+
and command results to significantly improve workflow performance.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import builtins
|
|
9
|
+
import hashlib
|
|
10
|
+
import typing as t
|
|
11
|
+
from dataclasses import dataclass, field
|
|
12
|
+
from datetime import datetime, timedelta
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from threading import Lock
|
|
15
|
+
from weakref import WeakValueDictionary
|
|
16
|
+
|
|
17
|
+
from crackerjack.services.logging import get_logger
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
class CacheEntry:
    """A single cached value together with its bookkeeping metadata."""

    value: t.Any
    created_at: datetime
    access_count: int = 0
    last_accessed: datetime = field(default_factory=datetime.now)
    ttl_seconds: int = 300  # Default 5 minutes
    invalidation_keys: set[str] = field(default_factory=set)

    def is_expired(self) -> bool:
        """Return True once this entry's TTL has elapsed.

        A non-positive TTL marks the entry as permanent — it never expires.
        """
        if self.ttl_seconds <= 0:  # Permanent cache
            return False
        expiry = self.created_at + timedelta(seconds=self.ttl_seconds)
        return datetime.now() > expiry

    def access(self) -> t.Any:
        """Record one access (count and timestamp) and return the stored value."""
        self.access_count += 1
        self.last_accessed = datetime.now()
        return self.value
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
class CacheStats:
    """Aggregate counters describing cache effectiveness."""

    hits: int = 0
    misses: int = 0
    evictions: int = 0
    memory_usage_bytes: int = 0

    @property
    def hit_ratio(self) -> float:
        """Fraction of lookups served from cache; 0.0 when nothing was looked up."""
        lookups = self.hits + self.misses
        if lookups == 0:
            return 0.0
        return self.hits / lookups
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class PerformanceCache:
    """High-performance async cache with intelligent invalidation.

    Thread-safe: all mutation of internal state happens under a single
    ``Lock``. An optional asyncio background task periodically removes
    expired entries. Entries may be tagged with invalidation keys so
    related entries can be dropped together via :meth:`invalidate`.

    Fix over the previous revision: every code path that removes an entry
    (expiry in :meth:`get`, LRU eviction, background cleanup, and key
    overwrite in :meth:`set`) now also purges the entry's references from
    ``_invalidation_map``; previously that map grew without bound.
    """

    def __init__(
        self,
        max_memory_mb: int = 50,
        default_ttl_seconds: int = 300,
        cleanup_interval_seconds: int = 60,
    ):
        self.max_memory_bytes = max_memory_mb * 1024 * 1024
        self.default_ttl_seconds = default_ttl_seconds
        self.cleanup_interval_seconds = cleanup_interval_seconds

        self._cache: dict[str, CacheEntry] = {}
        self._lock = Lock()
        self._stats = CacheStats()
        self._logger = get_logger("crackerjack.performance_cache")
        self._cleanup_task: asyncio.Task[None] | None = None
        # Maps invalidation key -> set of cache keys tagged with it.
        self._invalidation_map: dict[str, set[str]] = {}

        # Weak reference cache for heavy objects
        self._weak_cache: WeakValueDictionary[str, t.Any] = WeakValueDictionary()

    async def start(self) -> None:
        """Start the background cleanup task (idempotent)."""
        if self._cleanup_task is None:
            self._cleanup_task = asyncio.create_task(self._cleanup_loop())
            self._logger.info("Performance cache started")

    async def stop(self) -> None:
        """Cancel the background cleanup task and wait for it to finish."""
        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass
            self._cleanup_task = None
            self._logger.info("Performance cache stopped")

    def get(
        self,
        key: str,
        default: t.Any = None,
    ) -> t.Any:
        """Return the cached value for *key*, or *default* on miss or expiry."""
        with self._lock:
            if key not in self._cache:
                self._stats.misses += 1
                return default

            entry = self._cache[key]
            if entry.is_expired():
                # Remove the stale entry AND its invalidation-map references.
                self._remove_entry_locked(key)
                self._stats.misses += 1
                self._stats.evictions += 1
                return default

            self._stats.hits += 1
            return entry.access()

    async def get_async(
        self,
        key: str,
        default: t.Any = None,
    ) -> t.Any:
        """Async version of get for compatibility."""
        return self.get(key, default)

    def set(
        self,
        key: str,
        value: t.Any,
        ttl_seconds: int | None = None,
        invalidation_keys: set[str] | None = None,
    ) -> None:
        """Store *value* under *key*, optionally tagged with invalidation keys.

        ``ttl_seconds=None`` uses the cache default; a non-positive TTL makes
        the entry permanent (see ``CacheEntry.is_expired``).
        """
        ttl = ttl_seconds if ttl_seconds is not None else self.default_ttl_seconds
        inv_keys = invalidation_keys or set()

        entry = CacheEntry(
            value=value,
            created_at=datetime.now(),
            ttl_seconds=ttl,
            invalidation_keys=inv_keys,
        )

        with self._lock:
            # Drop any previous entry first so its old invalidation-map
            # references do not linger (fixes unbounded map growth).
            if key in self._cache:
                self._remove_entry_locked(key)

            self._cache[key] = entry

            # Update invalidation map
            for inv_key in inv_keys:
                self._invalidation_map.setdefault(inv_key, set()).add(key)

            # Check memory usage and evict if needed
            self._check_memory_pressure()

    async def set_async(
        self,
        key: str,
        value: t.Any,
        ttl_seconds: int | None = None,
        # builtins.set: the plain name `set` resolves to the sibling method
        # inside this class body, so the builtin must be named explicitly.
        invalidation_keys: builtins.set[str] | None = None,
    ) -> None:
        """Async version of set for compatibility."""
        self.set(key, value, ttl_seconds, invalidation_keys)

    def invalidate(self, invalidation_key: str) -> int:
        """Remove all entries tagged with *invalidation_key*; return the count."""
        with self._lock:
            if invalidation_key not in self._invalidation_map:
                return 0

            keys_to_remove = self._invalidation_map[invalidation_key].copy()
            count = 0

            for key in keys_to_remove:
                if key in self._cache:
                    self._remove_entry_locked(key)
                    count += 1
                    self._stats.evictions += 1

            # The helper may already have deleted this map entry when it
            # emptied the set, so pop defensively.
            self._invalidation_map.pop(invalidation_key, None)

            self._logger.debug(
                f"Invalidated {count} cache entries for key: {invalidation_key}"
            )
            return count

    def clear(self) -> None:
        """Remove every cache entry and all invalidation bookkeeping."""
        with self._lock:
            count = len(self._cache)
            self._cache.clear()
            self._invalidation_map.clear()
            self._stats.evictions += count
            self._logger.info(f"Cleared {count} cache entries")

    def get_stats(self) -> CacheStats:
        """Return a snapshot of cache performance statistics."""
        with self._lock:
            stats = CacheStats(
                hits=self._stats.hits,
                misses=self._stats.misses,
                evictions=self._stats.evictions,
                memory_usage_bytes=self._estimate_memory_usage(),
            )
            return stats

    def _remove_entry_locked(self, key: str) -> None:
        """Remove *key* from the cache and purge it from the invalidation map.

        Does not touch statistics; callers account for evictions themselves.
        Caller must hold ``self._lock``.
        """
        entry = self._cache.pop(key, None)
        if entry is None:
            return
        for inv_key in entry.invalidation_keys:
            members = self._invalidation_map.get(inv_key)
            if members is not None:
                members.discard(key)
                if not members:
                    del self._invalidation_map[inv_key]

    def _estimate_memory_usage(self) -> int:
        """Estimate memory usage of cache entries.

        Simplified heuristic - in production would use a more
        sophisticated method. Caller must hold ``self._lock``.
        """
        total_size = 0
        for entry in self._cache.values():
            if isinstance(entry.value, str | bytes):
                total_size += len(entry.value)
            elif isinstance(entry.value, list | tuple):
                total_size += len(entry.value) * 100  # Rough estimate
            elif isinstance(entry.value, dict):
                total_size += len(entry.value) * 200  # Rough estimate
            else:
                total_size += 1000  # Default estimate for other objects

        return total_size

    def _check_memory_pressure(self) -> None:
        """Evict entries when the estimated size exceeds the configured cap.

        Caller must hold ``self._lock``.
        """
        if self._estimate_memory_usage() > self.max_memory_bytes:
            self._evict_lru_entries()

    def _evict_lru_entries(self) -> None:
        """Evict the least recently used quarter of entries (at least one).

        Caller must hold ``self._lock``.
        """
        if not self._cache:
            return

        # Sort entries by last access time (oldest first).
        entries_by_access = sorted(
            self._cache.items(),
            key=lambda item: item[1].last_accessed,
        )

        evict_count = max(1, len(entries_by_access) // 4)

        for key, _ in entries_by_access[:evict_count]:
            self._remove_entry_locked(key)
            self._stats.evictions += 1

        self._logger.debug(f"Evicted {evict_count} LRU cache entries")

    async def _cleanup_loop(self) -> None:
        """Background loop that periodically removes expired entries."""
        while True:
            try:
                await asyncio.sleep(self.cleanup_interval_seconds)
                self._cleanup_expired_entries()
            except asyncio.CancelledError:
                break
            except Exception as e:
                # Keep the loop alive: a single failed sweep must not stop
                # future cleanup passes.
                self._logger.error(f"Error in cache cleanup loop: {e}")

    def _cleanup_expired_entries(self) -> None:
        """Remove every expired entry (and its invalidation-map references)."""
        with self._lock:
            expired_keys = [
                key for key, entry in self._cache.items() if entry.is_expired()
            ]

            for key in expired_keys:
                self._remove_entry_locked(key)
                self._stats.evictions += 1

        if expired_keys:
            self._logger.debug(f"Cleaned up {len(expired_keys)} expired entries")
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
class GitOperationCache:
    """Caches the results of expensive Git queries, keyed per repository."""

    def __init__(self, cache: PerformanceCache):
        self.cache = cache
        self._logger = get_logger("crackerjack.git_cache")

    def _make_repo_key(self, repo_path: Path, operation: str, params: str = "") -> str:
        """Derive a stable cache key from repo path, operation, and params."""

        def _short_md5(text: str) -> str:
            # md5 is used only for key derivation here, never for security.
            return hashlib.md5(text.encode(), usedforsecurity=False).hexdigest()[:8]

        repo_part = _short_md5(str(repo_path))
        param_part = _short_md5(params) if params else ""
        return f"git:{repo_part}:{operation}:{param_part}"

    def get_branch_info(self, repo_path: Path) -> t.Any:
        """Return cached branch info for *repo_path*, or None when absent."""
        return self.cache.get(self._make_repo_key(repo_path, "branch_info"))

    def set_branch_info(
        self,
        repo_path: Path,
        branch_info: t.Any,
        ttl_seconds: int = 60,
    ) -> None:
        """Store branch info, tagged so invalidate_repo() can drop it."""
        cache_key = self._make_repo_key(repo_path, "branch_info")
        self.cache.set(cache_key, branch_info, ttl_seconds, {f"git_repo:{repo_path}"})

    def get_file_status(self, repo_path: Path) -> t.Any:
        """Return cached file status for *repo_path*, or None when absent."""
        return self.cache.get(self._make_repo_key(repo_path, "file_status"))

    def set_file_status(
        self,
        repo_path: Path,
        file_status: t.Any,
        ttl_seconds: int = 30,
    ) -> None:
        """Store file status, tagged for per-repo and global file invalidation."""
        cache_key = self._make_repo_key(repo_path, "file_status")
        tags = {f"git_repo:{repo_path}", "git_files"}
        self.cache.set(cache_key, file_status, ttl_seconds, tags)

    def invalidate_repo(self, repo_path: Path) -> None:
        """Drop every cached entry belonging to *repo_path*."""
        self.cache.invalidate(f"git_repo:{repo_path}")
        self._logger.debug(f"Invalidated git cache for repo: {repo_path}")
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
class FileSystemCache:
    """Caches the results of file system queries."""

    def __init__(self, cache: PerformanceCache):
        self.cache = cache
        self._logger = get_logger("crackerjack.filesystem_cache")

    def _make_file_key(self, file_path: Path, operation: str) -> str:
        """Derive a stable cache key for *operation* on *file_path*."""
        # md5 is used only for key derivation here, never for security.
        digest = hashlib.md5(str(file_path).encode(), usedforsecurity=False)
        return f"fs:{digest.hexdigest()[:8]}:{operation}"

    def get_file_stats(self, file_path: Path) -> t.Any:
        """Return cached statistics for *file_path*, or None when absent."""
        return self.cache.get(self._make_file_key(file_path, "stats"))

    def set_file_stats(
        self,
        file_path: Path,
        stats: t.Any,
        ttl_seconds: int = 60,
    ) -> None:
        """Store statistics, tagged so invalidate_file() can drop them."""
        cache_key = self._make_file_key(file_path, "stats")
        self.cache.set(cache_key, stats, ttl_seconds, {f"file:{file_path}"})

    def invalidate_file(self, file_path: Path) -> None:
        """Drop cached entries belonging to *file_path*."""
        self.cache.invalidate(f"file:{file_path}")
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
class CommandResultCache:
    """Caches the results of command executions."""

    def __init__(self, cache: PerformanceCache):
        self.cache = cache
        self._logger = get_logger("crackerjack.command_cache")

    def _make_command_key(self, command: list[str], cwd: Path | None = None) -> str:
        """Derive a cache key from the command line and its working directory."""
        joined = " ".join(command)
        location = str(cwd) if cwd else ""
        # md5 is used only for key derivation here, never for security.
        digest = hashlib.md5(
            f"{joined}:{location}".encode(), usedforsecurity=False
        ).hexdigest()
        return f"cmd:{digest[:12]}"

    def get_command_result(
        self,
        command: list[str],
        cwd: Path | None = None,
    ) -> t.Any:
        """Return the cached result for *command* run in *cwd*, or None."""
        return self.cache.get(self._make_command_key(command, cwd))

    def set_command_result(
        self,
        command: list[str],
        result: t.Any,
        cwd: Path | None = None,
        ttl_seconds: int = 120,
    ) -> None:
        """Store *result*, tagged globally and (when *cwd* is given) per directory."""
        tags = {"commands"}
        if cwd:
            tags.add(f"cwd:{cwd}")

        self.cache.set(self._make_command_key(command, cwd), result, ttl_seconds, tags)

    def invalidate_commands(self) -> None:
        """Drop every cached command result."""
        self.cache.invalidate("commands")
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
# Global cache instance for the application
_global_cache: PerformanceCache | None = None
_cache_lock = Lock()


def get_performance_cache() -> PerformanceCache:
    """Return the process-wide PerformanceCache, creating it on first use.

    Creation is guarded by a module-level lock so concurrent first calls
    produce exactly one instance.
    """
    global _global_cache
    with _cache_lock:
        if _global_cache is None:
            _global_cache = PerformanceCache()
        return _global_cache
|
|
429
|
+
|
|
430
|
+
|
|
431
|
+
def get_git_cache() -> GitOperationCache:
    """Return a GitOperationCache backed by the global performance cache."""
    shared = get_performance_cache()
    return GitOperationCache(shared)
|
|
434
|
+
|
|
435
|
+
|
|
436
|
+
def get_filesystem_cache() -> FileSystemCache:
    """Return a FileSystemCache backed by the global performance cache."""
    shared = get_performance_cache()
    return FileSystemCache(shared)
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
def get_command_cache() -> CommandResultCache:
    """Return a CommandResultCache backed by the global performance cache."""
    shared = get_performance_cache()
    return CommandResultCache(shared)
|