crackerjack 0.29.0__py3-none-any.whl → 0.31.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crackerjack has been flagged as possibly problematic by the registry scanner.
- crackerjack/CLAUDE.md +1005 -0
- crackerjack/RULES.md +380 -0
- crackerjack/__init__.py +42 -13
- crackerjack/__main__.py +225 -253
- crackerjack/agents/__init__.py +41 -0
- crackerjack/agents/architect_agent.py +281 -0
- crackerjack/agents/base.py +169 -0
- crackerjack/agents/coordinator.py +512 -0
- crackerjack/agents/documentation_agent.py +498 -0
- crackerjack/agents/dry_agent.py +388 -0
- crackerjack/agents/formatting_agent.py +245 -0
- crackerjack/agents/import_optimization_agent.py +281 -0
- crackerjack/agents/performance_agent.py +669 -0
- crackerjack/agents/proactive_agent.py +104 -0
- crackerjack/agents/refactoring_agent.py +788 -0
- crackerjack/agents/security_agent.py +529 -0
- crackerjack/agents/test_creation_agent.py +652 -0
- crackerjack/agents/test_specialist_agent.py +486 -0
- crackerjack/agents/tracker.py +212 -0
- crackerjack/api.py +560 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/facade.py +104 -0
- crackerjack/cli/handlers.py +267 -0
- crackerjack/cli/interactive.py +471 -0
- crackerjack/cli/options.py +401 -0
- crackerjack/cli/utils.py +18 -0
- crackerjack/code_cleaner.py +670 -0
- crackerjack/config/__init__.py +19 -0
- crackerjack/config/hooks.py +218 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +406 -0
- crackerjack/core/autofix_coordinator.py +200 -0
- crackerjack/core/container.py +104 -0
- crackerjack/core/enhanced_container.py +542 -0
- crackerjack/core/performance.py +243 -0
- crackerjack/core/phase_coordinator.py +561 -0
- crackerjack/core/proactive_workflow.py +316 -0
- crackerjack/core/session_coordinator.py +289 -0
- crackerjack/core/workflow_orchestrator.py +640 -0
- crackerjack/dynamic_config.py +577 -0
- crackerjack/errors.py +263 -41
- crackerjack/executors/__init__.py +11 -0
- crackerjack/executors/async_hook_executor.py +431 -0
- crackerjack/executors/cached_hook_executor.py +242 -0
- crackerjack/executors/hook_executor.py +345 -0
- crackerjack/executors/individual_hook_executor.py +669 -0
- crackerjack/intelligence/__init__.py +44 -0
- crackerjack/intelligence/adaptive_learning.py +751 -0
- crackerjack/intelligence/agent_orchestrator.py +551 -0
- crackerjack/intelligence/agent_registry.py +414 -0
- crackerjack/intelligence/agent_selector.py +502 -0
- crackerjack/intelligence/integration.py +290 -0
- crackerjack/interactive.py +576 -315
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +137 -0
- crackerjack/managers/publish_manager.py +411 -0
- crackerjack/managers/test_command_builder.py +151 -0
- crackerjack/managers/test_executor.py +435 -0
- crackerjack/managers/test_manager.py +258 -0
- crackerjack/managers/test_manager_backup.py +1124 -0
- crackerjack/managers/test_progress.py +144 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +336 -0
- crackerjack/mcp/client_runner.py +104 -0
- crackerjack/mcp/context.py +615 -0
- crackerjack/mcp/dashboard.py +636 -0
- crackerjack/mcp/enhanced_progress_monitor.py +479 -0
- crackerjack/mcp/file_monitor.py +336 -0
- crackerjack/mcp/progress_components.py +569 -0
- crackerjack/mcp/progress_monitor.py +949 -0
- crackerjack/mcp/rate_limiter.py +332 -0
- crackerjack/mcp/server.py +22 -0
- crackerjack/mcp/server_core.py +244 -0
- crackerjack/mcp/service_watchdog.py +501 -0
- crackerjack/mcp/state.py +395 -0
- crackerjack/mcp/task_manager.py +257 -0
- crackerjack/mcp/tools/__init__.py +17 -0
- crackerjack/mcp/tools/core_tools.py +249 -0
- crackerjack/mcp/tools/error_analyzer.py +308 -0
- crackerjack/mcp/tools/execution_tools.py +370 -0
- crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
- crackerjack/mcp/tools/intelligence_tools.py +314 -0
- crackerjack/mcp/tools/monitoring_tools.py +502 -0
- crackerjack/mcp/tools/proactive_tools.py +384 -0
- crackerjack/mcp/tools/progress_tools.py +141 -0
- crackerjack/mcp/tools/utility_tools.py +341 -0
- crackerjack/mcp/tools/workflow_executor.py +360 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +39 -0
- crackerjack/mcp/websocket/endpoints.py +559 -0
- crackerjack/mcp/websocket/jobs.py +253 -0
- crackerjack/mcp/websocket/server.py +116 -0
- crackerjack/mcp/websocket/websocket_handler.py +78 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/__init__.py +31 -0
- crackerjack/models/config.py +93 -0
- crackerjack/models/config_adapter.py +230 -0
- crackerjack/models/protocols.py +118 -0
- crackerjack/models/task.py +154 -0
- crackerjack/monitoring/ai_agent_watchdog.py +450 -0
- crackerjack/monitoring/regression_prevention.py +638 -0
- crackerjack/orchestration/__init__.py +0 -0
- crackerjack/orchestration/advanced_orchestrator.py +970 -0
- crackerjack/orchestration/execution_strategies.py +341 -0
- crackerjack/orchestration/test_progress_streamer.py +636 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +246 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +259 -0
- crackerjack/py313.py +8 -3
- crackerjack/services/__init__.py +22 -0
- crackerjack/services/cache.py +314 -0
- crackerjack/services/config.py +347 -0
- crackerjack/services/config_integrity.py +99 -0
- crackerjack/services/contextual_ai_assistant.py +516 -0
- crackerjack/services/coverage_ratchet.py +347 -0
- crackerjack/services/debug.py +736 -0
- crackerjack/services/dependency_monitor.py +617 -0
- crackerjack/services/enhanced_filesystem.py +439 -0
- crackerjack/services/file_hasher.py +151 -0
- crackerjack/services/filesystem.py +395 -0
- crackerjack/services/git.py +165 -0
- crackerjack/services/health_metrics.py +611 -0
- crackerjack/services/initialization.py +847 -0
- crackerjack/services/log_manager.py +286 -0
- crackerjack/services/logging.py +174 -0
- crackerjack/services/metrics.py +578 -0
- crackerjack/services/pattern_cache.py +362 -0
- crackerjack/services/pattern_detector.py +515 -0
- crackerjack/services/performance_benchmarks.py +653 -0
- crackerjack/services/security.py +163 -0
- crackerjack/services/server_manager.py +234 -0
- crackerjack/services/smart_scheduling.py +144 -0
- crackerjack/services/tool_version_service.py +61 -0
- crackerjack/services/unified_config.py +437 -0
- crackerjack/services/version_checker.py +248 -0
- crackerjack/slash_commands/__init__.py +14 -0
- crackerjack/slash_commands/init.md +122 -0
- crackerjack/slash_commands/run.md +163 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack-0.31.4.dist-info/METADATA +742 -0
- crackerjack-0.31.4.dist-info/RECORD +148 -0
- crackerjack-0.31.4.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -34
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/.pre-commit-config-ai.yaml +0 -149
- crackerjack/.pre-commit-config-fast.yaml +0 -69
- crackerjack/.pre-commit-config.yaml +0 -114
- crackerjack/crackerjack.py +0 -4140
- crackerjack/pyproject.toml +0 -285
- crackerjack-0.29.0.dist-info/METADATA +0 -1289
- crackerjack-0.29.0.dist-info/RECORD +0 -17
- {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/WHEEL +0 -0
- {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,314 @@

import hashlib
import json
import time
import typing as t
from dataclasses import asdict, dataclass, field
from pathlib import Path

from crackerjack.models.task import HookResult


@dataclass
class CacheEntry:
    key: str
    value: t.Any
    created_at: float = field(default_factory=time.time)
    accessed_at: float = field(default_factory=time.time)
    ttl_seconds: int = 3600
    access_count: int = 0

    @property
    def is_expired(self) -> bool:
        return (time.time() - self.created_at) > self.ttl_seconds

    @property
    def age_seconds(self) -> int:
        return int(time.time() - self.created_at)

    def touch(self) -> None:
        self.accessed_at = time.time()
        self.access_count += 1

    def to_dict(self) -> dict[str, t.Any]:
        """Convert to JSON-serializable dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, data: dict[str, t.Any]) -> "CacheEntry":
        """Create from dict loaded from JSON."""
        return cls(**data)


@dataclass
class CacheStats:
    hits: int = 0
    misses: int = 0
    evictions: int = 0
    total_entries: int = 0
    total_size_bytes: int = 0

    @property
    def hit_rate(self) -> float:
        total = self.hits + self.misses
        return (self.hits / total * 100) if total > 0 else 0.0

    def to_dict(self) -> dict[str, t.Any]:
        return {
            "hits": self.hits,
            "misses": self.misses,
            "evictions": self.evictions,
            "total_entries": self.total_entries,
            "hit_rate_percent": round(self.hit_rate, 2),
            "total_size_mb": round(self.total_size_bytes / 1024 / 1024, 2),
        }


class InMemoryCache:
    def __init__(self, max_entries: int = 1000, default_ttl: int = 3600) -> None:
        self.max_entries = max_entries
        self.default_ttl = default_ttl
        self._cache: dict[str, CacheEntry] = {}
        self.stats = CacheStats()

    def get(self, key: str) -> t.Any | None:
        entry = self._cache.get(key)

        if entry is None:
            self.stats.misses += 1
            return None

        if entry.is_expired:
            del self._cache[key]
            self.stats.misses += 1
            self.stats.evictions += 1
            return None

        entry.touch()
        self.stats.hits += 1
        return entry.value

    def set(self, key: str, value: t.Any, ttl_seconds: int | None = None) -> None:
        if ttl_seconds is None:
            ttl_seconds = self.default_ttl

        if len(self._cache) >= self.max_entries:
            self._evict_lru()

        self._cache[key] = CacheEntry(
            key=key,
            value=value,
            ttl_seconds=ttl_seconds,
        )

        self.stats.total_entries = len(self._cache)

    def invalidate(self, key: str) -> bool:
        if key in self._cache:
            del self._cache[key]
            self.stats.total_entries = len(self._cache)
            return True
        return False

    def clear(self) -> None:
        evicted = len(self._cache)
        self._cache.clear()
        self.stats.evictions += evicted
        self.stats.total_entries = 0

    def cleanup_expired(self) -> int:
        expired_keys = [key for key, entry in self._cache.items() if entry.is_expired]

        for key in expired_keys:
            del self._cache[key]

        self.stats.evictions += len(expired_keys)
        self.stats.total_entries = len(self._cache)
        return len(expired_keys)

    def _evict_lru(self) -> None:
        if not self._cache:
            return

        lru_key = min(self._cache.keys(), key=lambda k: self._cache[k].accessed_at)

        del self._cache[lru_key]
        self.stats.evictions += 1


class FileCache:
    def __init__(self, cache_dir: Path, namespace: str = "crackerjack") -> None:
        self.cache_dir = cache_dir / namespace
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.stats = CacheStats()

    def get(self, key: str) -> t.Any | None:
        cache_file = self._get_cache_file(key)

        if not cache_file.exists():
            self.stats.misses += 1
            return None

        try:
            with cache_file.open(encoding="utf-8") as f:
                data = json.load(f)
            entry = CacheEntry.from_dict(data)

            if entry.is_expired:
                cache_file.unlink(missing_ok=True)
                self.stats.misses += 1
                self.stats.evictions += 1
                return None

            entry.touch()

            with cache_file.open("w", encoding="utf-8") as f:
                json.dump(entry.to_dict(), f)

            self.stats.hits += 1
            return entry.value

        except (json.JSONDecodeError, FileNotFoundError, OSError, KeyError):
            self.stats.misses += 1
            cache_file.unlink(missing_ok=True)
            return None

    def set(self, key: str, value: t.Any, ttl_seconds: int = 3600) -> None:
        cache_file = self._get_cache_file(key)

        entry = CacheEntry(
            key=key,
            value=value,
            ttl_seconds=ttl_seconds,
        )

        try:
            with cache_file.open("w", encoding="utf-8") as f:
                json.dump(entry.to_dict(), f)
        except (json.JSONDecodeError, OSError, KeyError):
            pass

    def invalidate(self, key: str) -> bool:
        cache_file = self._get_cache_file(key)
        if cache_file.exists():
            cache_file.unlink()
            return True
        return False

    def clear(self) -> None:
        for cache_file in self.cache_dir.glob("*.cache"):
            cache_file.unlink(missing_ok=True)

    def cleanup_expired(self) -> int:
        removed = 0
        for cache_file in self.cache_dir.glob("*.cache"):
            try:
                with cache_file.open(encoding="utf-8") as f:
                    data = json.load(f)
                entry = CacheEntry.from_dict(data)

                if entry.is_expired:
                    cache_file.unlink()
                    removed += 1
            except (json.JSONDecodeError, FileNotFoundError, OSError, KeyError):
                cache_file.unlink(missing_ok=True)
                removed += 1

        self.stats.evictions += removed
        return removed

    def _get_cache_file(self, key: str) -> Path:
        safe_key = hashlib.md5(key.encode(), usedforsecurity=False).hexdigest()
        return self.cache_dir / f"{safe_key}.cache"


class CrackerjackCache:
    def __init__(
        self,
        cache_dir: Path | None = None,
        enable_disk_cache: bool = True,
    ) -> None:
        self.cache_dir = cache_dir or Path.cwd() / ".crackerjack_cache"
        self.enable_disk_cache = enable_disk_cache

        self.hook_results_cache = InMemoryCache(max_entries=500, default_ttl=1800)
        self.file_hash_cache = InMemoryCache(max_entries=2000)
        self.config_cache = InMemoryCache(max_entries=100, default_ttl=7200)

        if enable_disk_cache:
            self.disk_cache = FileCache(self.cache_dir)

    def get_hook_result(
        self,
        hook_name: str,
        file_hashes: list[str],
    ) -> HookResult | None:
        cache_key = self._get_hook_cache_key(hook_name, file_hashes)
        return self.hook_results_cache.get(cache_key)

    def set_hook_result(
        self,
        hook_name: str,
        file_hashes: list[str],
        result: HookResult,
    ) -> None:
        cache_key = self._get_hook_cache_key(hook_name, file_hashes)
        self.hook_results_cache.set(cache_key, result, ttl_seconds=1800)

    def get_file_hash(self, file_path: Path) -> str | None:
        stat = file_path.stat()
        cache_key = f"file_hash:{file_path}:{stat.st_mtime}:{stat.st_size}"
        return self.file_hash_cache.get(cache_key)

    def set_file_hash(self, file_path: Path, file_hash: str) -> None:
        stat = file_path.stat()
        cache_key = f"file_hash:{file_path}:{stat.st_mtime}:{stat.st_size}"
        self.file_hash_cache.set(cache_key, file_hash, ttl_seconds=3600)

    def get_config_data(self, config_key: str) -> t.Any | None:
        return self.config_cache.get(f"config:{config_key}")

    def set_config_data(self, config_key: str, data: t.Any) -> None:
        self.config_cache.set(f"config:{config_key}", data, ttl_seconds=7200)

    def invalidate_hook_cache(self, hook_name: str | None = None) -> None:
        if hook_name:
            keys_to_remove = [
                key
                for key in self.hook_results_cache._cache
                if key.startswith(f"hook_result:{hook_name}:")
            ]
            for key in keys_to_remove:
                self.hook_results_cache.invalidate(key)
        else:
            self.hook_results_cache.clear()

    def cleanup_all(self) -> dict[str, int]:
        results = {
            "hook_results": self.hook_results_cache.cleanup_expired(),
            "file_hashes": self.file_hash_cache.cleanup_expired(),
            "config": self.config_cache.cleanup_expired(),
        }

        if self.enable_disk_cache:
            results["disk_cache"] = self.disk_cache.cleanup_expired()

        return results

    def get_cache_stats(self) -> dict[str, t.Any]:
        stats = {
            "hook_results": self.hook_results_cache.stats.to_dict(),
            "file_hashes": self.file_hash_cache.stats.to_dict(),
            "config": self.config_cache.stats.to_dict(),
        }

        if self.enable_disk_cache:
            stats["disk_cache"] = self.disk_cache.stats.to_dict()

        return stats

    def _get_hook_cache_key(self, hook_name: str, file_hashes: list[str]) -> str:
        hash_signature = hashlib.md5(
            ",".join(sorted(file_hashes)).encode(),
            usedforsecurity=False,
        ).hexdigest()
        return f"hook_result:{hook_name}:{hash_signature}"
@@ -0,0 +1,347 @@

import subprocess
import typing as t
from pathlib import Path

from rich.console import Console

from crackerjack.dynamic_config import DynamicConfigGenerator, generate_config_for_mode
from crackerjack.models.protocols import OptionsProtocol


class ConfigurationService:
    def __init__(self, console: Console, pkg_path: Path) -> None:
        self.console = console
        self.pkg_path = pkg_path
        self.config_generator = DynamicConfigGenerator()

    def update_precommit_config(self, options: OptionsProtocol) -> bool:
        """Update pre-commit configuration and dynamic config versions."""
        try:
            # Generate config first
            mode = self._determine_config_mode(options)
            config_temp_path = generate_config_for_mode(mode)
            if not config_temp_path:
                self.console.print("[yellow]⚠️ No configuration generated[/yellow]")
                return False

            config_file = self.pkg_path / ".pre-commit-config.yaml"
            config_content = config_temp_path.read_text()
            config_file.write_text(config_content)

            self._temp_config_path = config_temp_path
            self.console.print("[green]✅[/green] Pre-commit configuration generated")

            # Run pre-commit autoupdate if requested via -u flag
            if getattr(options, "update_precommit", False):
                success = self._run_precommit_autoupdate()
                if success:
                    self.console.print("[green]✅[/green] Pre-commit hooks updated")
                else:
                    self.console.print(
                        "[yellow]⚠️[/yellow] Pre-commit autoupdate had issues"
                    )

                # Also update dynamic config versions
                self._update_dynamic_config_versions()

            return True
        except Exception as e:
            self.console.print(
                f"[red]❌[/red] Failed to generate pre-commit config: {e}",
            )
            return False

    def get_temp_config_path(self) -> Path | None:
        return getattr(self, "_temp_config_path", None)

    def _determine_config_mode(self, options: OptionsProtocol) -> str:
        if options.experimental_hooks:
            return "experimental"
        if hasattr(options, "test") and options.test:
            return "comprehensive"
        return "comprehensive"

    def validate_config(self) -> bool:
        try:
            config_file = self.pkg_path / ".pre-commit-config.yaml"
            if not config_file.exists():
                self.console.print(
                    "[yellow]⚠️ No pre-commit configuration found[/yellow]",
                )
                return False
            import yaml

            with config_file.open() as f:
                yaml_result = yaml.safe_load(f)
            _ = (
                t.cast("dict[str, t.Any]", yaml_result)
                if yaml_result is not None
                else {}
            )
            self.console.print("[green]✅[/green] Pre-commit configuration is valid")
            return True
        except Exception as e:
            self.console.print(f"[red]❌[/red] Configuration validation failed: {e}")
            return False

    def backup_config(self) -> Path | None:
        try:
            config_file = self.pkg_path / ".pre-commit-config.yaml"
            if not config_file.exists():
                return None
            import time

            timestamp = int(time.time())
            backup_file = self.pkg_path / f".pre-commit-config.yaml.backup.{timestamp}"
            backup_file.write_text(config_file.read_text())
            self.console.print(
                f"[cyan]💾[/cyan] Configuration backed up to {backup_file.name}",
            )
            return backup_file
        except Exception as e:
            self.console.print(
                f"[yellow]⚠️[/yellow] Failed to backup configuration: {e}",
            )
            return None

    def restore_config(self, backup_file: Path) -> bool:
        try:
            if not backup_file.exists():
                self.console.print(
                    f"[red]❌[/red] Backup file not found: {backup_file}",
                )
                return False
            config_file = self.pkg_path / ".pre-commit-config.yaml"
            config_file.write_text(backup_file.read_text())
            self.console.print(
                f"[green]✅[/green] Configuration restored from {backup_file.name}",
            )
            return True
        except Exception as e:
            self.console.print(f"[red]❌[/red] Failed to restore configuration: {e}")
            return False

    def get_config_info(self) -> dict[str, t.Any]:
        try:
            config_file = self.pkg_path / ".pre-commit-config.yaml"
            if not config_file.exists():
                return {"exists": False}
            import yaml

            with config_file.open() as f:
                yaml_result = yaml.safe_load(f)
            config_data = (
                t.cast("dict[str, t.Any]", yaml_result)
                if isinstance(yaml_result, dict)
                else {}
            )
            repos = config_data.get("repos", [])
            if not isinstance(repos, list):
                repos = []
            hook_count = sum(
                len(repo.get("hooks", [])) for repo in repos if isinstance(repo, dict)
            )
            stat = config_file.stat()

            return {
                "exists": True,
                "file_size": stat.st_size,
                "modified_time": stat.st_mtime,
                "repo_count": len([r for r in repos if isinstance(r, dict)]),
                "hook_count": hook_count,
                "repos": [
                    {
                        "repo": repo.get("repo", "unknown"),
                        "rev": repo.get("rev", "unknown"),
                        "hooks": len(repo.get("hooks", [])),
                    }
                    for repo in repos
                    if isinstance(repo, dict)
                ],
            }
        except Exception as e:
            return {"exists": True, "error": str(e)}

    def update_pyproject_config(self, options: OptionsProtocol) -> bool:
        try:
            pyproject_file = self.pkg_path / "pyproject.toml"
            if not pyproject_file.exists():
                self.console.print("[yellow]⚠️ No pyproject.toml found[/yellow]")
                return False
            from tomllib import loads

            from tomli_w import dumps

            with pyproject_file.open() as f:
                content = f.read()
            config = loads(content)
            if "tool" not in config:
                config["tool"] = {}
            if "ruff" not in config["tool"]:
                config["tool"]["ruff"] = {
                    "target-version": "py313",
                    "line-length": 88,
                    "fix": True,
                    "unsafe-fixes": True,
                    "show-fixes": True,
                    "output-format": "full",
                }
                config["tool"]["ruff"]["format"] = {"docstring-code-format": True}
                config["tool"]["ruff"]["lint"] = {
                    "extend-select": ["C901", "F", "I", "UP"],
                    "ignore": ["E402", "F821"],
                    "fixable": ["ALL"],
                }
            if "pytest" not in config["tool"]:
                config["tool"]["pytest"] = {
                    "ini_options": {
                        "asyncio_mode": "auto",
                        "timeout": 300,
                        "addopts": "--cov=crackerjack --cov-report=term",
                        "markers": [
                            "unit: marks test as a unit test",
                            "benchmark: mark test as a benchmark",
                            "integration: marks test as an integration test",
                            "no_leaks: detect asyncio task leaks",
                        ],
                    },
                }
            with pyproject_file.open("w") as f:
                f.write(dumps(config))
            self.console.print("[green]✅[/green] pyproject.toml configuration updated")
            return True
        except Exception as e:
            self.console.print(f"[red]❌[/red] Failed to update pyproject.toml: {e}")
            return False

    def _run_precommit_autoupdate(self) -> bool:
        """Run pre-commit autoupdate to get latest hook versions."""
        import subprocess

        try:
            self.console.print("[cyan]🔄[/cyan] Running pre-commit autoupdate...")
            result = self._execute_precommit_autoupdate()

            if result.returncode == 0:
                self._display_autoupdate_results(result.stdout)
                return True
            else:
                self._handle_autoupdate_error(result.stderr)
                return False

        except subprocess.TimeoutExpired:
            self.console.print("[red]❌[/red] Pre-commit autoupdate timed out")
            return False
        except Exception as e:
            self.console.print(
                f"[red]❌[/red] Failed to run pre-commit autoupdate: {e}"
            )
            return False

    def _execute_precommit_autoupdate(self) -> subprocess.CompletedProcess[str]:
        """Execute the pre-commit autoupdate command."""

        return subprocess.run(
            ["uv", "run", "pre-commit", "autoupdate"],
            cwd=self.pkg_path,
            capture_output=True,
            text=True,
            timeout=60,
        )

    def _display_autoupdate_results(self, stdout: str) -> None:
        """Display updated versions if any."""
        if self._has_updates(stdout):
            for line in stdout.split("\n"):
                if self._is_update_line(line):
                    self.console.print(f"[dim] {line.strip()}[/dim]")

    def _has_updates(self, stdout: str) -> bool:
        """Check if the output contains update information."""
        stdout_lower = stdout.lower()
        return "updating" in stdout_lower or "updated" in stdout_lower

    def _is_update_line(self, line: str) -> bool:
        """Check if a line contains update information."""
        return "updating" in line.lower() or "->" in line

    def _handle_autoupdate_error(self, stderr: str) -> None:
        """Handle pre-commit autoupdate error output."""
        if stderr:
            self.console.print(
                f"[yellow]Pre-commit autoupdate stderr:[/yellow] {stderr}"
            )

    def _update_dynamic_config_versions(self) -> None:
        """Update hardcoded versions in dynamic_config.py based on .pre-commit-config.yaml."""
        try:
            self.console.print("[cyan]🔄[/cyan] Updating dynamic config versions...")

            version_updates = self._extract_version_updates()
            if version_updates:
                self._update_dynamic_config_file(version_updates)

        except Exception as e:
            self.console.print(
                f"[yellow]⚠️[/yellow] Failed to update dynamic config versions: {e}"
            )

    def _extract_version_updates(self) -> dict[str, str]:
        """Extract version mappings from .pre-commit-config.yaml."""
        config_file = self.pkg_path / ".pre-commit-config.yaml"
        if not config_file.exists():
            return {}

        import yaml

        with config_file.open() as f:
            config = yaml.safe_load(f)

        if not config or "repos" not in config:
            return {}

        version_updates = {}
        repos = config.get("repos", []) if isinstance(config, dict) else []
        for repo in repos:
            repo_url = repo.get("repo", "")
            rev = repo.get("rev", "")
            if repo_url and rev:
                version_updates[repo_url] = rev

        return version_updates

    def _update_dynamic_config_file(self, version_updates: dict[str, str]) -> None:
        """Update dynamic_config.py with version mappings."""
        dynamic_config_path = self.pkg_path / "crackerjack" / "dynamic_config.py"
        if dynamic_config_path.exists():
            self._apply_version_updates(dynamic_config_path, version_updates)

    def _apply_version_updates(
        self, config_path: Path, version_updates: dict[str, str]
    ) -> None:
        """Apply version updates to dynamic_config.py."""
        try:
            content = config_path.read_text()
            updated = False

            for repo_url, new_rev in version_updates.items():
                # Find and update the revision for this repo
                import re

                pattern = rf'("repo": "{re.escape(repo_url)}".*?"rev": )"([^"]+)"'
                replacement = rf'\1"{new_rev}"'

                new_content = re.sub(pattern, replacement, content, flags=re.DOTALL)
                if new_content != content:
                    content = new_content
                    updated = True
                    self.console.print(f"[dim] Updated {repo_url} to {new_rev}[/dim]")

            if updated:
                config_path.write_text(content)
                self.console.print("[green]✅[/green] Dynamic config versions updated")

        except Exception as e:
            self.console.print(
                f"[yellow]⚠️[/yellow] Failed to apply version updates: {e}"
            )