crackerjack 0.31.10__py3-none-any.whl → 0.31.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of crackerjack might be problematic.
- crackerjack/CLAUDE.md +288 -705
- crackerjack/__main__.py +22 -8
- crackerjack/agents/__init__.py +0 -3
- crackerjack/agents/architect_agent.py +0 -43
- crackerjack/agents/base.py +1 -9
- crackerjack/agents/coordinator.py +2 -148
- crackerjack/agents/documentation_agent.py +109 -81
- crackerjack/agents/dry_agent.py +122 -97
- crackerjack/agents/formatting_agent.py +3 -16
- crackerjack/agents/import_optimization_agent.py +1174 -130
- crackerjack/agents/performance_agent.py +956 -188
- crackerjack/agents/performance_helpers.py +229 -0
- crackerjack/agents/proactive_agent.py +1 -48
- crackerjack/agents/refactoring_agent.py +516 -246
- crackerjack/agents/refactoring_helpers.py +282 -0
- crackerjack/agents/security_agent.py +393 -90
- crackerjack/agents/test_creation_agent.py +1776 -120
- crackerjack/agents/test_specialist_agent.py +59 -15
- crackerjack/agents/tracker.py +0 -102
- crackerjack/api.py +145 -37
- crackerjack/cli/handlers.py +48 -30
- crackerjack/cli/interactive.py +11 -11
- crackerjack/cli/options.py +66 -4
- crackerjack/code_cleaner.py +808 -148
- crackerjack/config/global_lock_config.py +110 -0
- crackerjack/config/hooks.py +43 -64
- crackerjack/core/async_workflow_orchestrator.py +247 -97
- crackerjack/core/autofix_coordinator.py +192 -109
- crackerjack/core/enhanced_container.py +46 -63
- crackerjack/core/file_lifecycle.py +549 -0
- crackerjack/core/performance.py +9 -8
- crackerjack/core/performance_monitor.py +395 -0
- crackerjack/core/phase_coordinator.py +281 -94
- crackerjack/core/proactive_workflow.py +9 -58
- crackerjack/core/resource_manager.py +501 -0
- crackerjack/core/service_watchdog.py +490 -0
- crackerjack/core/session_coordinator.py +4 -8
- crackerjack/core/timeout_manager.py +504 -0
- crackerjack/core/websocket_lifecycle.py +475 -0
- crackerjack/core/workflow_orchestrator.py +343 -209
- crackerjack/dynamic_config.py +47 -6
- crackerjack/errors.py +3 -4
- crackerjack/executors/async_hook_executor.py +63 -13
- crackerjack/executors/cached_hook_executor.py +14 -14
- crackerjack/executors/hook_executor.py +100 -37
- crackerjack/executors/hook_lock_manager.py +856 -0
- crackerjack/executors/individual_hook_executor.py +120 -86
- crackerjack/intelligence/__init__.py +0 -7
- crackerjack/intelligence/adaptive_learning.py +13 -86
- crackerjack/intelligence/agent_orchestrator.py +15 -78
- crackerjack/intelligence/agent_registry.py +12 -59
- crackerjack/intelligence/agent_selector.py +31 -92
- crackerjack/intelligence/integration.py +1 -41
- crackerjack/interactive.py +9 -9
- crackerjack/managers/async_hook_manager.py +25 -8
- crackerjack/managers/hook_manager.py +9 -9
- crackerjack/managers/publish_manager.py +57 -59
- crackerjack/managers/test_command_builder.py +6 -36
- crackerjack/managers/test_executor.py +9 -61
- crackerjack/managers/test_manager.py +17 -63
- crackerjack/managers/test_manager_backup.py +77 -127
- crackerjack/managers/test_progress.py +4 -23
- crackerjack/mcp/cache.py +5 -12
- crackerjack/mcp/client_runner.py +10 -10
- crackerjack/mcp/context.py +64 -6
- crackerjack/mcp/dashboard.py +14 -11
- crackerjack/mcp/enhanced_progress_monitor.py +55 -55
- crackerjack/mcp/file_monitor.py +72 -42
- crackerjack/mcp/progress_components.py +103 -84
- crackerjack/mcp/progress_monitor.py +122 -49
- crackerjack/mcp/rate_limiter.py +12 -12
- crackerjack/mcp/server_core.py +16 -22
- crackerjack/mcp/service_watchdog.py +26 -26
- crackerjack/mcp/state.py +15 -0
- crackerjack/mcp/tools/core_tools.py +95 -39
- crackerjack/mcp/tools/error_analyzer.py +6 -32
- crackerjack/mcp/tools/execution_tools.py +1 -56
- crackerjack/mcp/tools/execution_tools_backup.py +35 -131
- crackerjack/mcp/tools/intelligence_tool_registry.py +0 -36
- crackerjack/mcp/tools/intelligence_tools.py +2 -55
- crackerjack/mcp/tools/monitoring_tools.py +308 -145
- crackerjack/mcp/tools/proactive_tools.py +12 -42
- crackerjack/mcp/tools/progress_tools.py +23 -15
- crackerjack/mcp/tools/utility_tools.py +3 -40
- crackerjack/mcp/tools/workflow_executor.py +40 -60
- crackerjack/mcp/websocket/app.py +0 -3
- crackerjack/mcp/websocket/endpoints.py +206 -268
- crackerjack/mcp/websocket/jobs.py +213 -66
- crackerjack/mcp/websocket/server.py +84 -6
- crackerjack/mcp/websocket/websocket_handler.py +137 -29
- crackerjack/models/config_adapter.py +3 -16
- crackerjack/models/protocols.py +162 -3
- crackerjack/models/resource_protocols.py +454 -0
- crackerjack/models/task.py +3 -3
- crackerjack/monitoring/__init__.py +0 -0
- crackerjack/monitoring/ai_agent_watchdog.py +25 -71
- crackerjack/monitoring/regression_prevention.py +28 -87
- crackerjack/orchestration/advanced_orchestrator.py +44 -78
- crackerjack/orchestration/coverage_improvement.py +10 -60
- crackerjack/orchestration/execution_strategies.py +16 -16
- crackerjack/orchestration/test_progress_streamer.py +61 -53
- crackerjack/plugins/base.py +1 -1
- crackerjack/plugins/managers.py +22 -20
- crackerjack/py313.py +65 -21
- crackerjack/services/backup_service.py +467 -0
- crackerjack/services/bounded_status_operations.py +627 -0
- crackerjack/services/cache.py +7 -9
- crackerjack/services/config.py +35 -52
- crackerjack/services/config_integrity.py +5 -16
- crackerjack/services/config_merge.py +542 -0
- crackerjack/services/contextual_ai_assistant.py +17 -19
- crackerjack/services/coverage_ratchet.py +44 -73
- crackerjack/services/debug.py +25 -39
- crackerjack/services/dependency_monitor.py +52 -50
- crackerjack/services/enhanced_filesystem.py +14 -11
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/filesystem.py +1 -12
- crackerjack/services/git.py +71 -47
- crackerjack/services/health_metrics.py +31 -27
- crackerjack/services/initialization.py +276 -428
- crackerjack/services/input_validator.py +760 -0
- crackerjack/services/log_manager.py +16 -16
- crackerjack/services/logging.py +7 -6
- crackerjack/services/metrics.py +43 -43
- crackerjack/services/pattern_cache.py +2 -31
- crackerjack/services/pattern_detector.py +26 -63
- crackerjack/services/performance_benchmarks.py +20 -45
- crackerjack/services/regex_patterns.py +2887 -0
- crackerjack/services/regex_utils.py +537 -0
- crackerjack/services/secure_path_utils.py +683 -0
- crackerjack/services/secure_status_formatter.py +534 -0
- crackerjack/services/secure_subprocess.py +605 -0
- crackerjack/services/security.py +47 -10
- crackerjack/services/security_logger.py +492 -0
- crackerjack/services/server_manager.py +109 -50
- crackerjack/services/smart_scheduling.py +8 -25
- crackerjack/services/status_authentication.py +603 -0
- crackerjack/services/status_security_manager.py +442 -0
- crackerjack/services/thread_safe_status_collector.py +546 -0
- crackerjack/services/tool_version_service.py +1 -23
- crackerjack/services/unified_config.py +36 -58
- crackerjack/services/validation_rate_limiter.py +269 -0
- crackerjack/services/version_checker.py +9 -40
- crackerjack/services/websocket_resource_limiter.py +572 -0
- crackerjack/slash_commands/__init__.py +52 -2
- crackerjack/tools/__init__.py +0 -0
- crackerjack/tools/validate_input_validator_patterns.py +262 -0
- crackerjack/tools/validate_regex_patterns.py +198 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.12.dist-info}/METADATA +197 -12
- crackerjack-0.31.12.dist-info/RECORD +178 -0
- crackerjack/cli/facade.py +0 -104
- crackerjack-0.31.10.dist-info/RECORD +0 -149
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.12.dist-info}/WHEEL +0 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.12.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.31.10.dist-info → crackerjack-0.31.12.dist-info}/licenses/LICENSE +0 -0
crackerjack/services/config_merge.py (new file)
@@ -0,0 +1,542 @@
+import copy
+import io
+import typing as t
+from pathlib import Path
+
+import tomli
+import tomli_w
+import yaml
+from rich.console import Console
+
+from crackerjack.models.protocols import ConfigMergeServiceProtocol
+from crackerjack.services.filesystem import FileSystemService
+from crackerjack.services.git import GitService
+from crackerjack.services.logging import get_logger
+
+
+class ConfigMergeService(ConfigMergeServiceProtocol):
+    """Smart configuration file merging service.
+
+    Extracts and centralizes smart merge logic for:
+    - pyproject.toml files (preserves project identity, merges tool configs)
+    - .pre-commit-config.yaml files (adds missing repos, preserves existing hooks)
+    - .gitignore files (merges patterns while avoiding duplicates)
+    - Generic file appending with markers
+
+    Follows crackerjack's DRY, YAGNI, KISS principles.
+    """
+
+    def __init__(
+        self,
+        console: Console,
+        filesystem: FileSystemService,
+        git_service: GitService,
+    ) -> None:
+        self.console = console
+        self.filesystem = filesystem
+        self.git_service = git_service
+        self.logger = get_logger("crackerjack.config_merge")
+
+    def smart_merge_pyproject(
+        self,
+        source_content: dict[str, t.Any],
+        target_path: str | t.Any,
+        project_name: str,
+    ) -> dict[str, t.Any]:
+        """Smart merge pyproject.toml preserving project identity and merging tool configs."""
+        target_path = Path(target_path)
+
+        if not target_path.exists():
+            # No existing file, return source with project name replacement
+            return self._replace_project_name_in_config_value(
+                source_content, project_name
+            )
+
+        with target_path.open("rb") as f:
+            target_content = tomli.load(f)
+
+        # Ensure crackerjack dev dependency
+        self._ensure_crackerjack_dev_dependency(target_content, source_content)
+
+        # Merge tool configurations
+        self._merge_tool_configurations(target_content, source_content, project_name)
+
+        # Remove fixed coverage requirements (use ratchet system)
+        self._remove_fixed_coverage_requirements(target_content)
+
+        self.logger.info("Smart merged pyproject.toml", project_name=project_name)
+        return target_content
+
+    def smart_merge_pre_commit_config(
+        self,
+        source_content: dict[str, t.Any],
+        target_path: str | t.Any,
+        project_name: str,
+    ) -> dict[str, t.Any]:
+        """Smart merge .pre-commit-config.yaml adding missing repos."""
+        target_path = Path(target_path)
+
+        if not target_path.exists():
+            # No existing file, return source
+            return source_content
+
+        with target_path.open() as f:
+            target_content = yaml.safe_load(f) or {}
+
+        # Ensure target_content is a dict
+        if not isinstance(target_content, dict):
+            self.logger.warning(
+                f"Target config is not a dictionary, using source: {type(target_content)}"
+            )
+            return source_content
+
+        source_repos = source_content.get("repos", [])
+        target_repos = target_content.get("repos", [])
+
+        # Ensure target_repos is a list of dicts
+        if not isinstance(target_repos, list):
+            target_repos = []
+
+        # Get existing repo URLs to avoid duplicates
+        existing_repo_urls = {
+            repo.get("repo", "") for repo in target_repos if isinstance(repo, dict)
+        }
+
+        # Find new repos to add
+        new_repos = [
+            repo
+            for repo in source_repos
+            if isinstance(repo, dict) and repo.get("repo", "") not in existing_repo_urls
+        ]
+
+        if new_repos:
+            target_repos.extend(new_repos)
+            target_content["repos"] = target_repos
+            self.logger.info(
+                "Merged .pre-commit-config.yaml",
+                new_repos_count=len(new_repos),
+                project_name=project_name,
+            )
+
+        return target_content
+
+    def smart_append_file(
+        self,
+        source_content: str,
+        target_path: str | t.Any,
+        start_marker: str,
+        end_marker: str,
+        force: bool = False,
+    ) -> str:
+        """Smart append content to file with markers (for CLAUDE.md, etc)."""
+        target_path = Path(target_path)
+
+        if not target_path.exists():
+            # No existing file, return source content wrapped in markers
+            return f"{start_marker}\n{source_content.strip()}\n{end_marker}\n"
+
+        existing_content = target_path.read_text()
+
+        # Check if markers already exist
+        if start_marker in existing_content:
+            if force:
+                # Replace existing section
+                start_idx = existing_content.find(start_marker)
+                end_idx = existing_content.find(end_marker)
+                if end_idx != -1:
+                    end_idx += len(end_marker)
+                    existing_content = (
+                        existing_content[:start_idx] + existing_content[end_idx:]
+                    ).strip()
+            else:
+                # Already exists and not forced, return existing
+                return existing_content
+
+        # Append new section with markers
+        merged_content = existing_content.strip() + "\n\n" + start_marker + "\n"
+        merged_content += source_content.strip() + "\n"
+        merged_content += end_marker + "\n"
+
+        self.logger.info("Smart appended file with markers", path=str(target_path))
+        return merged_content
+
+    def smart_merge_gitignore(
+        self,
+        patterns: list[str],
+        target_path: str | t.Any,
+    ) -> str:
+        """Smart merge .gitignore patterns avoiding and cleaning out duplicates."""
+        target_path = Path(target_path)
+
+        if not target_path.exists():
+            return self._create_new_gitignore(target_path, patterns)
+
+        lines = target_path.read_text().splitlines()
+
+        # Parse existing content and extract patterns
+        parsed_content = self._parse_existing_gitignore_content(lines)
+
+        # Build merged content
+        merged_content = self._build_merged_gitignore_content(parsed_content, patterns)
+
+        # Write and log results
+        target_path.write_text(merged_content)
+        new_patterns_count = len(
+            [p for p in patterns if p not in parsed_content.existing_patterns]
+        )
+        all_patterns_count = len(parsed_content.existing_patterns) + new_patterns_count
+
+        self.logger.info(
+            "Smart merged .gitignore (cleaned duplicates)",
+            new_patterns_count=new_patterns_count,
+            total_crackerjack_patterns=all_patterns_count,
+        )
+        return merged_content
+
+    def _create_new_gitignore(self, target_path: Path, patterns: list[str]) -> str:
+        """Create a new .gitignore file with patterns."""
+        merged_content = "# Crackerjack patterns\n"
+        for pattern in patterns:
+            merged_content += f"{pattern}\n"
+        target_path.write_text(merged_content)
+        self.logger.info("Created .gitignore", new_patterns_count=len(patterns))
+        return merged_content
+
+    def _parse_existing_gitignore_content(self, lines: list[str]) -> t.Any:
+        """Parse existing .gitignore content, extracting patterns and non-Crackerjack lines."""
+
+        # Using a simple namespace class to group related data
+        class ParsedContent:
+            def __init__(self):
+                self.cleaned_lines = []
+                self.existing_patterns = set()
+
+        parsed = ParsedContent()
+        parser_state = self._init_parser_state()
+
+        for line in lines:
+            parser_state = self._process_gitignore_line(line, parsed, parser_state)
+
+        return parsed
+
+    def _init_parser_state(self) -> dict[str, bool]:
+        """Initialize parser state for gitignore parsing."""
+        return {
+            "inside_crackerjack_section": False,
+            "skip_empty_after_crackerjack": False,
+        }
+
+    def _process_gitignore_line(
+        self, line: str, parsed: t.Any, state: dict[str, bool]
+    ) -> dict[str, bool]:
+        """Process a single line during gitignore parsing."""
+        stripped = line.strip()
+
+        # Handle Crackerjack section headers
+        if self._is_crackerjack_header(stripped):
+            return self._handle_crackerjack_header(state)
+
+        # Handle empty lines after headers
+        if self._should_skip_empty_line(stripped, state):
+            state["skip_empty_after_crackerjack"] = False
+            return state
+
+        state["skip_empty_after_crackerjack"] = False
+
+        # Process patterns and lines
+        self._collect_pattern_if_present(stripped, parsed, state)
+        self._add_line_if_non_crackerjack(line, parsed, state)
+
+        return state
+
+    def _handle_crackerjack_header(self, state: dict[str, bool]) -> dict[str, bool]:
+        """Handle Crackerjack section header detection."""
+        if not state["inside_crackerjack_section"]:
+            state["inside_crackerjack_section"] = True
+            state["skip_empty_after_crackerjack"] = True
+        return state
+
+    def _should_skip_empty_line(self, stripped: str, state: dict[str, bool]) -> bool:
+        """Check if empty line should be skipped after Crackerjack header."""
+        return state["skip_empty_after_crackerjack"] and not stripped
+
+    def _collect_pattern_if_present(
+        self, stripped: str, parsed: t.Any, state: dict[str, bool]
+    ) -> None:
+        """Collect gitignore pattern if present on this line."""
+        if stripped and not stripped.startswith("#"):
+            parsed.existing_patterns.add(stripped)
+
+    def _add_line_if_non_crackerjack(
+        self, line: str, parsed: t.Any, state: dict[str, bool]
+    ) -> None:
+        """Add line to cleaned output if not in Crackerjack section."""
+        if not state["inside_crackerjack_section"]:
+            parsed.cleaned_lines.append(line)
+
+    def _is_crackerjack_header(self, line: str) -> bool:
+        """Check if a line is a Crackerjack section header."""
+        return line in ("# Crackerjack patterns", "# Crackerjack generated files")
+
+    def _build_merged_gitignore_content(
+        self, parsed_content: t.Any, new_patterns: list[str]
+    ) -> str:
+        """Build the final merged .gitignore content."""
+        # Remove trailing empty line if exists
+        if parsed_content.cleaned_lines and not parsed_content.cleaned_lines[-1]:
+            parsed_content.cleaned_lines.pop()
+
+        merged_content = "\n".join(parsed_content.cleaned_lines)
+        if merged_content:
+            merged_content += "\n"
+
+        # Add consolidated Crackerjack section
+        all_crackerjack_patterns = self._get_consolidated_patterns(
+            parsed_content.existing_patterns, new_patterns
+        )
+
+        if all_crackerjack_patterns:
+            merged_content += "\n# Crackerjack patterns\n"
+            for pattern in sorted(all_crackerjack_patterns):
+                merged_content += f"{pattern}\n"
+
+        return merged_content
+
+    def _get_consolidated_patterns(
+        self, existing_patterns: set[str], new_patterns: list[str]
+    ) -> list[str]:
+        """Get consolidated list of all Crackerjack patterns."""
+        new_patterns_to_add = [p for p in new_patterns if p not in existing_patterns]
+        return list(existing_patterns) + new_patterns_to_add
+
+    def write_pyproject_config(
+        self,
+        config: dict[str, t.Any],
+        target_path: str | t.Any,
+    ) -> None:
+        """Write pyproject.toml config with proper formatting."""
+        target_path = Path(target_path)
+
+        # Use BytesIO for proper TOML encoding
+        buffer = io.BytesIO()
+        tomli_w.dump(config, buffer)
+        content = buffer.getvalue().decode("utf-8")
+
+        # Clean trailing whitespace
+        content = FileSystemService.clean_trailing_whitespace_and_newlines(content)
+
+        with target_path.open("w", encoding="utf-8") as f:
+            f.write(content)
+
+        self.logger.debug("Wrote pyproject.toml config", path=str(target_path))
+
+    def write_pre_commit_config(
+        self,
+        config: dict[str, t.Any],
+        target_path: str | t.Any,
+    ) -> None:
+        """Write .pre-commit-config.yaml with proper formatting."""
+        target_path = Path(target_path)
+
+        yaml_content = yaml.dump(
+            config,
+            default_flow_style=False,
+            sort_keys=False,
+            width=float("inf"),
+        )
+        content = (
+            yaml_content.decode() if isinstance(yaml_content, bytes) else yaml_content
+        )
+        content = content or ""
+
+        # Clean trailing whitespace
+        content = FileSystemService.clean_trailing_whitespace_and_newlines(content)
+
+        with target_path.open("w") as f:
+            f.write(content)
+
+        self.logger.debug("Wrote .pre-commit-config.yaml", path=str(target_path))
+
+    def _ensure_crackerjack_dev_dependency(
+        self,
+        target_config: dict[str, t.Any],
+        source_config: dict[str, t.Any],
+    ) -> None:
+        """Ensure crackerjack is in dev dependencies."""
+        if "dependency-groups" not in target_config:
+            target_config["dependency-groups"] = {}
+
+        if "dev" not in target_config["dependency-groups"]:
+            target_config["dependency-groups"]["dev"] = []
+
+        dev_deps = target_config["dependency-groups"]["dev"]
+        if "crackerjack" not in str(dev_deps):
+            dev_deps.append("crackerjack")
+            self.logger.debug("Added crackerjack to dev dependencies")
+
+    def _merge_tool_configurations(
+        self,
+        target_config: dict[str, t.Any],
+        source_config: dict[str, t.Any],
+        project_name: str,
+    ) -> None:
+        """Merge tool configurations from source to target."""
+        source_tools = source_config.get("tool", {})
+
+        if "tool" not in target_config:
+            target_config["tool"] = {}
+
+        target_tools = target_config["tool"]
+
+        tools_to_merge = [
+            "ruff",
+            "pyright",
+            "bandit",
+            "vulture",
+            "refurb",
+            "complexipy",
+            "codespell",
+            "creosote",
+        ]
+
+        for tool_name in tools_to_merge:
+            if tool_name in source_tools:
+                if tool_name not in target_tools:
+                    target_tools[tool_name] = self._replace_project_name_in_tool_config(
+                        source_tools[tool_name], project_name
+                    )
+                    self.console.print(
+                        f"[green]➕[/green] Added [tool.{tool_name}] configuration"
+                    )
+                else:
+                    self._merge_tool_settings(
+                        target_tools[tool_name],
+                        source_tools[tool_name],
+                        tool_name,
+                        project_name,
+                    )
+
+        # Merge pytest markers
+        self._merge_pytest_markers(target_tools, source_tools)
+
+    def _merge_tool_settings(
+        self,
+        target_tool: dict[str, t.Any],
+        source_tool: dict[str, t.Any],
+        tool_name: str,
+        project_name: str,
+    ) -> None:
+        """Merge individual tool settings."""
+        updated_keys = []
+
+        for key, value in source_tool.items():
+            if key not in target_tool:
+                target_tool[key] = self._replace_project_name_in_config_value(
+                    value, project_name
+                )
+                updated_keys.append(key)
+
+        if updated_keys:
+            self.console.print(
+                f"[yellow]🔄[/yellow] Updated [tool.{tool_name}] with: {', '.join(updated_keys)}"
+            )
+
+    def _merge_pytest_markers(
+        self,
+        target_tools: dict[str, t.Any],
+        source_tools: dict[str, t.Any],
+    ) -> None:
+        """Merge pytest markers avoiding duplicates."""
+        if "pytest" not in source_tools or "pytest" not in target_tools:
+            return
+
+        source_pytest = source_tools["pytest"]
+        target_pytest = target_tools["pytest"]
+
+        if "ini_options" not in source_pytest or "ini_options" not in target_pytest:
+            return
+
+        source_markers = source_pytest["ini_options"].get("markers", [])
+        target_markers = target_pytest["ini_options"].get("markers", [])
+
+        # Extract existing marker names
+        existing_marker_names = {marker.split(": ")[0] for marker in target_markers}
+        new_markers = [
+            marker
+            for marker in source_markers
+            if marker.split(": ")[0] not in existing_marker_names
+        ]
+
+        if new_markers:
+            target_markers.extend(new_markers)
+            self.console.print(
+                f"[green]➕[/green] Added pytest markers: {len(new_markers)}"
+            )
+
+    def _remove_fixed_coverage_requirements(
+        self,
+        target_config: dict[str, t.Any],
+    ) -> None:
+        """Remove fixed coverage requirements to use ratchet system."""
+        target_coverage = (
+            target_config.get("tool", {}).get("pytest", {}).get("ini_options", {})
+        )
+
+        # Remove --cov-fail-under from addopts
+        addopts = target_coverage.get("addopts", "")
+        if isinstance(addopts, str):
+            original_addopts = addopts
+
+            # Remove coverage fail-under flags
+            from crackerjack.services.regex_patterns import remove_coverage_fail_under
+
+            addopts = remove_coverage_fail_under(addopts).strip()
+            addopts = " ".join(addopts.split())  # Normalize whitespace
+
+            if original_addopts != addopts:
+                target_coverage["addopts"] = addopts
+                self.console.print(
+                    "[green]🔄[/green] Removed fixed coverage requirement (using ratchet system)"
+                )
+
+        # Reset coverage.report.fail_under to 0
+        coverage_report = (
+            target_config.get("tool", {}).get("coverage", {}).get("report", {})
+        )
+        if "fail_under" in coverage_report:
+            original_fail_under = coverage_report["fail_under"]
+            coverage_report["fail_under"] = 0
+            self.console.print(
+                f"[green]🔄[/green] Reset coverage.report.fail_under from {original_fail_under} to 0 (ratchet system)"
+            )
+
+    def _replace_project_name_in_tool_config(
+        self, tool_config: dict[str, t.Any], project_name: str
+    ) -> dict[str, t.Any]:
+        """Replace project name in tool configuration."""
+        if project_name == "crackerjack":
+            return tool_config
+
+        result = copy.deepcopy(tool_config)
+        return self._replace_project_name_in_config_value(result, project_name)
+
+    def _replace_project_name_in_config_value(
+        self, value: t.Any, project_name: str
+    ) -> t.Any:
+        """Recursively replace project name in configuration values."""
+        if project_name == "crackerjack":
+            return value
+
+        if isinstance(value, str):
+            return value.replace("crackerjack", project_name)
+        elif isinstance(value, list):
+            return [
+                self._replace_project_name_in_config_value(item, project_name)
+                for item in value
+            ]
+        elif isinstance(value, dict):
+            return {
+                key: self._replace_project_name_in_config_value(val, project_name)
+                for key, val in value.items()
+            }
+        return value
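The new ConfigMergeService is constructed with a Rich console plus filesystem and git services, and each merge method takes the rendered source config plus a target path. The sketch below shows how a caller might drive the .gitignore and marker-based merges; it is a minimal illustration only, and the FileSystemService()/GitService(console, Path.cwd()) constructor calls and the CLAUDE.md marker strings are assumptions, not taken from this diff.

from pathlib import Path

from rich.console import Console

from crackerjack.services.config_merge import ConfigMergeService
from crackerjack.services.filesystem import FileSystemService
from crackerjack.services.git import GitService

console = Console()
# Assumed constructor signatures for the helper services; the diff only shows
# that ConfigMergeService stores them on self.
merger = ConfigMergeService(
    console=console,
    filesystem=FileSystemService(),
    git_service=GitService(console, Path.cwd()),
)

# .gitignore merge: keeps non-Crackerjack lines, rebuilds a single
# "# Crackerjack patterns" section without duplicates, and writes the file.
merger.smart_merge_gitignore(
    patterns=[".venv/", "__pycache__/", ".coverage"],
    target_path=Path.cwd() / ".gitignore",
)

# Marker-based append (e.g. CLAUDE.md): the method returns the merged text
# rather than writing it; force=True replaces an existing marked section.
claude_md = Path.cwd() / "CLAUDE.md"
merged = merger.smart_append_file(
    source_content="Crackerjack agent instructions go here.",
    target_path=claude_md,
    start_marker="<!-- crackerjack:start -->",  # hypothetical marker strings
    end_marker="<!-- crackerjack:end -->",
    force=True,
)
claude_md.write_text(merged)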
crackerjack/services/contextual_ai_assistant.py
@@ -110,14 +110,13 @@ class ContextualAIAssistant:
                     category="testing",
                     priority="high",
                     title="Add Test Suite",
-                    description="No test directory found. Adding tests improves code reliability and enables CI/CD.",
+                    description="No test directory found. Adding tests improves code reliability and enables CI / CD.",
                     action_command="python -m crackerjack -t",
-                    reasoning="Projects without tests have 40% more bugs in production",
+                    reasoning="Projects without tests have 40 % more bugs in production",
                     confidence=0.9,
                 ),
             )
         elif context.test_coverage < 75:
-            # Calculate next milestone
             milestones = [15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100]
             next_milestone = next(
                 (m for m in milestones if m > context.test_coverage), 100
@@ -127,10 +126,10 @@ class ContextualAIAssistant:
                 AIRecommendation(
                     category="testing",
                     priority="medium",
-                    title="Progress Toward 100% Coverage",
-                    description=f"Current coverage: {context.test_coverage:.1f}%. Next milestone: {next_milestone}% on the journey to 100%.",
+                    title="Progress Toward 100 % Coverage",
+                    description=f"Current coverage: {context.test_coverage: .1f}%. Next milestone: {next_milestone}% on the journey to 100 %.",
                     action_command="python -m crackerjack -t",
-                    reasoning="Coverage ratchet system prevents regression and targets 100% coverage incrementally",
+                    reasoning="Coverage ratchet system prevents regression and targets 100 % coverage incrementally",
                     confidence=0.85,
                 ),
             )
@@ -224,8 +223,8 @@ class ContextualAIAssistant:
                     category="workflow",
                     priority="medium",
                     title="Set Up CI / CD Pipeline",
-                    description="No CI/CD configuration found. Automated testing and deployment improve reliability.",
-                    reasoning="CI/CD prevents 60% of deployment issues and improves team productivity",
+                    description="No CI / CD configuration found. Automated testing and deployment improve reliability.",
+                    reasoning="CI / CD prevents 60 % of deployment issues and improves team productivity",
                     confidence=0.8,
                 ),
             )
@@ -328,7 +327,7 @@ class ContextualAIAssistant:

    def _has_ci_cd_config(self) -> bool:
        ci_files = [
-            ".github/workflows",
+            ".github / workflows",
            ".gitlab-ci.yml",
            "azure-pipelines.yml",
            "Jenkinsfile",
@@ -375,7 +374,7 @@ class ContextualAIAssistant:

        with suppress(Exception):
            result = subprocess.run(
-                ["git", "log", "-1", "--format=%ct"],
+                ["git", "log", "- 1", "--format=% ct"],
                check=False,
                capture_output=True,
                text=True,
@@ -395,7 +394,7 @@ class ContextualAIAssistant:

        with suppress(Exception):
            result = subprocess.run(
-                ["uv", "run", "bandit", "-r", ".", "-f", "json"],
+                ["uv", "run", "bandit", "- r", ".", "- f", "json"],
                check=False,
                capture_output=True,
                text=True,
@@ -441,12 +440,12 @@ class ContextualAIAssistant:
    def display_recommendations(self, recommendations: list[AIRecommendation]) -> None:
        if not recommendations:
            self.console.print(
-                "[green]✨ Great job! No immediate recommendations.[/green]",
+                "[green]✨ Great job ! No immediate recommendations.[/ green]",
            )
            return

-        self.console.print("\n[bold cyan]🤖 AI Assistant Recommendations[/bold cyan]")
-        self.console.print("[dim]Based on your current project context[/dim]\n")
+        self.console.print("\n[bold cyan]🤖 AI Assistant Recommendations[/ bold cyan]")
+        self.console.print("[dim]Based on your current project context[/ dim]\n")

        for i, rec in enumerate(recommendations, 1):
            priority_color = {"high": "red", "medium": "yellow", "low": "blue"}.get(
@@ -466,23 +465,23 @@ class ContextualAIAssistant:
            }.get(rec.category, "💡")

            self.console.print(
-                f"[bold]{i}. {category_emoji} {rec.title}[/bold] [{priority_color}]({rec.priority})[/{priority_color}]",
+                f"[bold]{i}. {category_emoji} {rec.title}[/ bold] [{priority_color}]({rec.priority})[/{priority_color}]",
            )
            self.console.print(f" {rec.description}")

            if rec.action_command:
                self.console.print(
-                    f" [dim]Run:[/dim] [cyan]{rec.action_command}[/cyan]",
+                    f" [dim]Run: [/ dim] [cyan]{rec.action_command}[/ cyan]",
                )

            if rec.reasoning:
-                self.console.print(f" [dim italic]💭 {rec.reasoning}[/dim italic]")
+                self.console.print(f" [dim italic]💭 {rec.reasoning}[/ dim italic]")

            confidence_bar = "█" * int(rec.confidence * 10) + "▒" * (
                10 - int(rec.confidence * 10)
            )
            self.console.print(
-                f" [dim]Confidence: [{confidence_bar}] {rec.confidence:.1%}[/dim]",
+                f" [dim]Confidence: [{confidence_bar}] {rec.confidence: .1 %}[/ dim]",
            )

            if i < len(recommendations):
@@ -491,7 +490,6 @@ class ContextualAIAssistant:
    def get_quick_help(self, query: str) -> str:
        query_lower = query.lower()

-        # Check for more specific patterns first
        if "coverage" in query_lower:
            return "Check test coverage with: python -m crackerjack -t\nView HTML report: uv run coverage html"
