crackerjack 0.33.0__py3-none-any.whl → 0.33.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic.
- crackerjack/__main__.py +1350 -34
- crackerjack/adapters/__init__.py +17 -0
- crackerjack/adapters/lsp_client.py +358 -0
- crackerjack/adapters/rust_tool_adapter.py +194 -0
- crackerjack/adapters/rust_tool_manager.py +193 -0
- crackerjack/adapters/skylos_adapter.py +231 -0
- crackerjack/adapters/zuban_adapter.py +560 -0
- crackerjack/agents/base.py +7 -3
- crackerjack/agents/coordinator.py +271 -33
- crackerjack/agents/documentation_agent.py +9 -15
- crackerjack/agents/dry_agent.py +3 -15
- crackerjack/agents/formatting_agent.py +1 -1
- crackerjack/agents/import_optimization_agent.py +36 -180
- crackerjack/agents/performance_agent.py +17 -98
- crackerjack/agents/performance_helpers.py +7 -31
- crackerjack/agents/proactive_agent.py +1 -3
- crackerjack/agents/refactoring_agent.py +16 -85
- crackerjack/agents/refactoring_helpers.py +7 -42
- crackerjack/agents/security_agent.py +9 -48
- crackerjack/agents/test_creation_agent.py +356 -513
- crackerjack/agents/test_specialist_agent.py +0 -4
- crackerjack/api.py +6 -25
- crackerjack/cli/cache_handlers.py +204 -0
- crackerjack/cli/cache_handlers_enhanced.py +683 -0
- crackerjack/cli/facade.py +100 -0
- crackerjack/cli/handlers.py +224 -9
- crackerjack/cli/interactive.py +6 -4
- crackerjack/cli/options.py +642 -55
- crackerjack/cli/utils.py +2 -1
- crackerjack/code_cleaner.py +58 -117
- crackerjack/config/global_lock_config.py +8 -48
- crackerjack/config/hooks.py +53 -62
- crackerjack/core/async_workflow_orchestrator.py +24 -34
- crackerjack/core/autofix_coordinator.py +3 -17
- crackerjack/core/enhanced_container.py +4 -13
- crackerjack/core/file_lifecycle.py +12 -89
- crackerjack/core/performance.py +2 -2
- crackerjack/core/performance_monitor.py +15 -55
- crackerjack/core/phase_coordinator.py +104 -204
- crackerjack/core/resource_manager.py +14 -90
- crackerjack/core/service_watchdog.py +62 -95
- crackerjack/core/session_coordinator.py +149 -0
- crackerjack/core/timeout_manager.py +14 -72
- crackerjack/core/websocket_lifecycle.py +13 -78
- crackerjack/core/workflow_orchestrator.py +171 -174
- crackerjack/docs/INDEX.md +11 -0
- crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
- crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
- crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
- crackerjack/docs/generated/api/SERVICES.md +1252 -0
- crackerjack/documentation/__init__.py +31 -0
- crackerjack/documentation/ai_templates.py +756 -0
- crackerjack/documentation/dual_output_generator.py +765 -0
- crackerjack/documentation/mkdocs_integration.py +518 -0
- crackerjack/documentation/reference_generator.py +977 -0
- crackerjack/dynamic_config.py +55 -50
- crackerjack/executors/async_hook_executor.py +10 -15
- crackerjack/executors/cached_hook_executor.py +117 -43
- crackerjack/executors/hook_executor.py +8 -34
- crackerjack/executors/hook_lock_manager.py +26 -183
- crackerjack/executors/individual_hook_executor.py +13 -11
- crackerjack/executors/lsp_aware_hook_executor.py +270 -0
- crackerjack/executors/tool_proxy.py +417 -0
- crackerjack/hooks/lsp_hook.py +79 -0
- crackerjack/intelligence/adaptive_learning.py +25 -10
- crackerjack/intelligence/agent_orchestrator.py +2 -5
- crackerjack/intelligence/agent_registry.py +34 -24
- crackerjack/intelligence/agent_selector.py +5 -7
- crackerjack/interactive.py +17 -6
- crackerjack/managers/async_hook_manager.py +0 -1
- crackerjack/managers/hook_manager.py +79 -1
- crackerjack/managers/publish_manager.py +44 -8
- crackerjack/managers/test_command_builder.py +1 -15
- crackerjack/managers/test_executor.py +1 -3
- crackerjack/managers/test_manager.py +98 -7
- crackerjack/managers/test_manager_backup.py +10 -9
- crackerjack/mcp/cache.py +2 -2
- crackerjack/mcp/client_runner.py +1 -1
- crackerjack/mcp/context.py +191 -68
- crackerjack/mcp/dashboard.py +7 -5
- crackerjack/mcp/enhanced_progress_monitor.py +31 -28
- crackerjack/mcp/file_monitor.py +30 -23
- crackerjack/mcp/progress_components.py +31 -21
- crackerjack/mcp/progress_monitor.py +50 -53
- crackerjack/mcp/rate_limiter.py +6 -6
- crackerjack/mcp/server_core.py +17 -16
- crackerjack/mcp/service_watchdog.py +2 -1
- crackerjack/mcp/state.py +4 -7
- crackerjack/mcp/task_manager.py +11 -9
- crackerjack/mcp/tools/core_tools.py +173 -32
- crackerjack/mcp/tools/error_analyzer.py +3 -2
- crackerjack/mcp/tools/execution_tools.py +8 -10
- crackerjack/mcp/tools/execution_tools_backup.py +42 -30
- crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
- crackerjack/mcp/tools/intelligence_tools.py +5 -2
- crackerjack/mcp/tools/monitoring_tools.py +33 -70
- crackerjack/mcp/tools/proactive_tools.py +24 -11
- crackerjack/mcp/tools/progress_tools.py +5 -8
- crackerjack/mcp/tools/utility_tools.py +20 -14
- crackerjack/mcp/tools/workflow_executor.py +62 -40
- crackerjack/mcp/websocket/app.py +8 -0
- crackerjack/mcp/websocket/endpoints.py +352 -357
- crackerjack/mcp/websocket/jobs.py +40 -57
- crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
- crackerjack/mcp/websocket/server.py +7 -25
- crackerjack/mcp/websocket/websocket_handler.py +6 -17
- crackerjack/mixins/__init__.py +0 -2
- crackerjack/mixins/error_handling.py +1 -70
- crackerjack/models/config.py +12 -1
- crackerjack/models/config_adapter.py +49 -1
- crackerjack/models/protocols.py +122 -122
- crackerjack/models/resource_protocols.py +55 -210
- crackerjack/monitoring/ai_agent_watchdog.py +13 -13
- crackerjack/monitoring/metrics_collector.py +426 -0
- crackerjack/monitoring/regression_prevention.py +8 -8
- crackerjack/monitoring/websocket_server.py +643 -0
- crackerjack/orchestration/advanced_orchestrator.py +11 -6
- crackerjack/orchestration/coverage_improvement.py +3 -3
- crackerjack/orchestration/execution_strategies.py +26 -6
- crackerjack/orchestration/test_progress_streamer.py +8 -5
- crackerjack/plugins/base.py +2 -2
- crackerjack/plugins/hooks.py +7 -0
- crackerjack/plugins/managers.py +11 -8
- crackerjack/security/__init__.py +0 -1
- crackerjack/security/audit.py +6 -35
- crackerjack/services/anomaly_detector.py +392 -0
- crackerjack/services/api_extractor.py +615 -0
- crackerjack/services/backup_service.py +2 -2
- crackerjack/services/bounded_status_operations.py +15 -152
- crackerjack/services/cache.py +127 -1
- crackerjack/services/changelog_automation.py +395 -0
- crackerjack/services/config.py +15 -9
- crackerjack/services/config_merge.py +19 -80
- crackerjack/services/config_template.py +506 -0
- crackerjack/services/contextual_ai_assistant.py +48 -22
- crackerjack/services/coverage_badge_service.py +171 -0
- crackerjack/services/coverage_ratchet.py +27 -25
- crackerjack/services/debug.py +3 -3
- crackerjack/services/dependency_analyzer.py +460 -0
- crackerjack/services/dependency_monitor.py +14 -11
- crackerjack/services/documentation_generator.py +491 -0
- crackerjack/services/documentation_service.py +675 -0
- crackerjack/services/enhanced_filesystem.py +6 -5
- crackerjack/services/enterprise_optimizer.py +865 -0
- crackerjack/services/error_pattern_analyzer.py +676 -0
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/git.py +8 -25
- crackerjack/services/health_metrics.py +10 -8
- crackerjack/services/heatmap_generator.py +735 -0
- crackerjack/services/initialization.py +11 -30
- crackerjack/services/input_validator.py +5 -97
- crackerjack/services/intelligent_commit.py +327 -0
- crackerjack/services/log_manager.py +15 -12
- crackerjack/services/logging.py +4 -3
- crackerjack/services/lsp_client.py +628 -0
- crackerjack/services/memory_optimizer.py +19 -87
- crackerjack/services/metrics.py +42 -33
- crackerjack/services/parallel_executor.py +9 -67
- crackerjack/services/pattern_cache.py +1 -1
- crackerjack/services/pattern_detector.py +6 -6
- crackerjack/services/performance_benchmarks.py +18 -59
- crackerjack/services/performance_cache.py +20 -81
- crackerjack/services/performance_monitor.py +27 -95
- crackerjack/services/predictive_analytics.py +510 -0
- crackerjack/services/quality_baseline.py +234 -0
- crackerjack/services/quality_baseline_enhanced.py +646 -0
- crackerjack/services/quality_intelligence.py +785 -0
- crackerjack/services/regex_patterns.py +605 -524
- crackerjack/services/regex_utils.py +43 -123
- crackerjack/services/secure_path_utils.py +5 -164
- crackerjack/services/secure_status_formatter.py +30 -141
- crackerjack/services/secure_subprocess.py +11 -92
- crackerjack/services/security.py +9 -41
- crackerjack/services/security_logger.py +12 -24
- crackerjack/services/server_manager.py +124 -16
- crackerjack/services/status_authentication.py +16 -159
- crackerjack/services/status_security_manager.py +4 -131
- crackerjack/services/thread_safe_status_collector.py +19 -125
- crackerjack/services/unified_config.py +21 -13
- crackerjack/services/validation_rate_limiter.py +5 -54
- crackerjack/services/version_analyzer.py +459 -0
- crackerjack/services/version_checker.py +1 -1
- crackerjack/services/websocket_resource_limiter.py +10 -144
- crackerjack/services/zuban_lsp_service.py +390 -0
- crackerjack/slash_commands/__init__.py +2 -7
- crackerjack/slash_commands/run.md +2 -2
- crackerjack/tools/validate_input_validator_patterns.py +14 -40
- crackerjack/tools/validate_regex_patterns.py +19 -48
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.1.dist-info}/METADATA +196 -25
- crackerjack-0.33.1.dist-info/RECORD +229 -0
- crackerjack/CLAUDE.md +0 -207
- crackerjack/RULES.md +0 -380
- crackerjack/py313.py +0 -234
- crackerjack-0.33.0.dist-info/RECORD +0 -187
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.1.dist-info}/WHEEL +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.1.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.1.dist-info}/licenses/LICENSE +0 -0
crackerjack/config/hooks.py
CHANGED

@@ -15,14 +15,10 @@ class RetryPolicy(Enum):


 class SecurityLevel(Enum):
-
-
-
-
-    )
-    HIGH = "high"  # Important but can be bypassed with warning
-    MEDIUM = "medium"  # Standard checks, bypassable
-    LOW = "low"  # Formatting/style, always bypassable
+    CRITICAL = "critical"
+    HIGH = "high"
+    MEDIUM = "medium"
+    LOW = "low"


 @dataclass

@@ -35,14 +31,12 @@ class HookDefinition:
     is_formatting: bool = False
     manual_stage: bool = False
     config_path: Path | None = None
-    security_level: SecurityLevel = SecurityLevel.MEDIUM
+    security_level: SecurityLevel = SecurityLevel.MEDIUM

     def get_command(self) -> list[str]:
-        # Use direct pre-commit execution (pre-commit manages its own environments)
         import shutil
         from pathlib import Path

-        # Find pre-commit executable - prefer project venv, fallback to system
         pre_commit_path = None
         current_dir = Path.cwd()
         project_pre_commit = current_dir / ".venv" / "bin" / "pre-commit"

@@ -51,7 +45,6 @@ class HookDefinition:
         else:
             pre_commit_path = shutil.which("pre-commit") or "pre-commit"

-        # Build command for direct pre-commit execution
         cmd = [pre_commit_path, "run"]
         if self.config_path:
             cmd.extend(["-c", str(self.config_path)])

@@ -74,127 +67,127 @@ class HookStrategy:
 FAST_HOOKS = [
     HookDefinition(
         name="validate-regex-patterns",
-        command=[],
-        is_formatting=True,
+        command=[],
+        is_formatting=True,
         timeout=30,
-        retry_on_failure=True,
-        security_level=SecurityLevel.HIGH,
+        retry_on_failure=True,
+        security_level=SecurityLevel.HIGH,
     ),
     HookDefinition(
         name="trailing-whitespace",
-        command=[],
+        command=[],
         is_formatting=True,
         retry_on_failure=True,
-        security_level=SecurityLevel.LOW,
+        security_level=SecurityLevel.LOW,
     ),
     HookDefinition(
         name="end-of-file-fixer",
-        command=[],
+        command=[],
         is_formatting=True,
         retry_on_failure=True,
-        security_level=SecurityLevel.LOW,
+        security_level=SecurityLevel.LOW,
     ),
     HookDefinition(
         name="check-yaml",
-        command=[],
-        security_level=SecurityLevel.MEDIUM,
+        command=[],
+        security_level=SecurityLevel.MEDIUM,
     ),
     HookDefinition(
         name="check-toml",
-        command=[],
-        security_level=SecurityLevel.MEDIUM,
+        command=[],
+        security_level=SecurityLevel.MEDIUM,
     ),
     HookDefinition(
         name="check-added-large-files",
-        command=[],
-        security_level=SecurityLevel.HIGH,
+        command=[],
+        security_level=SecurityLevel.HIGH,
     ),
     HookDefinition(
         name="uv-lock",
-        command=[],
-        security_level=SecurityLevel.HIGH,
+        command=[],
+        security_level=SecurityLevel.HIGH,
     ),
     HookDefinition(
         name="gitleaks",
-        command=[],
-        security_level=SecurityLevel.CRITICAL,
+        command=[],
+        security_level=SecurityLevel.CRITICAL,
     ),
     HookDefinition(
         name="codespell",
-        command=[],
-        security_level=SecurityLevel.LOW,
+        command=[],
+        security_level=SecurityLevel.LOW,
     ),
     HookDefinition(
         name="ruff-check",
-        command=[],
-        is_formatting=True,
-        retry_on_failure=True,
-        security_level=SecurityLevel.MEDIUM,
+        command=[],
+        is_formatting=True,
+        retry_on_failure=True,
+        security_level=SecurityLevel.MEDIUM,
     ),
     HookDefinition(
         name="ruff-format",
-        command=[],
+        command=[],
         is_formatting=True,
         retry_on_failure=True,
-        security_level=SecurityLevel.LOW,
+        security_level=SecurityLevel.LOW,
     ),
     HookDefinition(
         name="mdformat",
-        command=[],
+        command=[],
         is_formatting=True,
         retry_on_failure=True,
-        security_level=SecurityLevel.LOW,
+        security_level=SecurityLevel.LOW,
     ),
 ]

 COMPREHENSIVE_HOOKS = [
     HookDefinition(
-        name="
-        command=[],
-        timeout=
+        name="zuban",
+        command=[],
+        timeout=30,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.CRITICAL,
+        security_level=SecurityLevel.CRITICAL,
     ),
     HookDefinition(
         name="bandit",
-        command=[],
-        timeout=300,
+        command=[],
+        timeout=300,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.CRITICAL,
+        security_level=SecurityLevel.CRITICAL,
     ),
     HookDefinition(
-        name="
-        command=[],
-        timeout=
+        name="skylos",
+        command=[],
+        timeout=30,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.MEDIUM,
+        security_level=SecurityLevel.MEDIUM,
     ),
     HookDefinition(
         name="refurb",
-        command=[],
-        timeout=300,
+        command=[],
+        timeout=300,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.MEDIUM,
+        security_level=SecurityLevel.MEDIUM,
     ),
     HookDefinition(
         name="creosote",
-        command=[],
-        timeout=300,
+        command=[],
+        timeout=300,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.HIGH,
+        security_level=SecurityLevel.HIGH,
     ),
     HookDefinition(
         name="complexipy",
-        command=[],
+        command=[],
         timeout=60,
         stage=HookStage.COMPREHENSIVE,
         manual_stage=True,
-        security_level=SecurityLevel.MEDIUM,
+        security_level=SecurityLevel.MEDIUM,
     ),
 ]


@@ -209,7 +202,7 @@ FAST_STRATEGY = HookStrategy(
 COMPREHENSIVE_STRATEGY = HookStrategy(
     name="comprehensive",
     hooks=COMPREHENSIVE_HOOKS,
-    timeout=300,
+    timeout=300,
     retry_policy=RetryPolicy.NONE,
 )


@@ -223,5 +216,3 @@ class HookConfigLoader:
             return COMPREHENSIVE_STRATEGY
         msg = f"Unknown hook strategy: {name}"
         raise ValueError(msg)
-
-    # Removed unused method: get_all_strategies
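For readers skimming the hunk above: this release flattens SecurityLevel to bare string values, drops the inline comments, and keeps the pre-commit resolution logic inside HookDefinition.get_command(). The snippet below is a minimal, self-contained sketch of that pattern rather than the shipped module: the field names mirror the diff, but the defaults, the omitted HookStage/HookStrategy wiring, and the final append of the hook id are assumptions filling in for context lines the diff cuts off.

import shutil
from dataclasses import dataclass
from enum import Enum
from pathlib import Path


class SecurityLevel(Enum):
    # Plain string values, as in the 0.33.1 hunk above
    CRITICAL = "critical"
    HIGH = "high"
    MEDIUM = "medium"
    LOW = "low"


@dataclass
class HookDefinition:
    name: str
    command: list[str]
    timeout: int = 60  # assumed default; the real default is not visible in this diff
    is_formatting: bool = False
    retry_on_failure: bool = False
    config_path: Path | None = None
    security_level: SecurityLevel = SecurityLevel.MEDIUM

    def get_command(self) -> list[str]:
        # Prefer the project's virtualenv pre-commit, fall back to whatever is on PATH
        project_pre_commit = Path.cwd() / ".venv" / "bin" / "pre-commit"
        if project_pre_commit.exists():
            pre_commit_path = str(project_pre_commit)
        else:
            pre_commit_path = shutil.which("pre-commit") or "pre-commit"
        cmd = [pre_commit_path, "run"]
        if self.config_path:
            cmd.extend(["-c", str(self.config_path)])
        cmd.append(self.name)  # assumption: the hook id follows the truncated context lines
        return cmd


gitleaks = HookDefinition(name="gitleaks", command=[], security_level=SecurityLevel.CRITICAL)
print(gitleaks.get_command())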
crackerjack/core/async_workflow_orchestrator.py
CHANGED

@@ -84,18 +84,15 @@ class AsyncWorkflowPipeline:
         return success

     async def _cleanup_active_tasks(self) -> None:
-        """Clean up all active tasks."""
         if not self._active_tasks:
             return

         self.logger.info(f"Cleaning up {len(self._active_tasks)} active tasks")

-        # Cancel all active tasks
         for task in self._active_tasks:
             if not task.done():
                 task.cancel()

-        # Wait for tasks to complete with timeout
         if self._active_tasks:
             try:
                 await asyncio.wait_for(

@@ -111,17 +108,19 @@ class AsyncWorkflowPipeline:
         if not options.clean:
             return True

-
+        result = await self.timeout_manager.with_timeout(
             "file_operations",
-            asyncio.to_thread(self.phases.run_cleaning_phase, options),
+            asyncio.to_thread(self.phases.run_cleaning_phase, options),  # type: ignore[arg-type]
             strategy=TimeoutStrategy.RETRY_WITH_BACKOFF,
         )
+        return bool(result)

     async def _execute_quality_phase_async(self, options: OptionsProtocol) -> bool:
         if hasattr(options, "fast") and options.fast:
             return await self._run_fast_hooks_async(options)
         if hasattr(options, "comp") and options.comp:
             return await self._run_comprehensive_hooks_async(options)
+        print(f"DEBUG: options.test = {options.test}")
         if options.test:
             return await self._execute_test_workflow_async(options)
         return await self._execute_standard_hooks_workflow_async(options)

@@ -129,21 +128,17 @@ class AsyncWorkflowPipeline:
     async def _execute_test_workflow_async(self, options: OptionsProtocol) -> bool:
         overall_success = True

-        # Fast hooks with timeout
         if not await self._run_fast_hooks_async(options):
             overall_success = False
             self.session.fail_task("workflow", "Fast hooks failed")
             return False

-        # Run tests and comprehensive hooks in parallel
         try:
             test_task, hooks_task = self._create_parallel_tasks(options)
             done, pending = await self._execute_parallel_tasks(test_task, hooks_task)

-            # Cancel any pending tasks
             await self._cleanup_pending_tasks(pending)

-            # Process and validate results
             test_success, hooks_success = await self._process_task_results(
                 done, test_task, hooks_task
             )

@@ -160,7 +155,6 @@ class AsyncWorkflowPipeline:
     def _create_parallel_tasks(
         self, options: OptionsProtocol
     ) -> tuple[asyncio.Task[bool], asyncio.Task[bool]]:
-        """Create test and hooks tasks with timeout handling."""
         test_task = asyncio.create_task(
             self.timeout_manager.with_timeout(
                 "test_execution",

@@ -180,11 +174,10 @@ class AsyncWorkflowPipeline:
     async def _execute_parallel_tasks(
         self, test_task: asyncio.Task[bool], hooks_task: asyncio.Task[bool]
     ) -> tuple[set[asyncio.Task[bool]], set[asyncio.Task[bool]]]:
-        """Execute tasks in parallel with combined timeout."""
         combined_timeout = (
             self.timeout_manager.get_timeout("test_execution")
             + self.timeout_manager.get_timeout("comprehensive_hooks")
-            + 60
+            + 60
         )

         done, pending = await asyncio.wait(

@@ -196,7 +189,6 @@ class AsyncWorkflowPipeline:
         return done, pending

     async def _cleanup_pending_tasks(self, pending: set[asyncio.Task[t.Any]]) -> None:
-        """Clean up any pending tasks."""
         for task in pending:
             task.cancel()
             try:

@@ -210,7 +202,6 @@ class AsyncWorkflowPipeline:
         test_task: asyncio.Task[bool],
         hooks_task: asyncio.Task[bool],
     ) -> tuple[bool, bool]:
-        """Process results from completed tasks."""
         test_success = hooks_success = False

         for task in done:

@@ -232,7 +223,6 @@ class AsyncWorkflowPipeline:
     def _validate_workflow_results(
         self, test_success: bool, hooks_success: bool, overall_success: bool
     ) -> bool:
-        """Validate workflow results and handle failures."""
         if not test_success:
             overall_success = False
             self.session.fail_task("workflow", "Testing failed")

@@ -261,7 +251,6 @@ class AsyncWorkflowPipeline:
         timeout: float = 300.0,
         task_name: str = "workflow_task",
     ) -> asyncio.Task[t.Any]:
-        """Create a managed task with automatic cleanup."""
         task = asyncio.create_task(coro, name=task_name)

         if self.resource_context:

@@ -271,32 +260,36 @@ class AsyncWorkflowPipeline:
         return task

     async def _run_fast_hooks_async(self, options: OptionsProtocol) -> bool:
-
+        result = await self.timeout_manager.with_timeout(
             "fast_hooks",
-            asyncio.to_thread(self.phases.run_fast_hooks_only, options),
+            asyncio.to_thread(self.phases.run_fast_hooks_only, options),  # type: ignore[arg-type]
             strategy=TimeoutStrategy.RETRY_WITH_BACKOFF,
         )
+        return bool(result)

     async def _run_comprehensive_hooks_async(self, options: OptionsProtocol) -> bool:
-
+        result = await self.timeout_manager.with_timeout(
             "comprehensive_hooks",
-            asyncio.to_thread(self.phases.run_comprehensive_hooks_only, options),
+            asyncio.to_thread(self.phases.run_comprehensive_hooks_only, options),  # type: ignore[arg-type]
             strategy=TimeoutStrategy.GRACEFUL_DEGRADATION,
         )
+        return bool(result)

     async def _run_hooks_phase_async(self, options: OptionsProtocol) -> bool:
-
+        result = await self.timeout_manager.with_timeout(
             "comprehensive_hooks",
-            asyncio.to_thread(self.phases.run_hooks_phase, options),
+            asyncio.to_thread(self.phases.run_hooks_phase, options),  # type: ignore[arg-type]
             strategy=TimeoutStrategy.GRACEFUL_DEGRADATION,
         )
+        return bool(result)

     async def _run_testing_phase_async(self, options: OptionsProtocol) -> bool:
-
+        result = await self.timeout_manager.with_timeout(
             "test_execution",
-            asyncio.to_thread(self.phases.run_testing_phase, options),
+            asyncio.to_thread(self.phases.run_testing_phase, options),  # type: ignore[arg-type]
             strategy=TimeoutStrategy.GRACEFUL_DEGRADATION,
         )
+        return bool(result)

     async def _execute_ai_agent_workflow_async(
         self, options: OptionsProtocol, max_iterations: int = 10

@@ -326,7 +319,6 @@ class AsyncWorkflowPipeline:
             self.console.print(f"\n🔄 Iteration {iteration}/{max_iterations}")

             try:
-                # Each iteration has its own timeout
                 iteration_result = await self.timeout_manager.with_timeout(
                     "workflow_iteration",
                     self._execute_single_iteration(options, iteration),

@@ -342,7 +334,7 @@ class AsyncWorkflowPipeline:
             except Exception as e:
                 self.logger.error(f"Iteration {iteration} failed with error: {e}")
                 self.console.print(f"⚠️ Iteration {iteration} failed: {e}")
-
+
                 if iteration == max_iterations:
                     return False

@@ -419,7 +411,7 @@ class AsyncWorkflowPipeline:

         from crackerjack.agents.base import Issue, IssueType, Priority

-        match = re.search(
+        match = re.search(
             r"refurb: \s*(.+?): (\d+): (\d+)\s+\[(\w+)\]: \s*(.+)", issue
         )
         if match:

@@ -515,7 +507,7 @@ class AsyncWorkflowPipeline:
         try:
             hook_results = await self.timeout_manager.with_timeout(
                 "comprehensive_hooks",
-                asyncio.to_thread(self.phases.hook_manager.run_comprehensive_hooks),
+                asyncio.to_thread(self.phases.hook_manager.run_comprehensive_hooks),  # type: ignore[arg-type]
                 strategy=TimeoutStrategy.GRACEFUL_DEGRADATION,
             )

@@ -550,11 +542,12 @@ class AsyncWorkflowPipeline:
         )

         try:
-
+            result = await self.timeout_manager.with_timeout(
                 "ai_agent_processing",
                 self._execute_ai_fix_workflow(test_issues, hook_issues, iteration),
                 strategy=TimeoutStrategy.GRACEFUL_DEGRADATION,
             )
+            return bool(result)
         except Exception as e:
             return self._handle_ai_fix_error(e)

@@ -569,7 +562,6 @@ class AsyncWorkflowPipeline:

         coordinator = self._create_agent_coordinator()

-        # Apply timeout to AI coordinator processing
         fix_result = await self.timeout_manager.with_timeout(
             "ai_agent_processing",
             coordinator.handle_issues(structured_issues),

@@ -577,9 +569,9 @@ class AsyncWorkflowPipeline:
         )

         self._report_fix_results(fix_result, iteration)
-        return fix_result.success
+        return bool(fix_result.success if fix_result else False)

-    def _create_agent_coordinator(self):
+    def _create_agent_coordinator(self) -> t.Any:
         from crackerjack.agents.base import AgentContext
         from crackerjack.agents.coordinator import AgentCoordinator


@@ -626,7 +618,6 @@ class AsyncWorkflowOrchestrator:
         self.verbose = verbose
         self.debug = debug

-        # Initialize logging first so container creation respects log levels
         self._initialize_logging()

         from crackerjack.models.protocols import (

@@ -676,7 +667,6 @@ class AsyncWorkflowOrchestrator:
         session_id = getattr(self, "web_job_id", None) or str(int(time.time()))[:8]
         debug_log_file = log_manager.create_debug_log_file(session_id)

-        # Set log level based on debug flag only - verbose should not enable DEBUG logs
         log_level = "DEBUG" if self.debug else "INFO"
         setup_structured_logging(
             level=log_level, json_output=False, log_file=debug_log_file
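Many of the hunks above follow the same shape: a synchronous phase method is pushed onto a worker thread with asyncio.to_thread, run under a named timeout, and the result is coerced with bool(result) so that graceful-degradation paths returning None still yield a boolean. The snippet below is a stand-alone approximation of that shape, using plain asyncio.wait_for in place of crackerjack's TimeoutManager and a dummy phase function, neither of which appears in this diff.

import asyncio
import time


def run_fast_hooks_only() -> bool:
    # Stand-in for a synchronous phase method on the phase coordinator
    time.sleep(0.1)
    return True


async def run_phase_with_timeout(timeout_s: float) -> bool:
    try:
        # Off-load the blocking call to a thread and bound it with a timeout,
        # mirroring timeout_manager.with_timeout(..., asyncio.to_thread(...), strategy=...)
        result = await asyncio.wait_for(
            asyncio.to_thread(run_fast_hooks_only), timeout=timeout_s
        )
    except asyncio.TimeoutError:
        # Graceful degradation: treat a timed-out phase as a failure instead of raising
        result = None
    return bool(result)  # None or falsy results become False, as in the diff


if __name__ == "__main__":
    print(asyncio.run(run_phase_with_timeout(5.0)))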
crackerjack/core/autofix_coordinator.py
CHANGED

@@ -11,8 +11,7 @@ class AutofixCoordinator:
         self.console = console
         self.pkg_path = pkg_path
         self.logger = get_logger("crackerjack.autofix")
-
-        # We use setattr to avoid type checker issues since BoundLogger doesn't have a name attribute
+
         setattr(self.logger, "name", "crackerjack.autofix")

     def apply_autofix_for_hooks(self, mode: str, hook_results: list[object]) -> bool:

@@ -62,11 +61,9 @@ class AutofixCoordinator:

         hook_specific_fixes = self._get_hook_specific_fixes(failed_hooks)

-        # Run fast fixes first
         if not self._execute_fast_fixes():
             return False

-        # Apply hook-specific fixes
         all_successful = True
         for cmd, description in hook_specific_fixes:
             if not self._run_fix_command(cmd, description):

@@ -156,19 +153,16 @@ class AutofixCoordinator:
             "removed",
         ]

-        # Handle case where result might be a Mock object in tests
         if hasattr(result, "stdout") and hasattr(result, "stderr"):
-            # Handle the case where stdout/stderr might be Mock objects
             stdout = getattr(result, "stdout", "") or ""
             stderr = getattr(result, "stderr", "") or ""
-
+
             if not isinstance(stdout, str):
                 stdout = str(stdout)
             if not isinstance(stderr, str):
                 stderr = str(stderr)
             output = stdout + stderr
         else:
-            # For test mocks or other objects
             output = str(result)

         output_lower = output.lower()

@@ -179,31 +173,25 @@ class AutofixCoordinator:
         if not cmd:
             return False

-        # Handle CompletedProcess objects or Mock objects with returncode attribute
         if hasattr(result, "returncode"):
             return self._check_process_result_success(result)

-        # Check for string patterns in result
         if isinstance(result, str):
             return self._check_string_result_success(result)

         return False

     def _check_process_result_success(self, result: object) -> bool:
-        """Check if a process result indicates success."""
         if getattr(result, "returncode", 1) == 0:
             return True

-        # Check output for success patterns if return code is non-zero
         output = self._extract_process_output(result)
         return self._has_success_patterns(output)

     def _extract_process_output(self, result: object) -> str:
-        """Extract and normalize stdout and stderr from process result."""
         stdout = getattr(result, "stdout", "") or ""
         stderr = getattr(result, "stderr", "") or ""

-        # Convert to strings if they're not already
         if not isinstance(stdout, str):
             stdout = str(stdout)
         if not isinstance(stderr, str):

@@ -212,11 +200,9 @@ class AutofixCoordinator:
         return stdout + stderr

     def _check_string_result_success(self, result: str) -> bool:
-        """Check if a string result indicates success."""
         return self._has_success_patterns(result)

     def _has_success_patterns(self, output: str) -> bool:
-        """Check if output contains success patterns."""
         if not output:
             return False

@@ -273,7 +259,7 @@ class AutofixCoordinator:
         raw_output = getattr(result, "raw_output", None)
         if raw_output:
             output_lower = raw_output.lower()
-
+
             if (
                 "importerror" in output_lower
                 or "modulenotfounderror" in output_lower
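The AutofixCoordinator hunks above strip comments and docstrings but keep a two-step success check: trust a zero return code, otherwise scan the combined stdout/stderr for success markers. Only one marker ("removed") is visible in the diff context, so the pattern list in the sketch below is a placeholder; the helper structure follows the visible methods.

import subprocess
import sys

# Placeholder markers; the real list in autofix_coordinator.py is only partially visible here.
SUCCESS_PATTERNS = ("fixed", "reformatted", "removed")


def extract_process_output(result: object) -> str:
    # Normalize stdout/stderr to strings (test mocks may supply non-str values)
    stdout = getattr(result, "stdout", "") or ""
    stderr = getattr(result, "stderr", "") or ""
    return str(stdout) + str(stderr)


def has_success_patterns(output: str) -> bool:
    if not output:
        return False
    lowered = output.lower()
    return any(pattern in lowered for pattern in SUCCESS_PATTERNS)


def check_process_result_success(result: object) -> bool:
    # A zero return code is success; otherwise fall back to scanning the output
    if getattr(result, "returncode", 1) == 0:
        return True
    return has_success_patterns(extract_process_output(result))


if __name__ == "__main__":
    completed = subprocess.run(
        [sys.executable, "-c", "print('2 files reformatted')"],
        capture_output=True,
        text=True,
    )
    print(check_process_result_success(completed))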
crackerjack/core/enhanced_container.py
CHANGED

@@ -44,7 +44,7 @@ class ServiceDescriptor:
     created_count: int = 0
     dependencies: list[type] = field(default_factory=list)

-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if self.implementation is self.factory is self.instance is None:
             msg = "Must provide either implementation, factory, or instance"
             raise ValueError(msg)

@@ -115,7 +115,6 @@ class DependencyResolver:
                 dependency = self.container.get(param.annotation)
                 kwargs[param_name] = dependency
             except Exception as e:
-                # Only log as warning for required parameters, debug for optional ones with defaults
                 if param.default == inspect.Parameter.empty:
                     self.logger.warning(
                         "Could not inject dependency",

@@ -147,7 +146,7 @@ class DependencyResolver:

     def _build_constructor_kwargs(self, implementation: type) -> dict[str, Any]:
         init_sig = inspect.signature(implementation.__init__)
-        kwargs = {}
+        kwargs: dict[str, Any] = {}

         for param_name, param in init_sig.parameters.items():
             if param_name == "self":

@@ -301,7 +300,7 @@ class EnhancedDependencyContainer:
         self._current_scope = scope

     def get_service_info(self) -> dict[str, Any]:
-        info = {}
+        info: dict[str, Any] = {}

         with self._lock:
             for key, descriptor in self._services.items():

@@ -392,7 +391,7 @@ class EnhancedDependencyContainer:
     def _get_service_key(self, interface: type) -> str:
         return f"{interface.__module__}.{interface.__name__}"

-    def __enter__(self):
+    def __enter__(self) -> "EnhancedDependencyContainer":
         return self

     def __exit__(

@@ -476,11 +475,9 @@ class ServiceCollectionBuilder:
         return self

     def add_service_protocols(self) -> "ServiceCollectionBuilder":
-        """Add registrations for service protocols that don't have explicit builders."""
         console = self.console or Console(force_terminal=True)
         pkg_path = self.pkg_path or Path.cwd()

-        # Register CoverageRatchetProtocol
         def create_coverage_ratchet() -> CoverageRatchetProtocol:
             from crackerjack.services.coverage_ratchet import CoverageRatchetService

@@ -491,7 +488,6 @@ class ServiceCollectionBuilder:
             factory=create_coverage_ratchet,
         )

-        # Register ConfigurationServiceProtocol
         def create_configuration_service() -> ConfigurationServiceProtocol:
             from crackerjack.services.config import ConfigurationService

@@ -502,7 +498,6 @@ class ServiceCollectionBuilder:
             factory=create_configuration_service,
         )

-        # Register SecurityServiceProtocol
         def create_security_service() -> SecurityServiceProtocol:
             from crackerjack.services.security import SecurityService

@@ -513,7 +508,6 @@ class ServiceCollectionBuilder:
             factory=create_security_service,
         )

-        # Register InitializationServiceProtocol
         def create_initialization_service() -> InitializationServiceProtocol:
             from crackerjack.services.filesystem import FileSystemService
             from crackerjack.services.git import GitService

@@ -536,19 +530,16 @@ class ServiceCollectionBuilder:

         from crackerjack.services.unified_config import UnifiedConfigurationService

-        # Register concrete class for backwards compatibility
         self.container.register_singleton(
             UnifiedConfigurationService,
             factory=lambda: UnifiedConfigurationService(console, pkg_path),
         )

-        # Register protocol interface
         self.container.register_singleton(
             UnifiedConfigurationServiceProtocol,
             factory=lambda: self.container.get(UnifiedConfigurationService),
         )

-        # Register ConfigMergeService for smart configuration merging
         from crackerjack.services.config_merge import ConfigMergeService

         def create_config_merge_service() -> ConfigMergeService: