crackerjack 0.37.9__py3-none-any.whl → 0.45.2__py3-none-any.whl

This diff shows the differences between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (425)
  1. crackerjack/README.md +19 -0
  2. crackerjack/__init__.py +30 -1
  3. crackerjack/__main__.py +342 -1263
  4. crackerjack/adapters/README.md +18 -0
  5. crackerjack/adapters/__init__.py +27 -5
  6. crackerjack/adapters/_output_paths.py +167 -0
  7. crackerjack/adapters/_qa_adapter_base.py +309 -0
  8. crackerjack/adapters/_tool_adapter_base.py +706 -0
  9. crackerjack/adapters/ai/README.md +65 -0
  10. crackerjack/adapters/ai/__init__.py +5 -0
  11. crackerjack/adapters/ai/claude.py +853 -0
  12. crackerjack/adapters/complexity/README.md +53 -0
  13. crackerjack/adapters/complexity/__init__.py +10 -0
  14. crackerjack/adapters/complexity/complexipy.py +641 -0
  15. crackerjack/adapters/dependency/__init__.py +22 -0
  16. crackerjack/adapters/dependency/pip_audit.py +418 -0
  17. crackerjack/adapters/format/README.md +72 -0
  18. crackerjack/adapters/format/__init__.py +11 -0
  19. crackerjack/adapters/format/mdformat.py +313 -0
  20. crackerjack/adapters/format/ruff.py +516 -0
  21. crackerjack/adapters/lint/README.md +47 -0
  22. crackerjack/adapters/lint/__init__.py +11 -0
  23. crackerjack/adapters/lint/codespell.py +273 -0
  24. crackerjack/adapters/lsp/README.md +49 -0
  25. crackerjack/adapters/lsp/__init__.py +27 -0
  26. crackerjack/adapters/{rust_tool_manager.py → lsp/_manager.py} +3 -3
  27. crackerjack/adapters/{skylos_adapter.py → lsp/skylos.py} +59 -7
  28. crackerjack/adapters/{zuban_adapter.py → lsp/zuban.py} +3 -6
  29. crackerjack/adapters/refactor/README.md +59 -0
  30. crackerjack/adapters/refactor/__init__.py +12 -0
  31. crackerjack/adapters/refactor/creosote.py +318 -0
  32. crackerjack/adapters/refactor/refurb.py +406 -0
  33. crackerjack/adapters/refactor/skylos.py +494 -0
  34. crackerjack/adapters/sast/README.md +132 -0
  35. crackerjack/adapters/sast/__init__.py +32 -0
  36. crackerjack/adapters/sast/_base.py +201 -0
  37. crackerjack/adapters/sast/bandit.py +423 -0
  38. crackerjack/adapters/sast/pyscn.py +405 -0
  39. crackerjack/adapters/sast/semgrep.py +241 -0
  40. crackerjack/adapters/security/README.md +111 -0
  41. crackerjack/adapters/security/__init__.py +17 -0
  42. crackerjack/adapters/security/gitleaks.py +339 -0
  43. crackerjack/adapters/type/README.md +52 -0
  44. crackerjack/adapters/type/__init__.py +12 -0
  45. crackerjack/adapters/type/pyrefly.py +402 -0
  46. crackerjack/adapters/type/ty.py +402 -0
  47. crackerjack/adapters/type/zuban.py +522 -0
  48. crackerjack/adapters/utility/README.md +51 -0
  49. crackerjack/adapters/utility/__init__.py +10 -0
  50. crackerjack/adapters/utility/checks.py +884 -0
  51. crackerjack/agents/README.md +264 -0
  52. crackerjack/agents/__init__.py +40 -12
  53. crackerjack/agents/base.py +1 -0
  54. crackerjack/agents/claude_code_bridge.py +641 -0
  55. crackerjack/agents/coordinator.py +49 -53
  56. crackerjack/agents/dry_agent.py +187 -3
  57. crackerjack/agents/enhanced_coordinator.py +279 -0
  58. crackerjack/agents/enhanced_proactive_agent.py +185 -0
  59. crackerjack/agents/error_middleware.py +53 -0
  60. crackerjack/agents/formatting_agent.py +6 -8
  61. crackerjack/agents/helpers/__init__.py +9 -0
  62. crackerjack/agents/helpers/performance/__init__.py +22 -0
  63. crackerjack/agents/helpers/performance/performance_ast_analyzer.py +357 -0
  64. crackerjack/agents/helpers/performance/performance_pattern_detector.py +909 -0
  65. crackerjack/agents/helpers/performance/performance_recommender.py +572 -0
  66. crackerjack/agents/helpers/refactoring/__init__.py +22 -0
  67. crackerjack/agents/helpers/refactoring/code_transformer.py +536 -0
  68. crackerjack/agents/helpers/refactoring/complexity_analyzer.py +344 -0
  69. crackerjack/agents/helpers/refactoring/dead_code_detector.py +437 -0
  70. crackerjack/agents/helpers/test_creation/__init__.py +19 -0
  71. crackerjack/agents/helpers/test_creation/test_ast_analyzer.py +216 -0
  72. crackerjack/agents/helpers/test_creation/test_coverage_analyzer.py +643 -0
  73. crackerjack/agents/helpers/test_creation/test_template_generator.py +1031 -0
  74. crackerjack/agents/performance_agent.py +121 -1152
  75. crackerjack/agents/refactoring_agent.py +156 -655
  76. crackerjack/agents/semantic_agent.py +479 -0
  77. crackerjack/agents/semantic_helpers.py +356 -0
  78. crackerjack/agents/test_creation_agent.py +19 -1605
  79. crackerjack/api.py +5 -7
  80. crackerjack/cli/README.md +394 -0
  81. crackerjack/cli/__init__.py +1 -1
  82. crackerjack/cli/cache_handlers.py +23 -18
  83. crackerjack/cli/cache_handlers_enhanced.py +1 -4
  84. crackerjack/cli/facade.py +70 -8
  85. crackerjack/cli/formatting.py +13 -0
  86. crackerjack/cli/handlers/__init__.py +85 -0
  87. crackerjack/cli/handlers/advanced.py +103 -0
  88. crackerjack/cli/handlers/ai_features.py +62 -0
  89. crackerjack/cli/handlers/analytics.py +479 -0
  90. crackerjack/cli/handlers/changelog.py +271 -0
  91. crackerjack/cli/handlers/config_handlers.py +16 -0
  92. crackerjack/cli/handlers/coverage.py +84 -0
  93. crackerjack/cli/handlers/documentation.py +280 -0
  94. crackerjack/cli/handlers/main_handlers.py +497 -0
  95. crackerjack/cli/handlers/monitoring.py +371 -0
  96. crackerjack/cli/handlers.py +249 -49
  97. crackerjack/cli/interactive.py +8 -5
  98. crackerjack/cli/options.py +203 -110
  99. crackerjack/cli/semantic_handlers.py +292 -0
  100. crackerjack/cli/version.py +19 -0
  101. crackerjack/code_cleaner.py +60 -24
  102. crackerjack/config/README.md +472 -0
  103. crackerjack/config/__init__.py +256 -0
  104. crackerjack/config/global_lock_config.py +191 -54
  105. crackerjack/config/hooks.py +188 -16
  106. crackerjack/config/loader.py +239 -0
  107. crackerjack/config/settings.py +141 -0
  108. crackerjack/config/tool_commands.py +331 -0
  109. crackerjack/core/README.md +393 -0
  110. crackerjack/core/async_workflow_orchestrator.py +79 -53
  111. crackerjack/core/autofix_coordinator.py +22 -9
  112. crackerjack/core/container.py +10 -9
  113. crackerjack/core/enhanced_container.py +9 -9
  114. crackerjack/core/performance.py +1 -1
  115. crackerjack/core/performance_monitor.py +5 -3
  116. crackerjack/core/phase_coordinator.py +1018 -634
  117. crackerjack/core/proactive_workflow.py +3 -3
  118. crackerjack/core/retry.py +275 -0
  119. crackerjack/core/service_watchdog.py +167 -23
  120. crackerjack/core/session_coordinator.py +187 -382
  121. crackerjack/core/timeout_manager.py +161 -44
  122. crackerjack/core/workflow/__init__.py +21 -0
  123. crackerjack/core/workflow/workflow_ai_coordinator.py +863 -0
  124. crackerjack/core/workflow/workflow_event_orchestrator.py +1107 -0
  125. crackerjack/core/workflow/workflow_issue_parser.py +714 -0
  126. crackerjack/core/workflow/workflow_phase_executor.py +1158 -0
  127. crackerjack/core/workflow/workflow_security_gates.py +400 -0
  128. crackerjack/core/workflow_orchestrator.py +1247 -953
  129. crackerjack/data/README.md +11 -0
  130. crackerjack/data/__init__.py +8 -0
  131. crackerjack/data/models.py +79 -0
  132. crackerjack/data/repository.py +210 -0
  133. crackerjack/decorators/README.md +180 -0
  134. crackerjack/decorators/__init__.py +35 -0
  135. crackerjack/decorators/error_handling.py +649 -0
  136. crackerjack/decorators/error_handling_decorators.py +334 -0
  137. crackerjack/decorators/helpers.py +58 -0
  138. crackerjack/decorators/patterns.py +281 -0
  139. crackerjack/decorators/utils.py +58 -0
  140. crackerjack/docs/README.md +11 -0
  141. crackerjack/docs/generated/api/CLI_REFERENCE.md +1 -1
  142. crackerjack/documentation/README.md +11 -0
  143. crackerjack/documentation/ai_templates.py +1 -1
  144. crackerjack/documentation/dual_output_generator.py +11 -9
  145. crackerjack/documentation/reference_generator.py +104 -59
  146. crackerjack/dynamic_config.py +52 -61
  147. crackerjack/errors.py +1 -1
  148. crackerjack/events/README.md +11 -0
  149. crackerjack/events/__init__.py +16 -0
  150. crackerjack/events/telemetry.py +175 -0
  151. crackerjack/events/workflow_bus.py +346 -0
  152. crackerjack/exceptions/README.md +301 -0
  153. crackerjack/exceptions/__init__.py +5 -0
  154. crackerjack/exceptions/config.py +4 -0
  155. crackerjack/exceptions/tool_execution_error.py +245 -0
  156. crackerjack/executors/README.md +591 -0
  157. crackerjack/executors/__init__.py +2 -0
  158. crackerjack/executors/async_hook_executor.py +539 -77
  159. crackerjack/executors/cached_hook_executor.py +3 -3
  160. crackerjack/executors/hook_executor.py +967 -102
  161. crackerjack/executors/hook_lock_manager.py +31 -22
  162. crackerjack/executors/individual_hook_executor.py +66 -32
  163. crackerjack/executors/lsp_aware_hook_executor.py +136 -57
  164. crackerjack/executors/progress_hook_executor.py +282 -0
  165. crackerjack/executors/tool_proxy.py +23 -7
  166. crackerjack/hooks/README.md +485 -0
  167. crackerjack/hooks/lsp_hook.py +8 -9
  168. crackerjack/intelligence/README.md +557 -0
  169. crackerjack/interactive.py +37 -10
  170. crackerjack/managers/README.md +369 -0
  171. crackerjack/managers/async_hook_manager.py +41 -57
  172. crackerjack/managers/hook_manager.py +449 -79
  173. crackerjack/managers/publish_manager.py +81 -36
  174. crackerjack/managers/test_command_builder.py +290 -12
  175. crackerjack/managers/test_executor.py +93 -8
  176. crackerjack/managers/test_manager.py +1082 -75
  177. crackerjack/managers/test_progress.py +118 -26
  178. crackerjack/mcp/README.md +374 -0
  179. crackerjack/mcp/cache.py +25 -2
  180. crackerjack/mcp/client_runner.py +35 -18
  181. crackerjack/mcp/context.py +9 -9
  182. crackerjack/mcp/dashboard.py +24 -8
  183. crackerjack/mcp/enhanced_progress_monitor.py +34 -23
  184. crackerjack/mcp/file_monitor.py +27 -6
  185. crackerjack/mcp/progress_components.py +45 -34
  186. crackerjack/mcp/progress_monitor.py +6 -9
  187. crackerjack/mcp/rate_limiter.py +11 -7
  188. crackerjack/mcp/server.py +2 -0
  189. crackerjack/mcp/server_core.py +187 -55
  190. crackerjack/mcp/service_watchdog.py +12 -9
  191. crackerjack/mcp/task_manager.py +2 -2
  192. crackerjack/mcp/tools/README.md +27 -0
  193. crackerjack/mcp/tools/__init__.py +2 -0
  194. crackerjack/mcp/tools/core_tools.py +75 -52
  195. crackerjack/mcp/tools/execution_tools.py +87 -31
  196. crackerjack/mcp/tools/intelligence_tools.py +2 -2
  197. crackerjack/mcp/tools/proactive_tools.py +1 -1
  198. crackerjack/mcp/tools/semantic_tools.py +584 -0
  199. crackerjack/mcp/tools/utility_tools.py +180 -132
  200. crackerjack/mcp/tools/workflow_executor.py +87 -46
  201. crackerjack/mcp/websocket/README.md +31 -0
  202. crackerjack/mcp/websocket/app.py +11 -1
  203. crackerjack/mcp/websocket/event_bridge.py +188 -0
  204. crackerjack/mcp/websocket/jobs.py +27 -4
  205. crackerjack/mcp/websocket/monitoring/__init__.py +25 -0
  206. crackerjack/mcp/websocket/monitoring/api/__init__.py +19 -0
  207. crackerjack/mcp/websocket/monitoring/api/dependencies.py +141 -0
  208. crackerjack/mcp/websocket/monitoring/api/heatmap.py +154 -0
  209. crackerjack/mcp/websocket/monitoring/api/intelligence.py +199 -0
  210. crackerjack/mcp/websocket/monitoring/api/metrics.py +203 -0
  211. crackerjack/mcp/websocket/monitoring/api/telemetry.py +101 -0
  212. crackerjack/mcp/websocket/monitoring/dashboard.py +18 -0
  213. crackerjack/mcp/websocket/monitoring/factory.py +109 -0
  214. crackerjack/mcp/websocket/monitoring/filters.py +10 -0
  215. crackerjack/mcp/websocket/monitoring/metrics.py +64 -0
  216. crackerjack/mcp/websocket/monitoring/models.py +90 -0
  217. crackerjack/mcp/websocket/monitoring/utils.py +171 -0
  218. crackerjack/mcp/websocket/monitoring/websocket_manager.py +78 -0
  219. crackerjack/mcp/websocket/monitoring/websockets/__init__.py +17 -0
  220. crackerjack/mcp/websocket/monitoring/websockets/dependencies.py +126 -0
  221. crackerjack/mcp/websocket/monitoring/websockets/heatmap.py +176 -0
  222. crackerjack/mcp/websocket/monitoring/websockets/intelligence.py +291 -0
  223. crackerjack/mcp/websocket/monitoring/websockets/metrics.py +291 -0
  224. crackerjack/mcp/websocket/monitoring_endpoints.py +16 -2930
  225. crackerjack/mcp/websocket/server.py +1 -3
  226. crackerjack/mcp/websocket/websocket_handler.py +107 -6
  227. crackerjack/models/README.md +308 -0
  228. crackerjack/models/__init__.py +10 -1
  229. crackerjack/models/config.py +639 -22
  230. crackerjack/models/config_adapter.py +6 -6
  231. crackerjack/models/protocols.py +1167 -23
  232. crackerjack/models/pydantic_models.py +320 -0
  233. crackerjack/models/qa_config.py +145 -0
  234. crackerjack/models/qa_results.py +134 -0
  235. crackerjack/models/results.py +35 -0
  236. crackerjack/models/semantic_models.py +258 -0
  237. crackerjack/models/task.py +19 -3
  238. crackerjack/models/test_models.py +60 -0
  239. crackerjack/monitoring/README.md +11 -0
  240. crackerjack/monitoring/ai_agent_watchdog.py +5 -4
  241. crackerjack/monitoring/metrics_collector.py +4 -3
  242. crackerjack/monitoring/regression_prevention.py +4 -3
  243. crackerjack/monitoring/websocket_server.py +4 -241
  244. crackerjack/orchestration/README.md +340 -0
  245. crackerjack/orchestration/__init__.py +43 -0
  246. crackerjack/orchestration/advanced_orchestrator.py +20 -67
  247. crackerjack/orchestration/cache/README.md +312 -0
  248. crackerjack/orchestration/cache/__init__.py +37 -0
  249. crackerjack/orchestration/cache/memory_cache.py +338 -0
  250. crackerjack/orchestration/cache/tool_proxy_cache.py +340 -0
  251. crackerjack/orchestration/config.py +297 -0
  252. crackerjack/orchestration/coverage_improvement.py +13 -6
  253. crackerjack/orchestration/execution_strategies.py +6 -6
  254. crackerjack/orchestration/hook_orchestrator.py +1398 -0
  255. crackerjack/orchestration/strategies/README.md +401 -0
  256. crackerjack/orchestration/strategies/__init__.py +39 -0
  257. crackerjack/orchestration/strategies/adaptive_strategy.py +630 -0
  258. crackerjack/orchestration/strategies/parallel_strategy.py +237 -0
  259. crackerjack/orchestration/strategies/sequential_strategy.py +299 -0
  260. crackerjack/orchestration/test_progress_streamer.py +1 -1
  261. crackerjack/plugins/README.md +11 -0
  262. crackerjack/plugins/hooks.py +3 -2
  263. crackerjack/plugins/loader.py +3 -3
  264. crackerjack/plugins/managers.py +1 -1
  265. crackerjack/py313.py +191 -0
  266. crackerjack/security/README.md +11 -0
  267. crackerjack/services/README.md +374 -0
  268. crackerjack/services/__init__.py +8 -21
  269. crackerjack/services/ai/README.md +295 -0
  270. crackerjack/services/ai/__init__.py +7 -0
  271. crackerjack/services/ai/advanced_optimizer.py +878 -0
  272. crackerjack/services/{contextual_ai_assistant.py → ai/contextual_ai_assistant.py} +5 -3
  273. crackerjack/services/ai/embeddings.py +444 -0
  274. crackerjack/services/ai/intelligent_commit.py +328 -0
  275. crackerjack/services/ai/predictive_analytics.py +510 -0
  276. crackerjack/services/api_extractor.py +5 -3
  277. crackerjack/services/bounded_status_operations.py +45 -5
  278. crackerjack/services/cache.py +249 -318
  279. crackerjack/services/changelog_automation.py +7 -3
  280. crackerjack/services/command_execution_service.py +305 -0
  281. crackerjack/services/config_integrity.py +83 -39
  282. crackerjack/services/config_merge.py +9 -6
  283. crackerjack/services/config_service.py +198 -0
  284. crackerjack/services/config_template.py +13 -26
  285. crackerjack/services/coverage_badge_service.py +6 -4
  286. crackerjack/services/coverage_ratchet.py +53 -27
  287. crackerjack/services/debug.py +18 -7
  288. crackerjack/services/dependency_analyzer.py +4 -4
  289. crackerjack/services/dependency_monitor.py +13 -13
  290. crackerjack/services/documentation_generator.py +4 -2
  291. crackerjack/services/documentation_service.py +62 -33
  292. crackerjack/services/enhanced_filesystem.py +81 -27
  293. crackerjack/services/enterprise_optimizer.py +1 -1
  294. crackerjack/services/error_pattern_analyzer.py +10 -10
  295. crackerjack/services/file_filter.py +221 -0
  296. crackerjack/services/file_hasher.py +5 -7
  297. crackerjack/services/file_io_service.py +361 -0
  298. crackerjack/services/file_modifier.py +615 -0
  299. crackerjack/services/filesystem.py +80 -109
  300. crackerjack/services/git.py +99 -5
  301. crackerjack/services/health_metrics.py +4 -6
  302. crackerjack/services/heatmap_generator.py +12 -3
  303. crackerjack/services/incremental_executor.py +380 -0
  304. crackerjack/services/initialization.py +101 -49
  305. crackerjack/services/log_manager.py +2 -2
  306. crackerjack/services/logging.py +120 -68
  307. crackerjack/services/lsp_client.py +12 -12
  308. crackerjack/services/memory_optimizer.py +27 -22
  309. crackerjack/services/monitoring/README.md +30 -0
  310. crackerjack/services/monitoring/__init__.py +9 -0
  311. crackerjack/services/monitoring/dependency_monitor.py +678 -0
  312. crackerjack/services/monitoring/error_pattern_analyzer.py +676 -0
  313. crackerjack/services/monitoring/health_metrics.py +716 -0
  314. crackerjack/services/monitoring/metrics.py +587 -0
  315. crackerjack/services/{performance_benchmarks.py → monitoring/performance_benchmarks.py} +100 -14
  316. crackerjack/services/{performance_cache.py → monitoring/performance_cache.py} +21 -15
  317. crackerjack/services/{performance_monitor.py → monitoring/performance_monitor.py} +10 -6
  318. crackerjack/services/parallel_executor.py +166 -55
  319. crackerjack/services/patterns/__init__.py +142 -0
  320. crackerjack/services/patterns/agents.py +107 -0
  321. crackerjack/services/patterns/code/__init__.py +15 -0
  322. crackerjack/services/patterns/code/detection.py +118 -0
  323. crackerjack/services/patterns/code/imports.py +107 -0
  324. crackerjack/services/patterns/code/paths.py +159 -0
  325. crackerjack/services/patterns/code/performance.py +119 -0
  326. crackerjack/services/patterns/code/replacement.py +36 -0
  327. crackerjack/services/patterns/core.py +212 -0
  328. crackerjack/services/patterns/documentation/__init__.py +14 -0
  329. crackerjack/services/patterns/documentation/badges_markdown.py +96 -0
  330. crackerjack/services/patterns/documentation/comments_blocks.py +83 -0
  331. crackerjack/services/patterns/documentation/docstrings.py +89 -0
  332. crackerjack/services/patterns/formatting.py +226 -0
  333. crackerjack/services/patterns/operations.py +339 -0
  334. crackerjack/services/patterns/security/__init__.py +23 -0
  335. crackerjack/services/patterns/security/code_injection.py +122 -0
  336. crackerjack/services/patterns/security/credentials.py +190 -0
  337. crackerjack/services/patterns/security/path_traversal.py +221 -0
  338. crackerjack/services/patterns/security/unsafe_operations.py +216 -0
  339. crackerjack/services/patterns/templates.py +62 -0
  340. crackerjack/services/patterns/testing/__init__.py +18 -0
  341. crackerjack/services/patterns/testing/error_patterns.py +107 -0
  342. crackerjack/services/patterns/testing/pytest_output.py +126 -0
  343. crackerjack/services/patterns/tool_output/__init__.py +16 -0
  344. crackerjack/services/patterns/tool_output/bandit.py +72 -0
  345. crackerjack/services/patterns/tool_output/other.py +97 -0
  346. crackerjack/services/patterns/tool_output/pyright.py +67 -0
  347. crackerjack/services/patterns/tool_output/ruff.py +44 -0
  348. crackerjack/services/patterns/url_sanitization.py +114 -0
  349. crackerjack/services/patterns/utilities.py +42 -0
  350. crackerjack/services/patterns/utils.py +339 -0
  351. crackerjack/services/patterns/validation.py +46 -0
  352. crackerjack/services/patterns/versioning.py +62 -0
  353. crackerjack/services/predictive_analytics.py +21 -8
  354. crackerjack/services/profiler.py +280 -0
  355. crackerjack/services/quality/README.md +415 -0
  356. crackerjack/services/quality/__init__.py +11 -0
  357. crackerjack/services/quality/anomaly_detector.py +392 -0
  358. crackerjack/services/quality/pattern_cache.py +333 -0
  359. crackerjack/services/quality/pattern_detector.py +479 -0
  360. crackerjack/services/quality/qa_orchestrator.py +491 -0
  361. crackerjack/services/{quality_baseline.py → quality/quality_baseline.py} +163 -2
  362. crackerjack/services/{quality_baseline_enhanced.py → quality/quality_baseline_enhanced.py} +4 -1
  363. crackerjack/services/{quality_intelligence.py → quality/quality_intelligence.py} +180 -16
  364. crackerjack/services/regex_patterns.py +58 -2987
  365. crackerjack/services/regex_utils.py +55 -29
  366. crackerjack/services/secure_status_formatter.py +42 -15
  367. crackerjack/services/secure_subprocess.py +35 -2
  368. crackerjack/services/security.py +16 -8
  369. crackerjack/services/server_manager.py +40 -51
  370. crackerjack/services/smart_scheduling.py +46 -6
  371. crackerjack/services/status_authentication.py +3 -3
  372. crackerjack/services/thread_safe_status_collector.py +1 -0
  373. crackerjack/services/tool_filter.py +368 -0
  374. crackerjack/services/tool_version_service.py +9 -5
  375. crackerjack/services/unified_config.py +43 -351
  376. crackerjack/services/vector_store.py +689 -0
  377. crackerjack/services/version_analyzer.py +6 -4
  378. crackerjack/services/version_checker.py +14 -8
  379. crackerjack/services/zuban_lsp_service.py +5 -4
  380. crackerjack/slash_commands/README.md +11 -0
  381. crackerjack/slash_commands/init.md +2 -12
  382. crackerjack/slash_commands/run.md +84 -50
  383. crackerjack/tools/README.md +11 -0
  384. crackerjack/tools/__init__.py +30 -0
  385. crackerjack/tools/_git_utils.py +105 -0
  386. crackerjack/tools/check_added_large_files.py +139 -0
  387. crackerjack/tools/check_ast.py +105 -0
  388. crackerjack/tools/check_json.py +103 -0
  389. crackerjack/tools/check_jsonschema.py +297 -0
  390. crackerjack/tools/check_toml.py +103 -0
  391. crackerjack/tools/check_yaml.py +110 -0
  392. crackerjack/tools/codespell_wrapper.py +72 -0
  393. crackerjack/tools/end_of_file_fixer.py +202 -0
  394. crackerjack/tools/format_json.py +128 -0
  395. crackerjack/tools/mdformat_wrapper.py +114 -0
  396. crackerjack/tools/trailing_whitespace.py +198 -0
  397. crackerjack/tools/validate_regex_patterns.py +7 -3
  398. crackerjack/ui/README.md +11 -0
  399. crackerjack/ui/dashboard_renderer.py +28 -0
  400. crackerjack/ui/templates/README.md +11 -0
  401. crackerjack/utils/console_utils.py +13 -0
  402. crackerjack/utils/dependency_guard.py +230 -0
  403. crackerjack/utils/retry_utils.py +275 -0
  404. crackerjack/workflows/README.md +590 -0
  405. crackerjack/workflows/__init__.py +46 -0
  406. crackerjack/workflows/actions.py +811 -0
  407. crackerjack/workflows/auto_fix.py +444 -0
  408. crackerjack/workflows/container_builder.py +499 -0
  409. crackerjack/workflows/definitions.py +443 -0
  410. crackerjack/workflows/engine.py +177 -0
  411. crackerjack/workflows/event_bridge.py +242 -0
  412. {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/METADATA +678 -98
  413. crackerjack-0.45.2.dist-info/RECORD +478 -0
  414. {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/WHEEL +1 -1
  415. crackerjack/managers/test_manager_backup.py +0 -1075
  416. crackerjack/mcp/tools/execution_tools_backup.py +0 -1011
  417. crackerjack/mixins/__init__.py +0 -3
  418. crackerjack/mixins/error_handling.py +0 -145
  419. crackerjack/services/config.py +0 -358
  420. crackerjack/ui/server_panels.py +0 -125
  421. crackerjack-0.37.9.dist-info/RECORD +0 -231
  422. /crackerjack/adapters/{rust_tool_adapter.py → lsp/_base.py} +0 -0
  423. /crackerjack/adapters/{lsp_client.py → lsp/_client.py} +0 -0
  424. {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/entry_points.txt +0 -0
  425. {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/licenses/LICENSE +0 -0
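
The centerpiece of this release is the new ACB-backed orchestration layer; its largest new module, crackerjack/orchestration/hook_orchestrator.py (entry 254 above, +1398 lines), is shown in full below. For orientation, here is a minimal usage sketch adapted from that module's own docstring example; `fast_strategy` is a placeholder for a `HookStrategy` built elsewhere, and the sketch assumes the ACB `depends` container is configured the way the module expects:

```python
import asyncio

from acb.depends import depends
from crackerjack.orchestration import HookOrchestratorAdapter


async def run_fast_hooks(fast_strategy):
    # Resolve the orchestrator through ACB dependency injection, then let it
    # build its hook dependency graph and (optionally) a cache adapter.
    orchestrator = await depends.get(HookOrchestratorAdapter)
    await orchestrator.init()

    # The default "acb" mode calls the tool adapters directly; "legacy" mode
    # instead delegates to the pre-commit CLI and requires a HookExecutor
    # to have been passed at construction time.
    return await orchestrator.execute_strategy(strategy=fast_strategy)


# results = asyncio.run(run_fast_hooks(fast_strategy))
```

Each `HookResult` in the returned list carries the hook name, status, duration, and any issues found, as defined in crackerjack/models/task.py.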
crackerjack/orchestration/hook_orchestrator.py
@@ -0,0 +1,1398 @@
+"""Hook Orchestrator for ACB integration.
+
+ACB-powered orchestration layer managing hook lifecycle, dependency resolution,
+and execution strategies. Supports dual execution modes for gradual migration.
+
+ACB Patterns:
+- MODULE_ID and MODULE_STATUS at module level
+- depends.set() registration after class definition
+- Structured logging with context fields
+- Protocol-based interfaces
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import typing as t
+from collections import Counter
+from contextlib import suppress
+from typing import cast
+from uuid import UUID
+
+from acb.depends import depends
+from pydantic import BaseModel, Field
+
+from crackerjack.config.hooks import HookDefinition, HookStrategy
+from crackerjack.events import WorkflowEvent, WorkflowEventBus
+from crackerjack.models.qa_results import QAResultStatus
+from crackerjack.models.task import HookResult
+
+if t.TYPE_CHECKING:
+    from crackerjack.executors.hook_executor import HookExecutor
+    from crackerjack.orchestration.cache.memory_cache import MemoryCacheAdapter
+    from crackerjack.orchestration.cache.tool_proxy_cache import ToolProxyCacheAdapter
+    from crackerjack.orchestration.execution_strategies import ExecutionContext
+
+# ACB Module Registration (REQUIRED)
+MODULE_ID = UUID(
+    "01937d86-ace0-7000-8000-000000000003"
+)  # Static UUID7 for reproducible module identity
+MODULE_STATUS = "stable"
+
+# Module-level logger for structured logging
+logger = logging.getLogger(__name__)
+
+
+class HookOrchestratorSettings(BaseModel):
+    """Settings for hook orchestration."""
+
+    max_parallel_hooks: int = Field(default=3, ge=1, le=10)
+    default_timeout: int = Field(default=300, ge=30, le=1800)
+    enable_caching: bool = True
+    enable_dependency_resolution: bool = True
+    retry_on_failure: bool = False
+    cache_backend: str = Field(
+        default="tool_proxy", pattern="^(tool_proxy|redis|memory)$"
+    )
+    execution_mode: str = Field(default="acb", pattern="^(legacy|acb)$")
+    # Phase 5-7: Triple parallelism settings
+    enable_adaptive_execution: bool = True  # Use adaptive strategy (dependency-aware)
+
+
+class HookOrchestratorAdapter:
+    """ACB-powered hook orchestration layer.
+
+    Manages hook lifecycle, dependency resolution, and execution strategies.
+    Supports dual execution mode: pre-commit CLI (legacy) and direct adapters (ACB).
+
+    Features:
+    - Async parallel execution with resource limits
+    - Dependency resolution between hooks
+    - Content-based caching integration
+    - Dual execution mode for gradual migration
+    - Comprehensive structured logging
+
+    Architecture:
+    - Legacy mode (Phase 3-7): Delegates to existing HookExecutor
+    - ACB mode (Phase 8+): Direct adapter.check() calls via depends.get()
+
+    Example:
+        ```python
+        from acb.depends import depends
+        from crackerjack.orchestration import HookOrchestratorAdapter
+
+        # Initialize orchestrator
+        orchestrator = await depends.get(HookOrchestratorAdapter)
+        await orchestrator.init()
+
+        # Execute strategy (legacy mode during Phase 3-7)
+        results = await orchestrator.execute_strategy(
+            strategy=fast_strategy, execution_mode="legacy"
+        )
+        ```
+    """
+
+    def __init__(
+        self,
+        settings: HookOrchestratorSettings | None = None,
+        hook_executor: HookExecutor | None = None,
+        cache_adapter: ToolProxyCacheAdapter | MemoryCacheAdapter | None = None,
+        event_bus: WorkflowEventBus | None = None,
+        execution_context: ExecutionContext | None = None,
+    ) -> None:
+        """Initialize Hook Orchestrator.
+
+        Args:
+            settings: Optional settings override
+            hook_executor: Optional HookExecutor for legacy mode delegation
+            cache_adapter: Optional cache adapter (auto-selected from settings.cache_backend if not provided)
+            execution_context: Optional execution context for adapters that need it
+        """
+        self.settings = settings or HookOrchestratorSettings()
+        self._hook_executor = hook_executor
+        self._cache_adapter = cache_adapter
+        self._dependency_graph: dict[str, list[str]] = {}
+        self.execution_context = execution_context
+        self._initialized = False
+        self._cache_hits = 0
+        self._cache_misses = 0
+        self._event_bus = event_bus or self._resolve_event_bus()
+
+        logger.debug(
+            "HookOrchestratorAdapter initialized",
+            extra={
+                "has_settings": settings is not None,
+                "has_executor": hook_executor is not None,
+                "has_cache": cache_adapter is not None,
+            },
+        )
+
+    @staticmethod
+    def _resolve_event_bus() -> WorkflowEventBus | None:
+        """Resolve workflow event bus from dependency injection."""
+        try:
+            return cast(WorkflowEventBus, depends.get_sync(WorkflowEventBus))
+        except Exception:
+            logger.debug("Workflow event bus not available during orchestrator setup")
+            return None
+
+    async def init(self) -> None:
+        """Initialize orchestrator and build dependency graph."""
+        if self._initialized:
+            logger.debug("HookOrchestratorAdapter already initialized")
+            return
+
+        # Build dependency graph for hook execution order
+        self._build_dependency_graph()
+
+        # Initialize cache adapter if caching enabled
+        if self.settings.enable_caching and not self._cache_adapter:
+            logger.debug(
+                "Initializing cache adapter",
+                extra={"cache_backend": self.settings.cache_backend},
+            )
+
+            # Auto-select cache backend
+            if self.settings.cache_backend == "tool_proxy":
+                from crackerjack.orchestration.cache.tool_proxy_cache import (
+                    ToolProxyCacheAdapter,
+                )
+
+                self._cache_adapter = ToolProxyCacheAdapter()
+            elif self.settings.cache_backend == "memory":
+                from crackerjack.orchestration.cache.memory_cache import (
+                    MemoryCacheAdapter,
+                )
+
+                self._cache_adapter = MemoryCacheAdapter()
+            else:
+                logger.warning(
+                    f"Unknown cache backend: {self.settings.cache_backend}, disabling caching"
+                )
+                self.settings.enable_caching = False
+
+        # Initialize cache if provided
+        if self._cache_adapter:
+            await self._cache_adapter.init()
+            logger.debug("Cache adapter initialized")
+
+        self._initialized = True
+        logger.info(
+            "HookOrchestratorAdapter initialization complete",
+            extra={
+                "max_parallel_hooks": self.settings.max_parallel_hooks,
+                "enable_caching": self.settings.enable_caching,
+                "enable_dependency_resolution": self.settings.enable_dependency_resolution,
+                "execution_mode": self.settings.execution_mode,
+                "dependency_count": len(self._dependency_graph),
+                "cache_backend": self.settings.cache_backend
+                if self.settings.enable_caching
+                else "disabled",
+            },
+        )
+
+    @property
+    def module_id(self) -> UUID:
+        """Reference to module-level MODULE_ID."""
+        return MODULE_ID
+
+    @property
+    def adapter_name(self) -> str:
+        """Human-readable adapter name."""
+        return "Hook Orchestrator"
+
+    def _build_dependency_graph(self) -> None:
+        """Build dependency graph for hook execution order.
+
+        Dependency rules:
+        - gitleaks must run before bandit (secrets before security)
+        - zuban must run before refurb (types before refactoring)
+        - formatting hooks run first (ruff-format, mdformat)
+        - validation hooks run early (check-yaml, check-toml)
+        """
+        self._dependency_graph = {
+            # Gitleaks before security analysis
+            "bandit": ["gitleaks"],
+            "skylos": ["gitleaks"],
+            # Type checking before refactoring
+            "refurb": ["zuban"],
+            "creosote": ["zuban"],
+            # Formatting before linting
+            "ruff-check": ["ruff-format"],
+            "codespell": ["ruff-format", "mdformat"],
+            # Complexity analysis after refactoring
+            "complexipy": ["refurb"],
+        }
+
+        logger.debug(
+            "Built hook dependency graph",
+            extra={
+                "dependency_count": len(self._dependency_graph),
+                "dependent_hooks": list(self._dependency_graph.keys()),
+            },
+        )
+
+    async def execute_strategy(
+        self,
+        strategy: HookStrategy,
+        execution_mode: str | None = None,
+        progress_callback: t.Callable[[int, int], None] | None = None,
+        progress_start_callback: t.Callable[[int, int], None] | None = None,
+        execution_context: ExecutionContext | None = None,
+    ) -> list[HookResult]:
+        """Execute hook strategy with specified mode.
+
+        Args:
+            strategy: Hook strategy (fast or comprehensive)
+            execution_mode: "legacy" (pre-commit CLI) or "acb" (direct adapters)
+                Defaults to settings.execution_mode if not specified
+            progress_callback: Optional callback(completed, total) for progress updates
+
+        Returns:
+            List of HookResult objects
+
+        Raises:
+            ValueError: If execution_mode is invalid
+            RuntimeError: If orchestrator not initialized
+        """
+        if not self._initialized:
+            raise RuntimeError("HookOrchestrator not initialized. Call init() first.")
+
+        mode = execution_mode or self.settings.execution_mode
+
+        logger.info(
+            "Executing hook strategy",
+            extra={
+                "strategy_name": strategy.name,
+                "hook_count": len(strategy.hooks),
+                "execution_mode": mode,
+                "parallel": strategy.parallel,
+                "max_workers": strategy.max_workers,
+            },
+        )
+
+        await self._publish_event(
+            WorkflowEvent.HOOK_STRATEGY_STARTED,
+            {
+                "strategy": strategy.name,
+                "execution_mode": mode,
+                "hook_count": len(strategy.hooks),
+            },
+        )
+
+        try:
+            if mode == "legacy":
+                results = await self._execute_legacy_mode(strategy)
+            elif mode == "acb":
+                results = await self._execute_acb_mode(
+                    strategy,
+                    progress_callback,
+                    progress_start_callback,
+                )
+            else:
+                raise ValueError(
+                    f"Invalid execution mode: {mode}. Must be 'legacy' or 'acb'"
+                )
+        except Exception as exc:
+            await self._publish_event(
+                WorkflowEvent.HOOK_STRATEGY_FAILED,
+                {
+                    "strategy": strategy.name,
+                    "execution_mode": mode,
+                    "error": str(exc),
+                },
+            )
+            raise
+
+        await self._publish_event(
+            WorkflowEvent.HOOK_STRATEGY_COMPLETED,
+            {
+                "strategy": strategy.name,
+                "execution_mode": mode,
+                "summary": self._summarize_results(results),
+            },
+        )
+
+        return results
+
+    async def _execute_legacy_mode(self, strategy: HookStrategy) -> list[HookResult]:
+        """Execute hooks via pre-commit CLI (existing HookExecutor).
+
+        This is the bridge to the existing system during Phase 3-7.
+        Delegates to HookExecutor which calls pre-commit CLI via subprocess.
+
+        Args:
+            strategy: Hook strategy to execute
+
+        Returns:
+            List of HookResult objects from HookExecutor
+
+        Raises:
+            RuntimeError: If HookExecutor not provided during initialization
+        """
+        logger.debug(
+            "Using legacy pre-commit execution mode",
+            extra={
+                "strategy_name": strategy.name,
+                "has_executor": self._hook_executor is not None,
+            },
+        )
+
+        if not self._hook_executor:
+            raise RuntimeError(
+                "Legacy mode requires HookExecutor. "
+                "Pass hook_executor during initialization or use execution_mode='acb'"
+            )
+
+        # Delegate to existing HookExecutor
+        # This maintains full backward compatibility with current system
+        execution_result = self._hook_executor.execute_strategy(strategy)
+
+        logger.info(
+            "Legacy mode execution complete",
+            extra={
+                "strategy_name": strategy.name,
+                "total_hooks": len(execution_result.results),
+                "passed": execution_result.passed_count,
+                "failed": execution_result.failed_count,
+                "duration": execution_result.total_duration,
+            },
+        )
+
+        return execution_result.results
+
+    async def _execute_acb_mode(
+        self,
+        strategy: HookStrategy,
+        progress_callback: t.Callable[[int, int], None] | None = None,
+        progress_start_callback: t.Callable[[int, int], None] | None = None,
+    ) -> list[HookResult]:
+        """Execute hooks via direct adapter calls (ACB-powered).
+
+        This is the target architecture for Phase 8+.
+        Calls adapter.check() directly via depends.get() instead of subprocess.
+
+        Args:
+            strategy: Hook strategy to execute
+            progress_callback: Optional callback(completed, total) for progress updates
+
+        Returns:
+            List of HookResult objects from direct adapter execution
+        """
+        logger.debug(
+            "Using ACB direct adapter execution mode",
+            extra={
+                "strategy_name": strategy.name,
+                "enable_adaptive_execution": self.settings.enable_adaptive_execution,
+            },
+        )
+
+        # NEW Phase 5-7: Use adaptive strategy for dependency-aware parallel execution
+        if self.settings.enable_adaptive_execution:
+            from crackerjack.orchestration.strategies.adaptive_strategy import (
+                AdaptiveExecutionStrategy,
+            )
+
+            logger.info(
+                "Using adaptive execution strategy with dependency-aware batching",
+                extra={
+                    "strategy_name": strategy.name,
+                    "max_parallel": strategy.max_workers
+                    or self.settings.max_parallel_hooks,
+                    "dependency_graph_size": len(self._dependency_graph),
+                },
+            )
+
+            execution_strategy = AdaptiveExecutionStrategy(
+                dependency_graph=self._dependency_graph,
+                max_parallel=strategy.max_workers or self.settings.max_parallel_hooks,
+                default_timeout=self.settings.default_timeout,
+                stop_on_critical_failure=True,
+            )
+
+            results = await execution_strategy.execute(
+                hooks=strategy.hooks,
+                executor_callable=self._execute_single_hook,
+                progress_callback=progress_callback,
+                progress_start_callback=progress_start_callback,
+            )
+        elif strategy.parallel:
+            # Fallback to simple parallel execution without dependency resolution
+            results = await self._execute_parallel(strategy.hooks, strategy.max_workers)
+        else:
+            # Sequential execution
+            results = await self._execute_sequential(strategy.hooks)
+
+        logger.info(
+            "ACB mode execution complete",
+            extra={
+                "strategy_name": strategy.name,
+                "total_hooks": len(results),
+                "passed": sum(1 for r in results if r.status == "passed"),
+                "failed": sum(1 for r in results if r.status == "failed"),
+                "errors": sum(1 for r in results if r.status in ("timeout", "error")),
+            },
+        )
+
+        return results
+
+    def _resolve_dependencies(
+        self, hooks: list[HookDefinition]
+    ) -> list[HookDefinition]:
+        """Resolve hook dependencies and return execution order.
+
+        Uses topological sort to order hooks based on dependency graph.
+
+        Args:
+            hooks: Unordered list of hooks
+
+        Returns:
+            Ordered list of hooks respecting dependencies
+
+        Algorithm:
+            1. Build in-degree map (count of dependencies per hook)
+            2. Start with hooks having zero dependencies
+            3. Process hooks in layers, removing satisfied dependencies
+            4. Hooks without dependencies execute in original order
+        """
+        # Build hook name to hook object mapping with original indices
+        hook_map = {hook.name: hook for hook in hooks}
+        hook_indices = {hook.name: idx for idx, hook in enumerate(hooks)}
+
+        # Build in-degree map (how many dependencies each hook has)
+        # Only count dependencies that are actually present in the hooks list
+        in_degree = {hook.name: 0 for hook in hooks}
+        for hook_name in hook_map:
+            if hook_name in self._dependency_graph:
+                # Only count dependencies that are in the current hooks list
+                deps_in_list = [
+                    dep for dep in self._dependency_graph[hook_name] if dep in hook_map
+                ]
+                in_degree[hook_name] = len(deps_in_list)
+
+        # Queue of hooks ready to execute (zero dependencies)
+        # Maintain original order for hooks with same in-degree
+        ready_queue = [hook for hook in hooks if in_degree[hook.name] == 0]
+        ordered = []
+
+        # Process hooks in dependency order
+        while ready_queue:
+            # Take next ready hook (first in original order)
+            current_hook = ready_queue.pop(0)
+            ordered.append(current_hook)
+
+            # Update in-degrees for dependent hooks
+            for hook_name, deps in self._dependency_graph.items():
+                if current_hook.name in deps and hook_name in in_degree:
+                    in_degree[hook_name] -= 1
+                    if in_degree[hook_name] == 0 and hook_name in hook_map:
+                        ready_queue.append(hook_map[hook_name])
+
+            # Re-sort ready_queue by original index to maintain stable order
+            ready_queue.sort(key=lambda h: hook_indices[h.name])
+
+        logger.debug(
+            "Resolved hook dependencies",
+            extra={
+                "input_count": len(hooks),
+                "output_count": len(ordered),
+                "reordered": len(hooks) != len(ordered) or hooks != ordered,
+            },
+        )
+
+        return ordered
+
+    async def _execute_parallel(
+        self,
+        hooks: list[HookDefinition],
+        max_workers: int = 3,
+    ) -> list[HookResult]:
+        """Execute hooks in parallel with resource limits.
+
+        Args:
+            hooks: Hooks to execute
+            max_workers: Maximum concurrent executions
+
+        Returns:
+            List of HookResult objects
+        """
+        max_parallel = min(max_workers, self.settings.max_parallel_hooks)
+        semaphore = asyncio.Semaphore(max_parallel)
+
+        logger.debug(
+            "Starting parallel execution",
+            extra={
+                "hook_count": len(hooks),
+                "max_parallel": max_parallel,
+            },
+        )
+
+        async def execute_with_limit(hook: HookDefinition) -> HookResult:
+            async with semaphore:
+                return await self._execute_single_hook(hook)
+
+        tasks = [execute_with_limit(hook) for hook in hooks]
+        results = await asyncio.gather(*tasks, return_exceptions=True)
+
+        # Convert exceptions to error HookResults
+        final_results = []
+        for hook, result in zip(hooks, results):
+            if isinstance(result, HookResult):
+                final_results.append(result)
+            else:
+                logger.error(
+                    "Hook execution raised exception",
+                    extra={
+                        "hook": hook.name,
+                        "exception": str(result),
+                        "exception_type": type(result).__name__,
+                    },
+                )
+                final_results.append(self._error_result(hook, result))
+
+        logger.debug(
+            "Parallel execution complete",
+            extra={
+                "total_hooks": len(final_results),
+                "successful": sum(
+                    1 for r in final_results if isinstance(r, HookResult)
+                ),
+            },
+        )
+
+        return final_results
+
+    async def _execute_sequential(
+        self, hooks: list[HookDefinition]
+    ) -> list[HookResult]:
+        """Execute hooks sequentially.
+
+        Args:
+            hooks: Hooks to execute
+
+        Returns:
+            List of HookResult objects
+        """
+        logger.debug("Starting sequential execution", extra={"hook_count": len(hooks)})
+
+        results = []
+        for hook in hooks:
+            result = await self._execute_single_hook(hook)
+            results.append(result)
+
+            # Early exit on critical failures
+            if result.status == "failed" and hook.security_level.value == "critical":
+                logger.warning(
+                    f"Critical hook {hook.name} failed, stopping execution",
+                    extra={
+                        "hook": hook.name,
+                        "security_level": "critical",
+                        "remaining_hooks": len(hooks) - len(results),
+                    },
+                )
+                break
+
+        logger.debug(
+            "Sequential execution complete",
+            extra={
+                "total_hooks": len(results),
+                "executed": len(results),
+                "skipped": len(hooks) - len(results),
+            },
+        )
+
+        return results
+
+    async def _execute_single_hook(self, hook: HookDefinition) -> HookResult:
+        """Execute a single hook (adapter or subprocess) with caching and events."""
+        logger.debug(
+            f"Executing hook: {hook.name}",
+            extra={
+                "hook": hook.name,
+                "timeout": hook.timeout,
+                "stage": hook.stage.value,
+            },
+        )
+        await self._publish_event(
+            WorkflowEvent.HOOK_EXECUTION_STARTED,
+            {
+                "hook": hook.name,
+                "stage": hook.stage.value,
+                "security_level": hook.security_level.value,
+            },
+        )
+
+        # Cache fast-path
+        cached = await self._try_get_cached(hook)
+        if cached is not None:
+            await self._publish_event(
+                WorkflowEvent.HOOK_EXECUTION_COMPLETED,
+                {
+                    "hook": hook.name,
+                    "stage": hook.stage.value,
+                    "status": cached.status,
+                    "duration": cached.duration,
+                    "cached": True,
+                },
+            )
+            return cached
+
+        try:
+            import time
+
+            start_time = time.time()
+
+            # Execute hooks via direct adapter calls or subprocess if no adapter exists
+            adapter = self._build_adapter(hook)
+            if adapter is not None:
+                result = await self._run_adapter(adapter, hook, start_time)
+            else:
+                result = self._run_subprocess(hook, start_time)
+
+            await self._maybe_cache(hook, result)
+        except Exception as exc:
+            await self._publish_event(
+                WorkflowEvent.HOOK_EXECUTION_FAILED,
+                {"hook": hook.name, "stage": hook.stage.value, "error": str(exc)},
+            )
+            raise
+
+        await self._publish_event(
+            WorkflowEvent.HOOK_EXECUTION_COMPLETED,
+            {
+                "hook": hook.name,
+                "stage": hook.stage.value,
+                "status": result.status,
+                "duration": result.duration,
+                "cached": False,
+            },
+        )
+        return result
+
+    async def _try_get_cached(self, hook: HookDefinition) -> HookResult | None:
+        if not (self.settings.enable_caching and self._cache_adapter):
+            return None
+        cache_key = self._cache_adapter.compute_key(hook, files=[])
+        cached = await self._cache_adapter.get(cache_key)
+        if cached:
+            self._cache_hits += 1
+            logger.debug(
+                f"Cache hit for hook {hook.name}",
+                extra={
+                    "hook": hook.name,
+                    "cache_key": cache_key,
+                    "cache_hits": self._cache_hits,
+                },
+            )
+            return cached
+        self._cache_misses += 1
+        logger.debug(
+            f"Cache miss for hook {hook.name}",
+            extra={
+                "hook": hook.name,
+                "cache_key": cache_key,
+                "cache_misses": self._cache_misses,
+            },
+        )
+        return None
+
+    @staticmethod
+    def _pass_result(hook: HookDefinition, duration: float) -> HookResult:
+        return HookResult(
+            id=hook.name,
+            name=hook.name,
+            status="passed",
+            duration=duration,
+            files_processed=0,
+            issues_found=[],
+            stage=hook.stage.value,
+            exit_code=None,  # No error for passed hooks
+            error_message=None,
+            is_timeout=False,
+        )
+
+    def _build_adapter(self, hook: HookDefinition) -> t.Any | None:
+        """Build adapter for hook, dispatching to specific adapter factories."""
+        try:
+            adapter_factory = self._get_adapter_factory(hook.name)
+            if adapter_factory:
+                return adapter_factory(hook)
+        except Exception:
+            return None
+        return None
+
+    def _get_adapter_factory(
+        self, hook_name: str
+    ) -> t.Callable[[HookDefinition], t.Any] | None:
+        """Get adapter factory function for hook name."""
+        factories: dict[str, t.Callable[[HookDefinition], t.Any]] = {
+            "ruff-check": self._build_ruff_adapter,
+            "ruff-format": self._build_ruff_adapter,
+            "bandit": self._build_bandit_adapter,
+            "codespell": self._build_codespell_adapter,
+            "gitleaks": self._build_gitleaks_adapter,
+            "skylos": self._build_skylos_adapter,
+            "zuban": self._build_zuban_adapter,
+            "complexipy": self._build_complexipy_adapter,
+            "creosote": self._build_creosote_adapter,
+            "refurb": self._build_refurb_adapter,
+            "pyrefly": self._build_refurb_adapter,
+            "mdformat": self._build_mdformat_adapter,
+        }
+        return factories.get(hook_name)
+
+    @staticmethod
+    def _build_ruff_adapter(hook: HookDefinition) -> t.Any:
+        """Build Ruff adapter for format or check mode."""
+        from crackerjack.adapters.format.ruff import RuffAdapter, RuffSettings
+
+        is_format_mode = "format" in hook.name
+        is_check_mode = "check" in hook.name
+
+        return RuffAdapter(
+            settings=RuffSettings(
+                mode="format" if is_format_mode else "check",
+                fix_enabled=True,  # Enable fixing for both check and format modes
+                unsafe_fixes=is_check_mode,  # Enable unsafe fixes for check mode only
+            )
+        )
+
+    @staticmethod
+    def _build_bandit_adapter(hook: HookDefinition) -> t.Any:
+        """Build Bandit security adapter."""
+        from crackerjack.adapters.sast.bandit import BanditAdapter
+
+        return BanditAdapter()
+
+    @staticmethod
+    def _build_codespell_adapter(hook: HookDefinition) -> t.Any:
+        """Build Codespell lint adapter."""
+        from crackerjack.adapters.lint.codespell import CodespellAdapter
+
+        return CodespellAdapter()
+
+    @staticmethod
+    def _build_gitleaks_adapter(hook: HookDefinition) -> t.Any:
+        """Build Gitleaks security adapter."""
+        from crackerjack.adapters.security.gitleaks import GitleaksAdapter
+
+        return GitleaksAdapter()
+
+    def _build_skylos_adapter(self, hook: HookDefinition) -> t.Any:
+        """Build Skylos LSP adapter."""
+        from crackerjack.adapters.lsp.skylos import SkylosAdapter
+
+        if self.execution_context is None:
+            msg = f"Execution context required for {hook.name} adapter"
+            raise ValueError(msg)
+        return SkylosAdapter(context=self.execution_context)
+
+    @staticmethod
+    def _build_zuban_adapter(hook: HookDefinition) -> t.Any:
+        """Build Zuban type checking adapter."""
+        from crackerjack.adapters.type.zuban import ZubanAdapter, ZubanSettings
+
+        return ZubanAdapter(settings=ZubanSettings())
+
+    @staticmethod
+    def _build_complexipy_adapter(hook: HookDefinition) -> t.Any:
+        """Build Complexipy complexity adapter."""
+        from crackerjack.adapters.complexity.complexipy import ComplexipyAdapter
+
+        return ComplexipyAdapter()
+
+    @staticmethod
+    def _build_creosote_adapter(hook: HookDefinition) -> t.Any:
+        """Build Creosote refactor adapter."""
+        from crackerjack.adapters.refactor.creosote import CreosoteAdapter
+
+        return CreosoteAdapter()
+
+    @staticmethod
+    def _build_refurb_adapter(hook: HookDefinition) -> t.Any:
+        """Build Refurb refactor adapter."""
+        from crackerjack.adapters.refactor.refurb import RefurbAdapter
+
+        return RefurbAdapter()
+
819
+ @staticmethod
820
+ def _build_mdformat_adapter(hook: HookDefinition) -> t.Any:
821
+ """Build Mdformat markdown adapter."""
822
+ from crackerjack.adapters.format.mdformat import MdformatAdapter
823
+
824
+ return MdformatAdapter()
825
+
826
+ @staticmethod
827
+ def _get_reporting_tools() -> set[str]:
828
+ """Get the set of tools that report issues."""
829
+ return {"complexipy", "refurb", "gitleaks", "creosote"}
830
+
831
+ @staticmethod
832
+ def _get_formatters() -> set[str]:
833
+ """Get the set of formatting tools."""
834
+ return {"ruff-format"}
835
+
836
+ def _determine_status(self, hook: HookDefinition, qa_result: t.Any) -> str:
837
+ """Determine the status based on hook name and QA result."""
838
+ reporting_tools = self._get_reporting_tools()
839
+ formatters = self._get_formatters()
840
+
841
+ # Override status for tools that found issues but returned SUCCESS/WARNING
842
+ if (
843
+ (hook.name in reporting_tools or hook.name in formatters)
844
+ and qa_result.issues_found > 0
845
+ and qa_result.status in (QAResultStatus.SUCCESS, QAResultStatus.WARNING)
846
+ ):
847
+ return "failed" # Trigger auto-fix stage
848
+ return (
849
+ "passed"
850
+ if qa_result.status in (QAResultStatus.SUCCESS, QAResultStatus.WARNING)
851
+ else "failed"
852
+ )
853
+
854
+ @staticmethod
855
+ def _build_issues_list(qa_result: t.Any) -> list[str]:
856
+ """Build the issues list from the QA result.
857
+
858
+ This method uses the adapter's pre-formatted details string directly
859
+ instead of re-parsing it, which preserves the original formatting and
860
+ prevents loss of detailed error information.
861
+
862
+ Args:
863
+ qa_result: QAResult from adapter execution
864
+
865
+ Returns:
866
+ List of issue strings for display. Returns empty list if no issues.
867
+ """
868
+ if qa_result.issues_found == 0:
869
+ return []
870
+
871
+ # NEW: Use adapter's pre-formatted details directly
872
+ if qa_result.details:
873
+ # Parse detail lines from the adapter's formatted output
874
+ detail_lines = [
875
+ line.strip()
876
+ for line in qa_result.details.split("\n")
877
+ if line.strip() and not line.strip().startswith("...")
878
+ ]
879
+
880
+ # If we successfully parsed details, use them
881
+ if detail_lines:
882
+ # Show first 20 issues, then add summary for remainder
883
+ max_displayed = 20
884
+ if len(detail_lines) > max_displayed:
885
+ issues = detail_lines[:max_displayed]
886
+ remaining = len(detail_lines) - max_displayed
887
+ issues.append(
888
+ f"... and {remaining} more issue{'s' if remaining != 1 else ''} "
889
+ f"(run with --ai-debug for full details)"
890
+ )
891
+ else:
892
+ issues = detail_lines
893
+
894
+ # If qa_result reports more issues than we have details for, note it
895
+ if qa_result.issues_found > len(detail_lines):
896
+ extra = qa_result.issues_found - len(detail_lines)
897
+ issues.append(
898
+ f"... and {extra} additional issue{'s' if extra != 1 else ''} without details"
899
+ )
900
+
901
+ return issues
902
+
903
+ # Fallback: No details available or details parsing failed
904
+ # This should only happen when the adapter doesn't provide detailed output
905
+ count = qa_result.issues_found
906
+ return [
907
+ f"{count} issue{'s' if count != 1 else ''} found (run with --ai-debug for full details)"
908
+ ]
909
+
910
+ @staticmethod
911
+ def _extract_error_details(
912
+ hook: HookDefinition, qa_result: t.Any, status: str, issues: list[str]
913
+ ) -> tuple[int | None, str | None, list[str]]:
914
+ """Extract error details for failed hooks from adapter results.
915
+
916
+ Note: This method should only add the generic fallback if _build_issues_list
917
+ hasn't already provided a fallback message. This prevents double-fallback.
918
+
919
+ Args:
920
+ hook: Hook definition
921
+ qa_result: QAResult from adapter execution
922
+ status: Hook status (passed/failed)
923
+ issues: Issues list from _build_issues_list
924
+
925
+ Returns:
926
+ Tuple of (exit_code, error_message, updated_issues)
927
+ """
928
+ exit_code = None
929
+ error_message = None
930
+
931
+ if status == "failed":
932
+ if hasattr(qa_result, "details") and qa_result.details:
933
+ # For adapter-based hooks, use details as error message
934
+ error_message = qa_result.details[:500] # Truncate if very long
935
+
936
+ # Only extract error from details if issues list is truly empty
937
+ # (not just a fallback message from _build_issues_list)
938
+ if not issues:
939
+ error_lines = [
940
+ line.strip()
941
+ for line in qa_result.details.split("\n")
942
+ if line.strip()
943
+ ][:10]
944
+ issues = error_lines or ["Hook failed with no parseable output"]
945
+ elif not issues:
946
+ # Only add generic fallback if we have absolutely no information
947
+ # This should be rare since _build_issues_list provides a fallback
948
+ issues = [
949
+ f"Hook {hook.name} failed with no detailed output (exit code: "
950
+ f"{
951
+ qa_result.exit_code
952
+ if hasattr(qa_result, 'exit_code')
953
+ else 'unknown'
954
+ })"
955
+ ]
956
+
957
+ return exit_code, error_message, issues
958
+
959
+     @staticmethod
+     def _calculate_total_issues(
+         qa_result: t.Any, status: str, issues: list[str]
+     ) -> int:
+         """Calculate the total count of issues from qa_result.
+
+         This method distinguishes between:
+         1. Genuine code issues (show actual count)
+         2. Configuration/tool errors (show 0, not forced to 1)
+         3. Parsing failures (may show 1 if no issues parseable)
+
+         The key insight: QAResultStatus.ERROR indicates a config/tool error,
+         not a code quality issue. These should show 0 issues, not 1.
+         """
+         # Get the actual total count of issues from qa_result
+         # This may be larger than len(issues) if issues were truncated for display
+         total_issues = (
+             qa_result.issues_found
+             if hasattr(qa_result, "issues_found")
+             else len(issues)
+         )
+
+         # Only force "1 issue" for genuine parsing failures, not config errors
+         if status == "failed" and total_issues == 0:
+             # Check if this is a config/tool error vs code quality failure
+             if (
+                 hasattr(qa_result, "status")
+                 and qa_result.status == QAResultStatus.ERROR
+             ):
+                 # Config/tool error - show actual count (0)
+                 # This prevents misleading "1 issue" for things like:
+                 # - Missing binary
+                 # - Invalid configuration
+                 # - Tool initialization failures
+                 return 0
+             else:
+                 # Parsing failure or unexpected error - show 1 to indicate problem
+                 # This handles cases where the tool found issues but we couldn't parse them
+                 return max(total_issues, 1)
+
+         return total_issues
+
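The docstring above describes a three-way counting rule. This is a self-contained sketch of that rule using stand-in types rather than crackerjack's actual QAResult/QAResultStatus classes:

```python
from dataclasses import dataclass
from enum import Enum, auto


class Status(Enum):  # stand-in for QAResultStatus
    SUCCESS = auto()
    FAILURE = auto()
    ERROR = auto()


@dataclass
class FakeResult:  # stand-in for a QAResult
    issues_found: int
    status: Status


def total_issues(result: FakeResult, hook_status: str) -> int:
    if hook_status == "failed" and result.issues_found == 0:
        # Config/tool error reports 0; an unparseable failure reports at least 1.
        return 0 if result.status is Status.ERROR else 1
    return result.issues_found


assert total_issues(FakeResult(3, Status.FAILURE), "failed") == 3  # genuine findings
assert total_issues(FakeResult(0, Status.ERROR), "failed") == 0    # missing binary, bad config
assert total_issues(FakeResult(0, Status.FAILURE), "failed") == 1  # output could not be parsed
```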
+     def _create_success_result(
+         self, hook: HookDefinition, qa_result: t.Any, start_time: float
+     ) -> HookResult:
+         """Create a HookResult for successful execution."""
+         files_processed = (
+             len(qa_result.files_checked) if hasattr(qa_result, "files_checked") else 0
+         )
+         status = self._determine_status(hook, qa_result)
+         issues = self._build_issues_list(qa_result)
+
+         # Extract error details for failed hooks from adapter results
+         exit_code, error_message, issues = self._extract_error_details(
+             hook, qa_result, status, issues
+         )
+
+         # Calculate the total issues count
+         total_issues = self._calculate_total_issues(qa_result, status, issues)
+
+         # Determine if this is a config/tool error (not code issues)
+         is_config_error = (
+             status == "failed"
+             and hasattr(qa_result, "status")
+             and qa_result.status == QAResultStatus.ERROR
+         )
+
+         return HookResult(
+             id=hook.name,
+             name=hook.name,
+             status=status,
+             duration=self._elapsed(start_time),
+             files_processed=files_processed,
+             issues_found=issues,
+             issues_count=total_issues,  # Store the actual total count
+             stage=hook.stage.value,
+             exit_code=exit_code,  # Adapters don't provide exit codes directly
+             error_message=error_message,
+             is_timeout=False,
+             is_config_error=is_config_error,  # Mark config/tool errors
+         )
+
+     def _create_timeout_result(
+         self, hook: HookDefinition, start_time: float
+     ) -> HookResult:
+         """Create a HookResult for timeout."""
+         duration = self._elapsed(start_time)
+         return HookResult(
+             id=hook.name,
+             name=hook.name,
+             status="timeout",
+             duration=duration,
+             files_processed=0,
+             issues_found=[f"Hook timed out after {hook.timeout}s"],
+             issues_count=1,  # Timeout counts as 1 issue
+             stage=hook.stage.value,
+             exit_code=124,  # Standard timeout exit code
+             error_message=f"Execution exceeded timeout of {hook.timeout}s",
+             is_timeout=True,
+         )
+
+     def _create_error_result(
+         self, hook: HookDefinition, start_time: float, error: Exception
+     ) -> HookResult:
+         """Create a HookResult for error."""
+         return HookResult(
+             id=hook.name,
+             name=hook.name,
+             status="error",
+             duration=self._elapsed(start_time),
+             files_processed=0,
+             issues_found=[f"Adapter execution error: {error}"],
+             issues_count=1,  # Error counts as 1 issue
+             stage=hook.stage.value,
+             exit_code=1,
+             error_message=str(error),
+             is_timeout=False,
+         )
+
+     async def _run_adapter(
+         self, adapter: t.Any, hook: HookDefinition, start_time: float
+     ) -> HookResult:
+         import asyncio
+
+         try:
+             await adapter.init()
+             # Let the adapter determine the appropriate files to check
+             # Pass None to allow the adapter to scan for appropriate files
+             qa_result = await asyncio.wait_for(
+                 adapter.check(files=None), timeout=hook.timeout
+             )
+             return self._create_success_result(hook, qa_result, start_time)
+         except TimeoutError:
+             return self._create_timeout_result(hook, start_time)
+         except Exception as e:
+             return self._create_error_result(hook, start_time, e)
+
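The adapter path above is essentially "init, then check under a deadline, then translate timeouts and exceptions into results". A minimal sketch of that lifecycle with a dummy adapter (the `SlowAdapter` class and `run_with_timeout` helper are hypothetical, not crackerjack APIs):

```python
import asyncio


class SlowAdapter:  # hypothetical adapter with the init()/check() shape used above
    async def init(self) -> None: ...

    async def check(self, files: list[str] | None = None) -> str:
        await asyncio.sleep(5)  # simulate a long-running tool
        return "done"


async def run_with_timeout(adapter: SlowAdapter, timeout: float) -> str:
    await adapter.init()
    try:
        return await asyncio.wait_for(adapter.check(files=None), timeout=timeout)
    except TimeoutError:  # asyncio.wait_for raises TimeoutError when the deadline expires
        return "timeout"


print(asyncio.run(run_with_timeout(SlowAdapter(), timeout=0.1)))  # -> "timeout"
```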
+     def _run_subprocess(self, hook: HookDefinition, start_time: float) -> HookResult:
+         import subprocess
+
+         cmd = hook.get_command()
+         proc_result = subprocess.run(
+             cmd, capture_output=True, text=True, timeout=hook.timeout
+         )
+         output_text = (proc_result.stdout or "") + (proc_result.stderr or "")
+
+         files_processed = self._extract_file_count(output_text)
+         status = self._determine_hook_status(hook, proc_result, output_text)
+         issues = self._collect_issues(status, proc_result)
+
+         # Semgrep-specific JSON error parsing
+         if hook.name == "semgrep" and status == "failed":
+             issues = self._parse_semgrep_json_errors(output_text, issues)
+
+         # Extract error details for failed hooks
+         exit_code = proc_result.returncode if status == "failed" else None
+         error_message = None
+         if status == "failed" and output_text.strip():
+             # Capture stdout + stderr for failed hooks (truncate if very long)
+             error_message = output_text.strip()[:500]
+
+         # Ensure failed hooks always have at least 1 issue count
+         issues_count = max(len(issues), 1 if status == "failed" else 0)
+
+         return HookResult(
+             id=hook.name,
+             name=hook.name,
+             status=status,
+             duration=self._elapsed(start_time),
+             files_processed=files_processed,
+             issues_found=issues,
+             issues_count=issues_count,
+             stage=hook.stage.value,
+             exit_code=exit_code,
+             error_message=error_message,
+             is_timeout=False,
+         )
+
+     @staticmethod
+     def _extract_file_count(output_text: str) -> int:
+         """Extract file count from subprocess output using regex patterns."""
+         import re
+
+         file_count_patterns = [
+             r"(\d+)\s+files?\s+would\s+be",
+             r"(\d+)\s+files?\s+already\s+formatted",
+             r"(\d+)\s+files?\s+processed",
+             r"(\d+)\s+files?\s+checked",
+             r"(\d+)\s+files?\s+analyzed",
+             r"Checking\s+(\d+)\s+files?",
+             r"Found\s+(\d+)\s+files?",
+             r"(\d+)\s+files?",
+         ]
+
+         all_matches = []
+         for pattern in file_count_patterns:
+             matches = re.findall(pattern, output_text, re.IGNORECASE)
+             if matches:
+                 all_matches.extend([int(m) for m in matches if m.isdigit()])
+
+         return max(all_matches) if all_matches else 0
+
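The extraction rule above is "collect every numeric match across the patterns and keep the largest". A small demonstration against a made-up tool output, using a subset of the same patterns:

```python
import re

# Subset of the patterns above; the output string is invented for illustration.
patterns = [
    r"(\d+)\s+files?\s+already\s+formatted",
    r"Checking\s+(\d+)\s+files?",
    r"(\d+)\s+files?",
]

output = "Checking 42 files\n3 files already formatted"
counts = [
    int(m)
    for pattern in patterns
    for m in re.findall(pattern, output, re.IGNORECASE)
]
print(max(counts) if counts else 0)  # -> 42 (the largest match wins)
```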
+     def _determine_hook_status(
+         self, hook: HookDefinition, proc_result: t.Any, output_text: str
+     ) -> str:
+         """Determine hook status from subprocess return code and output."""
+         base_status = "passed" if proc_result.returncode == 0 else "failed"
+
+         if base_status == "passed":
+             return "passed"
+
+         # Check special cases where return code 1 indicates success
+         if self._is_formatting_success(hook, proc_result, output_text):
+             return "passed"
+
+         # For tools like ruff that return 1 when they detect issues but execute successfully
+         # These should still be considered "passed" execution-wise but may have issues found
+         if self._is_analysis_tool_success(hook, proc_result):
+             return "passed"
+
+         if self._is_bandit_success(hook, proc_result, output_text):
+             return "passed"
+
+         return "failed"
+
+     @staticmethod
+     def _is_formatting_success(
+         hook: HookDefinition, proc_result: t.Any, output_text: str
+     ) -> bool:
+         """Check if formatting tool return code 1 indicates successful modification."""
+         if not hook.is_formatting or proc_result.returncode != 1:
+             return False
+         return "files were modified by this hook" in output_text.lower()
+
+     @staticmethod
+     def _is_analysis_tool_success(hook: HookDefinition, proc_result: t.Any) -> bool:
+         """Check if analysis tool return code 1 indicates findings (not failure)."""
+         if proc_result.returncode != 1:
+             return False
+         return hook.name in {
+             "creosote",
+             "complexipy",
+             "refurb",
+             "ruff-check",
+             "ruff-format",
+         }
+
+     @staticmethod
+     def _is_bandit_success(
+         hook: HookDefinition, proc_result: t.Any, output_text: str
+     ) -> bool:
+         """Check if bandit return code 1 indicates findings (not failure)."""
+         if hook.name != "bandit" or proc_result.returncode != 1:
+             return False
+         output_text_lower = output_text.lower()
+         return (
+             "potential issues" in output_text_lower
+             or "no issues identified" not in output_text_lower
+         )
+
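Taken together, these helpers reinterpret exit code 1 from known analysis tools as "ran fine, found issues" rather than "tool failed". A standalone sketch of that rule against a synthetic `CompletedProcess` (the `status_for` function is illustrative, not the class's actual method):

```python
import subprocess

ANALYSIS_HOOKS = {"creosote", "complexipy", "refurb", "ruff-check", "ruff-format"}


def status_for(hook_name: str, proc: subprocess.CompletedProcess[str]) -> str:
    """Exit code 0 passes; exit code 1 from an analysis tool also counts as passed."""
    if proc.returncode == 0:
        return "passed"
    if proc.returncode == 1 and hook_name in ANALYSIS_HOOKS:
        return "passed"
    return "failed"


ruff = subprocess.CompletedProcess(
    args=["ruff", "check", "."], returncode=1, stdout="Found 2 errors.", stderr=""
)
print(status_for("ruff-check", ruff))  # -> "passed" (findings, not a tool failure)
print(status_for("zuban", ruff))       # -> "failed" (exit 1 from a non-analysis hook)
```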
+     @staticmethod
+     def _collect_issues(status: str, proc_result: t.Any) -> list[str]:
+         """Collect issues from subprocess output if hook failed.
+
+         For subprocess hooks (non-adapter), extracts error information from output.
+         Returns list of strings since the display layer handles both string and object types.
+         """
+         if status == "passed":
+             return []
+
+         # Get combined output
+         output_text = (proc_result.stdout or "") + (proc_result.stderr or "")
+         if not output_text.strip():
+             return [
+                 f"Hook failed with exit code {
+                     getattr(proc_result, 'returncode', 'unknown')
+                 } and no output"
+             ]
+
+         # Try to extract meaningful error lines (first 10 non-empty lines)
+         error_lines = [
+             line.strip() for line in output_text.split("\n") if line.strip()
+         ][:10]
+
+         return error_lines or ["Hook failed with non-zero exit code"]
+
+     @staticmethod
+     def _parse_semgrep_json_errors(
+         output_text: str, fallback_issues: list[str]
+     ) -> list[str]:
+         """Parse semgrep JSON output to extract errors from errors array.
+
+         Semgrep returns JSON with:
+         - "results": Security/code quality findings (usually empty when download fails)
+         - "errors": Configuration errors, download failures, etc.
+
+         Args:
+             output_text: Combined stdout + stderr from semgrep
+             fallback_issues: Issues collected from raw output (used if JSON parsing fails)
+
+         Returns:
+             List of formatted error strings
+         """
+         import json
+
+         try:
+             json_data = json.loads(output_text.strip())
+
+             issues = []
+
+             # Extract security findings from results array
+             if "results" in json_data:
+                 for result in json_data.get("results", []):
+                     path = result.get("path", "unknown")
+                     line_num = result.get("start", {}).get("line", "?")
+                     rule_id = result.get("check_id", "unknown-rule")
+                     message = result.get("extra", {}).get(
+                         "message", "Security issue detected"
+                     )
+                     issues.append(f"{path}:{line_num} - {rule_id}: {message}")
+
+             # Extract errors (download failures, config errors, etc.)
+             if "errors" in json_data:
+                 for error in json_data.get("errors", []):
+                     error_type = error.get("type", "SemgrepError")
+                     error_msg = error.get("message", str(error))
+                     issues.append(f"{error_type}: {error_msg}")
+
+             return issues or fallback_issues
+
+         except json.JSONDecodeError:
+             # JSON parsing failed, use fallback
+             return fallback_issues
+
+     async def _maybe_cache(self, hook: HookDefinition, result: HookResult) -> None:
+         if not (self.settings.enable_caching and self._cache_adapter):
+             return
+         cache_key = self._cache_adapter.compute_key(hook, files=[])
+         await self._cache_adapter.set(cache_key, result)
+         logger.debug(
+             f"Cached result for hook {hook.name}",
+             extra={
+                 "hook": hook.name,
+                 "cache_key": cache_key,
+                 "status": result.status,
+                 "files_processed": result.files_processed,
+             },
+         )
+
+     @staticmethod
+     def _elapsed(start_time: float) -> float:
+         import time
+
+         return time.time() - start_time
+
+     @staticmethod
+     def _error_result(hook: HookDefinition, error: BaseException) -> HookResult:
+         """Create error HookResult from exception.
+
+         Args:
+             hook: Hook that raised exception
+             error: Exception that was raised
+
+         Returns:
+             HookResult with error status
+         """
+         return HookResult(
+             id=hook.name,
+             name=hook.name,
+             status="error",
+             duration=0.0,
+             files_processed=0,
+             issues_found=[str(error)],
+             issues_count=1,  # Error counts as 1 issue
+             stage=hook.stage.value,
+             exit_code=1,
+             error_message=str(error),
+             is_timeout=False,
+         )
+
+     async def get_cache_stats(self) -> dict[str, t.Any]:
+         """Get cache statistics including hit/miss ratios.
+
+         Returns:
+             Dictionary with cache statistics
+         """
+         stats = {
+             "caching_enabled": self.settings.enable_caching,
+             "cache_backend": self.settings.cache_backend
+             if self.settings.enable_caching
+             else "disabled",
+             "cache_hits": self._cache_hits,
+             "cache_misses": self._cache_misses,
+             "total_requests": self._cache_hits + self._cache_misses,
+             "hit_ratio": (
+                 self._cache_hits / (self._cache_hits + self._cache_misses)
+                 if (self._cache_hits + self._cache_misses) > 0
+                 else 0.0
+             ),
+         }
+
+         # Get adapter-specific stats if available
+         if self._cache_adapter:
+             adapter_stats = await self._cache_adapter.get_stats()
+             stats["adapter_stats"] = adapter_stats
+
+         logger.debug("Cache statistics", extra=stats)
+
+         return stats
+
+     async def _publish_event(
+         self,
+         event: WorkflowEvent,
+         payload: dict[str, t.Any],
+     ) -> None:
+         """Publish an event to the workflow bus if available."""
+         if not self._event_bus:
+             return
+
+         try:
+             await self._event_bus.publish(event, payload)
+         except Exception as exc:
+             logger.debug(
+                 "Failed to publish orchestrator event",
+                 extra={"event": event.value, "error": str(exc)},
+             )
+
+     @staticmethod
+     def _summarize_results(results: list[HookResult]) -> dict[str, t.Any]:
+         """Summarize hook results for telemetry payloads."""
+         counts = Counter(result.status for result in results)
+         return {
+             "counts": dict(counts),
+             "total": len(results),
+         }
+
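The telemetry summary is a plain `Counter` over result statuses, for example:

```python
from collections import Counter

# Sample status list standing in for [result.status for result in results]
statuses = ["passed", "passed", "failed", "timeout", "passed"]
summary = {"counts": dict(Counter(statuses)), "total": len(statuses)}
print(summary)
# {'counts': {'passed': 3, 'failed': 1, 'timeout': 1}, 'total': 5}
```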
+
+ # ACB Registration (REQUIRED at module level)
+ with suppress(Exception):
+     depends.set(HookOrchestratorAdapter)
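The module-level `depends.set(...)` call registers the adapter with acb's dependency container. Presumably a consumer resolves it back out with the container's getter; this is a sketch only, and the `depends.get` call and import path are assumptions not confirmed by this diff:

```python
# Sketch: assumes acb's container exposes a getter that mirrors depends.set().
from acb.depends import depends  # assumed import path

orchestrator = depends.get(HookOrchestratorAdapter)  # assumed resolution API
```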