crackerjack 0.37.9__py3-none-any.whl → 0.45.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crackerjack/README.md +19 -0
- crackerjack/__init__.py +30 -1
- crackerjack/__main__.py +342 -1263
- crackerjack/adapters/README.md +18 -0
- crackerjack/adapters/__init__.py +27 -5
- crackerjack/adapters/_output_paths.py +167 -0
- crackerjack/adapters/_qa_adapter_base.py +309 -0
- crackerjack/adapters/_tool_adapter_base.py +706 -0
- crackerjack/adapters/ai/README.md +65 -0
- crackerjack/adapters/ai/__init__.py +5 -0
- crackerjack/adapters/ai/claude.py +853 -0
- crackerjack/adapters/complexity/README.md +53 -0
- crackerjack/adapters/complexity/__init__.py +10 -0
- crackerjack/adapters/complexity/complexipy.py +641 -0
- crackerjack/adapters/dependency/__init__.py +22 -0
- crackerjack/adapters/dependency/pip_audit.py +418 -0
- crackerjack/adapters/format/README.md +72 -0
- crackerjack/adapters/format/__init__.py +11 -0
- crackerjack/adapters/format/mdformat.py +313 -0
- crackerjack/adapters/format/ruff.py +516 -0
- crackerjack/adapters/lint/README.md +47 -0
- crackerjack/adapters/lint/__init__.py +11 -0
- crackerjack/adapters/lint/codespell.py +273 -0
- crackerjack/adapters/lsp/README.md +49 -0
- crackerjack/adapters/lsp/__init__.py +27 -0
- crackerjack/adapters/{rust_tool_manager.py → lsp/_manager.py} +3 -3
- crackerjack/adapters/{skylos_adapter.py → lsp/skylos.py} +59 -7
- crackerjack/adapters/{zuban_adapter.py → lsp/zuban.py} +3 -6
- crackerjack/adapters/refactor/README.md +59 -0
- crackerjack/adapters/refactor/__init__.py +12 -0
- crackerjack/adapters/refactor/creosote.py +318 -0
- crackerjack/adapters/refactor/refurb.py +406 -0
- crackerjack/adapters/refactor/skylos.py +494 -0
- crackerjack/adapters/sast/README.md +132 -0
- crackerjack/adapters/sast/__init__.py +32 -0
- crackerjack/adapters/sast/_base.py +201 -0
- crackerjack/adapters/sast/bandit.py +423 -0
- crackerjack/adapters/sast/pyscn.py +405 -0
- crackerjack/adapters/sast/semgrep.py +241 -0
- crackerjack/adapters/security/README.md +111 -0
- crackerjack/adapters/security/__init__.py +17 -0
- crackerjack/adapters/security/gitleaks.py +339 -0
- crackerjack/adapters/type/README.md +52 -0
- crackerjack/adapters/type/__init__.py +12 -0
- crackerjack/adapters/type/pyrefly.py +402 -0
- crackerjack/adapters/type/ty.py +402 -0
- crackerjack/adapters/type/zuban.py +522 -0
- crackerjack/adapters/utility/README.md +51 -0
- crackerjack/adapters/utility/__init__.py +10 -0
- crackerjack/adapters/utility/checks.py +884 -0
- crackerjack/agents/README.md +264 -0
- crackerjack/agents/__init__.py +40 -12
- crackerjack/agents/base.py +1 -0
- crackerjack/agents/claude_code_bridge.py +641 -0
- crackerjack/agents/coordinator.py +49 -53
- crackerjack/agents/dry_agent.py +187 -3
- crackerjack/agents/enhanced_coordinator.py +279 -0
- crackerjack/agents/enhanced_proactive_agent.py +185 -0
- crackerjack/agents/error_middleware.py +53 -0
- crackerjack/agents/formatting_agent.py +6 -8
- crackerjack/agents/helpers/__init__.py +9 -0
- crackerjack/agents/helpers/performance/__init__.py +22 -0
- crackerjack/agents/helpers/performance/performance_ast_analyzer.py +357 -0
- crackerjack/agents/helpers/performance/performance_pattern_detector.py +909 -0
- crackerjack/agents/helpers/performance/performance_recommender.py +572 -0
- crackerjack/agents/helpers/refactoring/__init__.py +22 -0
- crackerjack/agents/helpers/refactoring/code_transformer.py +536 -0
- crackerjack/agents/helpers/refactoring/complexity_analyzer.py +344 -0
- crackerjack/agents/helpers/refactoring/dead_code_detector.py +437 -0
- crackerjack/agents/helpers/test_creation/__init__.py +19 -0
- crackerjack/agents/helpers/test_creation/test_ast_analyzer.py +216 -0
- crackerjack/agents/helpers/test_creation/test_coverage_analyzer.py +643 -0
- crackerjack/agents/helpers/test_creation/test_template_generator.py +1031 -0
- crackerjack/agents/performance_agent.py +121 -1152
- crackerjack/agents/refactoring_agent.py +156 -655
- crackerjack/agents/semantic_agent.py +479 -0
- crackerjack/agents/semantic_helpers.py +356 -0
- crackerjack/agents/test_creation_agent.py +19 -1605
- crackerjack/api.py +5 -7
- crackerjack/cli/README.md +394 -0
- crackerjack/cli/__init__.py +1 -1
- crackerjack/cli/cache_handlers.py +23 -18
- crackerjack/cli/cache_handlers_enhanced.py +1 -4
- crackerjack/cli/facade.py +70 -8
- crackerjack/cli/formatting.py +13 -0
- crackerjack/cli/handlers/__init__.py +85 -0
- crackerjack/cli/handlers/advanced.py +103 -0
- crackerjack/cli/handlers/ai_features.py +62 -0
- crackerjack/cli/handlers/analytics.py +479 -0
- crackerjack/cli/handlers/changelog.py +271 -0
- crackerjack/cli/handlers/config_handlers.py +16 -0
- crackerjack/cli/handlers/coverage.py +84 -0
- crackerjack/cli/handlers/documentation.py +280 -0
- crackerjack/cli/handlers/main_handlers.py +497 -0
- crackerjack/cli/handlers/monitoring.py +371 -0
- crackerjack/cli/handlers.py +249 -49
- crackerjack/cli/interactive.py +8 -5
- crackerjack/cli/options.py +203 -110
- crackerjack/cli/semantic_handlers.py +292 -0
- crackerjack/cli/version.py +19 -0
- crackerjack/code_cleaner.py +60 -24
- crackerjack/config/README.md +472 -0
- crackerjack/config/__init__.py +256 -0
- crackerjack/config/global_lock_config.py +191 -54
- crackerjack/config/hooks.py +188 -16
- crackerjack/config/loader.py +239 -0
- crackerjack/config/settings.py +141 -0
- crackerjack/config/tool_commands.py +331 -0
- crackerjack/core/README.md +393 -0
- crackerjack/core/async_workflow_orchestrator.py +79 -53
- crackerjack/core/autofix_coordinator.py +22 -9
- crackerjack/core/container.py +10 -9
- crackerjack/core/enhanced_container.py +9 -9
- crackerjack/core/performance.py +1 -1
- crackerjack/core/performance_monitor.py +5 -3
- crackerjack/core/phase_coordinator.py +1018 -634
- crackerjack/core/proactive_workflow.py +3 -3
- crackerjack/core/retry.py +275 -0
- crackerjack/core/service_watchdog.py +167 -23
- crackerjack/core/session_coordinator.py +187 -382
- crackerjack/core/timeout_manager.py +161 -44
- crackerjack/core/workflow/__init__.py +21 -0
- crackerjack/core/workflow/workflow_ai_coordinator.py +863 -0
- crackerjack/core/workflow/workflow_event_orchestrator.py +1107 -0
- crackerjack/core/workflow/workflow_issue_parser.py +714 -0
- crackerjack/core/workflow/workflow_phase_executor.py +1158 -0
- crackerjack/core/workflow/workflow_security_gates.py +400 -0
- crackerjack/core/workflow_orchestrator.py +1247 -953
- crackerjack/data/README.md +11 -0
- crackerjack/data/__init__.py +8 -0
- crackerjack/data/models.py +79 -0
- crackerjack/data/repository.py +210 -0
- crackerjack/decorators/README.md +180 -0
- crackerjack/decorators/__init__.py +35 -0
- crackerjack/decorators/error_handling.py +649 -0
- crackerjack/decorators/error_handling_decorators.py +334 -0
- crackerjack/decorators/helpers.py +58 -0
- crackerjack/decorators/patterns.py +281 -0
- crackerjack/decorators/utils.py +58 -0
- crackerjack/docs/README.md +11 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +1 -1
- crackerjack/documentation/README.md +11 -0
- crackerjack/documentation/ai_templates.py +1 -1
- crackerjack/documentation/dual_output_generator.py +11 -9
- crackerjack/documentation/reference_generator.py +104 -59
- crackerjack/dynamic_config.py +52 -61
- crackerjack/errors.py +1 -1
- crackerjack/events/README.md +11 -0
- crackerjack/events/__init__.py +16 -0
- crackerjack/events/telemetry.py +175 -0
- crackerjack/events/workflow_bus.py +346 -0
- crackerjack/exceptions/README.md +301 -0
- crackerjack/exceptions/__init__.py +5 -0
- crackerjack/exceptions/config.py +4 -0
- crackerjack/exceptions/tool_execution_error.py +245 -0
- crackerjack/executors/README.md +591 -0
- crackerjack/executors/__init__.py +2 -0
- crackerjack/executors/async_hook_executor.py +539 -77
- crackerjack/executors/cached_hook_executor.py +3 -3
- crackerjack/executors/hook_executor.py +967 -102
- crackerjack/executors/hook_lock_manager.py +31 -22
- crackerjack/executors/individual_hook_executor.py +66 -32
- crackerjack/executors/lsp_aware_hook_executor.py +136 -57
- crackerjack/executors/progress_hook_executor.py +282 -0
- crackerjack/executors/tool_proxy.py +23 -7
- crackerjack/hooks/README.md +485 -0
- crackerjack/hooks/lsp_hook.py +8 -9
- crackerjack/intelligence/README.md +557 -0
- crackerjack/interactive.py +37 -10
- crackerjack/managers/README.md +369 -0
- crackerjack/managers/async_hook_manager.py +41 -57
- crackerjack/managers/hook_manager.py +449 -79
- crackerjack/managers/publish_manager.py +81 -36
- crackerjack/managers/test_command_builder.py +290 -12
- crackerjack/managers/test_executor.py +93 -8
- crackerjack/managers/test_manager.py +1082 -75
- crackerjack/managers/test_progress.py +118 -26
- crackerjack/mcp/README.md +374 -0
- crackerjack/mcp/cache.py +25 -2
- crackerjack/mcp/client_runner.py +35 -18
- crackerjack/mcp/context.py +9 -9
- crackerjack/mcp/dashboard.py +24 -8
- crackerjack/mcp/enhanced_progress_monitor.py +34 -23
- crackerjack/mcp/file_monitor.py +27 -6
- crackerjack/mcp/progress_components.py +45 -34
- crackerjack/mcp/progress_monitor.py +6 -9
- crackerjack/mcp/rate_limiter.py +11 -7
- crackerjack/mcp/server.py +2 -0
- crackerjack/mcp/server_core.py +187 -55
- crackerjack/mcp/service_watchdog.py +12 -9
- crackerjack/mcp/task_manager.py +2 -2
- crackerjack/mcp/tools/README.md +27 -0
- crackerjack/mcp/tools/__init__.py +2 -0
- crackerjack/mcp/tools/core_tools.py +75 -52
- crackerjack/mcp/tools/execution_tools.py +87 -31
- crackerjack/mcp/tools/intelligence_tools.py +2 -2
- crackerjack/mcp/tools/proactive_tools.py +1 -1
- crackerjack/mcp/tools/semantic_tools.py +584 -0
- crackerjack/mcp/tools/utility_tools.py +180 -132
- crackerjack/mcp/tools/workflow_executor.py +87 -46
- crackerjack/mcp/websocket/README.md +31 -0
- crackerjack/mcp/websocket/app.py +11 -1
- crackerjack/mcp/websocket/event_bridge.py +188 -0
- crackerjack/mcp/websocket/jobs.py +27 -4
- crackerjack/mcp/websocket/monitoring/__init__.py +25 -0
- crackerjack/mcp/websocket/monitoring/api/__init__.py +19 -0
- crackerjack/mcp/websocket/monitoring/api/dependencies.py +141 -0
- crackerjack/mcp/websocket/monitoring/api/heatmap.py +154 -0
- crackerjack/mcp/websocket/monitoring/api/intelligence.py +199 -0
- crackerjack/mcp/websocket/monitoring/api/metrics.py +203 -0
- crackerjack/mcp/websocket/monitoring/api/telemetry.py +101 -0
- crackerjack/mcp/websocket/monitoring/dashboard.py +18 -0
- crackerjack/mcp/websocket/monitoring/factory.py +109 -0
- crackerjack/mcp/websocket/monitoring/filters.py +10 -0
- crackerjack/mcp/websocket/monitoring/metrics.py +64 -0
- crackerjack/mcp/websocket/monitoring/models.py +90 -0
- crackerjack/mcp/websocket/monitoring/utils.py +171 -0
- crackerjack/mcp/websocket/monitoring/websocket_manager.py +78 -0
- crackerjack/mcp/websocket/monitoring/websockets/__init__.py +17 -0
- crackerjack/mcp/websocket/monitoring/websockets/dependencies.py +126 -0
- crackerjack/mcp/websocket/monitoring/websockets/heatmap.py +176 -0
- crackerjack/mcp/websocket/monitoring/websockets/intelligence.py +291 -0
- crackerjack/mcp/websocket/monitoring/websockets/metrics.py +291 -0
- crackerjack/mcp/websocket/monitoring_endpoints.py +16 -2930
- crackerjack/mcp/websocket/server.py +1 -3
- crackerjack/mcp/websocket/websocket_handler.py +107 -6
- crackerjack/models/README.md +308 -0
- crackerjack/models/__init__.py +10 -1
- crackerjack/models/config.py +639 -22
- crackerjack/models/config_adapter.py +6 -6
- crackerjack/models/protocols.py +1167 -23
- crackerjack/models/pydantic_models.py +320 -0
- crackerjack/models/qa_config.py +145 -0
- crackerjack/models/qa_results.py +134 -0
- crackerjack/models/results.py +35 -0
- crackerjack/models/semantic_models.py +258 -0
- crackerjack/models/task.py +19 -3
- crackerjack/models/test_models.py +60 -0
- crackerjack/monitoring/README.md +11 -0
- crackerjack/monitoring/ai_agent_watchdog.py +5 -4
- crackerjack/monitoring/metrics_collector.py +4 -3
- crackerjack/monitoring/regression_prevention.py +4 -3
- crackerjack/monitoring/websocket_server.py +4 -241
- crackerjack/orchestration/README.md +340 -0
- crackerjack/orchestration/__init__.py +43 -0
- crackerjack/orchestration/advanced_orchestrator.py +20 -67
- crackerjack/orchestration/cache/README.md +312 -0
- crackerjack/orchestration/cache/__init__.py +37 -0
- crackerjack/orchestration/cache/memory_cache.py +338 -0
- crackerjack/orchestration/cache/tool_proxy_cache.py +340 -0
- crackerjack/orchestration/config.py +297 -0
- crackerjack/orchestration/coverage_improvement.py +13 -6
- crackerjack/orchestration/execution_strategies.py +6 -6
- crackerjack/orchestration/hook_orchestrator.py +1398 -0
- crackerjack/orchestration/strategies/README.md +401 -0
- crackerjack/orchestration/strategies/__init__.py +39 -0
- crackerjack/orchestration/strategies/adaptive_strategy.py +630 -0
- crackerjack/orchestration/strategies/parallel_strategy.py +237 -0
- crackerjack/orchestration/strategies/sequential_strategy.py +299 -0
- crackerjack/orchestration/test_progress_streamer.py +1 -1
- crackerjack/plugins/README.md +11 -0
- crackerjack/plugins/hooks.py +3 -2
- crackerjack/plugins/loader.py +3 -3
- crackerjack/plugins/managers.py +1 -1
- crackerjack/py313.py +191 -0
- crackerjack/security/README.md +11 -0
- crackerjack/services/README.md +374 -0
- crackerjack/services/__init__.py +8 -21
- crackerjack/services/ai/README.md +295 -0
- crackerjack/services/ai/__init__.py +7 -0
- crackerjack/services/ai/advanced_optimizer.py +878 -0
- crackerjack/services/{contextual_ai_assistant.py → ai/contextual_ai_assistant.py} +5 -3
- crackerjack/services/ai/embeddings.py +444 -0
- crackerjack/services/ai/intelligent_commit.py +328 -0
- crackerjack/services/ai/predictive_analytics.py +510 -0
- crackerjack/services/api_extractor.py +5 -3
- crackerjack/services/bounded_status_operations.py +45 -5
- crackerjack/services/cache.py +249 -318
- crackerjack/services/changelog_automation.py +7 -3
- crackerjack/services/command_execution_service.py +305 -0
- crackerjack/services/config_integrity.py +83 -39
- crackerjack/services/config_merge.py +9 -6
- crackerjack/services/config_service.py +198 -0
- crackerjack/services/config_template.py +13 -26
- crackerjack/services/coverage_badge_service.py +6 -4
- crackerjack/services/coverage_ratchet.py +53 -27
- crackerjack/services/debug.py +18 -7
- crackerjack/services/dependency_analyzer.py +4 -4
- crackerjack/services/dependency_monitor.py +13 -13
- crackerjack/services/documentation_generator.py +4 -2
- crackerjack/services/documentation_service.py +62 -33
- crackerjack/services/enhanced_filesystem.py +81 -27
- crackerjack/services/enterprise_optimizer.py +1 -1
- crackerjack/services/error_pattern_analyzer.py +10 -10
- crackerjack/services/file_filter.py +221 -0
- crackerjack/services/file_hasher.py +5 -7
- crackerjack/services/file_io_service.py +361 -0
- crackerjack/services/file_modifier.py +615 -0
- crackerjack/services/filesystem.py +80 -109
- crackerjack/services/git.py +99 -5
- crackerjack/services/health_metrics.py +4 -6
- crackerjack/services/heatmap_generator.py +12 -3
- crackerjack/services/incremental_executor.py +380 -0
- crackerjack/services/initialization.py +101 -49
- crackerjack/services/log_manager.py +2 -2
- crackerjack/services/logging.py +120 -68
- crackerjack/services/lsp_client.py +12 -12
- crackerjack/services/memory_optimizer.py +27 -22
- crackerjack/services/monitoring/README.md +30 -0
- crackerjack/services/monitoring/__init__.py +9 -0
- crackerjack/services/monitoring/dependency_monitor.py +678 -0
- crackerjack/services/monitoring/error_pattern_analyzer.py +676 -0
- crackerjack/services/monitoring/health_metrics.py +716 -0
- crackerjack/services/monitoring/metrics.py +587 -0
- crackerjack/services/{performance_benchmarks.py → monitoring/performance_benchmarks.py} +100 -14
- crackerjack/services/{performance_cache.py → monitoring/performance_cache.py} +21 -15
- crackerjack/services/{performance_monitor.py → monitoring/performance_monitor.py} +10 -6
- crackerjack/services/parallel_executor.py +166 -55
- crackerjack/services/patterns/__init__.py +142 -0
- crackerjack/services/patterns/agents.py +107 -0
- crackerjack/services/patterns/code/__init__.py +15 -0
- crackerjack/services/patterns/code/detection.py +118 -0
- crackerjack/services/patterns/code/imports.py +107 -0
- crackerjack/services/patterns/code/paths.py +159 -0
- crackerjack/services/patterns/code/performance.py +119 -0
- crackerjack/services/patterns/code/replacement.py +36 -0
- crackerjack/services/patterns/core.py +212 -0
- crackerjack/services/patterns/documentation/__init__.py +14 -0
- crackerjack/services/patterns/documentation/badges_markdown.py +96 -0
- crackerjack/services/patterns/documentation/comments_blocks.py +83 -0
- crackerjack/services/patterns/documentation/docstrings.py +89 -0
- crackerjack/services/patterns/formatting.py +226 -0
- crackerjack/services/patterns/operations.py +339 -0
- crackerjack/services/patterns/security/__init__.py +23 -0
- crackerjack/services/patterns/security/code_injection.py +122 -0
- crackerjack/services/patterns/security/credentials.py +190 -0
- crackerjack/services/patterns/security/path_traversal.py +221 -0
- crackerjack/services/patterns/security/unsafe_operations.py +216 -0
- crackerjack/services/patterns/templates.py +62 -0
- crackerjack/services/patterns/testing/__init__.py +18 -0
- crackerjack/services/patterns/testing/error_patterns.py +107 -0
- crackerjack/services/patterns/testing/pytest_output.py +126 -0
- crackerjack/services/patterns/tool_output/__init__.py +16 -0
- crackerjack/services/patterns/tool_output/bandit.py +72 -0
- crackerjack/services/patterns/tool_output/other.py +97 -0
- crackerjack/services/patterns/tool_output/pyright.py +67 -0
- crackerjack/services/patterns/tool_output/ruff.py +44 -0
- crackerjack/services/patterns/url_sanitization.py +114 -0
- crackerjack/services/patterns/utilities.py +42 -0
- crackerjack/services/patterns/utils.py +339 -0
- crackerjack/services/patterns/validation.py +46 -0
- crackerjack/services/patterns/versioning.py +62 -0
- crackerjack/services/predictive_analytics.py +21 -8
- crackerjack/services/profiler.py +280 -0
- crackerjack/services/quality/README.md +415 -0
- crackerjack/services/quality/__init__.py +11 -0
- crackerjack/services/quality/anomaly_detector.py +392 -0
- crackerjack/services/quality/pattern_cache.py +333 -0
- crackerjack/services/quality/pattern_detector.py +479 -0
- crackerjack/services/quality/qa_orchestrator.py +491 -0
- crackerjack/services/{quality_baseline.py → quality/quality_baseline.py} +163 -2
- crackerjack/services/{quality_baseline_enhanced.py → quality/quality_baseline_enhanced.py} +4 -1
- crackerjack/services/{quality_intelligence.py → quality/quality_intelligence.py} +180 -16
- crackerjack/services/regex_patterns.py +58 -2987
- crackerjack/services/regex_utils.py +55 -29
- crackerjack/services/secure_status_formatter.py +42 -15
- crackerjack/services/secure_subprocess.py +35 -2
- crackerjack/services/security.py +16 -8
- crackerjack/services/server_manager.py +40 -51
- crackerjack/services/smart_scheduling.py +46 -6
- crackerjack/services/status_authentication.py +3 -3
- crackerjack/services/thread_safe_status_collector.py +1 -0
- crackerjack/services/tool_filter.py +368 -0
- crackerjack/services/tool_version_service.py +9 -5
- crackerjack/services/unified_config.py +43 -351
- crackerjack/services/vector_store.py +689 -0
- crackerjack/services/version_analyzer.py +6 -4
- crackerjack/services/version_checker.py +14 -8
- crackerjack/services/zuban_lsp_service.py +5 -4
- crackerjack/slash_commands/README.md +11 -0
- crackerjack/slash_commands/init.md +2 -12
- crackerjack/slash_commands/run.md +84 -50
- crackerjack/tools/README.md +11 -0
- crackerjack/tools/__init__.py +30 -0
- crackerjack/tools/_git_utils.py +105 -0
- crackerjack/tools/check_added_large_files.py +139 -0
- crackerjack/tools/check_ast.py +105 -0
- crackerjack/tools/check_json.py +103 -0
- crackerjack/tools/check_jsonschema.py +297 -0
- crackerjack/tools/check_toml.py +103 -0
- crackerjack/tools/check_yaml.py +110 -0
- crackerjack/tools/codespell_wrapper.py +72 -0
- crackerjack/tools/end_of_file_fixer.py +202 -0
- crackerjack/tools/format_json.py +128 -0
- crackerjack/tools/mdformat_wrapper.py +114 -0
- crackerjack/tools/trailing_whitespace.py +198 -0
- crackerjack/tools/validate_regex_patterns.py +7 -3
- crackerjack/ui/README.md +11 -0
- crackerjack/ui/dashboard_renderer.py +28 -0
- crackerjack/ui/templates/README.md +11 -0
- crackerjack/utils/console_utils.py +13 -0
- crackerjack/utils/dependency_guard.py +230 -0
- crackerjack/utils/retry_utils.py +275 -0
- crackerjack/workflows/README.md +590 -0
- crackerjack/workflows/__init__.py +46 -0
- crackerjack/workflows/actions.py +811 -0
- crackerjack/workflows/auto_fix.py +444 -0
- crackerjack/workflows/container_builder.py +499 -0
- crackerjack/workflows/definitions.py +443 -0
- crackerjack/workflows/engine.py +177 -0
- crackerjack/workflows/event_bridge.py +242 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/METADATA +678 -98
- crackerjack-0.45.2.dist-info/RECORD +478 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/WHEEL +1 -1
- crackerjack/managers/test_manager_backup.py +0 -1075
- crackerjack/mcp/tools/execution_tools_backup.py +0 -1011
- crackerjack/mixins/__init__.py +0 -3
- crackerjack/mixins/error_handling.py +0 -145
- crackerjack/services/config.py +0 -358
- crackerjack/ui/server_panels.py +0 -125
- crackerjack-0.37.9.dist-info/RECORD +0 -231
- /crackerjack/adapters/{rust_tool_adapter.py → lsp/_base.py} +0 -0
- /crackerjack/adapters/{lsp_client.py → lsp/_client.py} +0 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,1107 @@
|
|
|
1
|
+
"""Event-driven workflow orchestration and logging.
|
|
2
|
+
|
|
3
|
+
Manages workflow execution via event bus and provides comprehensive logging/metrics.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import time
|
|
10
|
+
import typing as t
|
|
11
|
+
|
|
12
|
+
from acb.console import Console
|
|
13
|
+
from acb.depends import Inject, depends
|
|
14
|
+
from acb.events import Event, EventHandlerResult
|
|
15
|
+
|
|
16
|
+
from crackerjack.events import WorkflowEvent, WorkflowEventBus
|
|
17
|
+
from crackerjack.models.protocols import (
|
|
18
|
+
DebugServiceProtocol,
|
|
19
|
+
LoggerProtocol,
|
|
20
|
+
MemoryOptimizerProtocol,
|
|
21
|
+
OptionsProtocol,
|
|
22
|
+
PerformanceBenchmarkProtocol,
|
|
23
|
+
PerformanceCacheProtocol,
|
|
24
|
+
PerformanceMonitorProtocol,
|
|
25
|
+
)
|
|
26
|
+
from crackerjack.services.logging import LoggingContext
|
|
27
|
+
from crackerjack.services.memory_optimizer import memory_optimized
|
|
28
|
+
|
|
29
|
+
if t.TYPE_CHECKING:
|
|
30
|
+
from crackerjack.core.phase_coordinator import PhaseCoordinator
|
|
31
|
+
from crackerjack.core.session_coordinator import (
|
|
32
|
+
SessionController,
|
|
33
|
+
SessionCoordinator,
|
|
34
|
+
)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class WorkflowEventOrchestrator:
|
|
38
|
+
"""Orchestrates workflow execution via event-driven architecture and logging."""
|
|
39
|
+
|
|
40
|
+
@depends.inject
def __init__(
    self,
    console: Inject[Console],
    logger: Inject[LoggerProtocol],
    performance_monitor: Inject[PerformanceMonitorProtocol],
    memory_optimizer: Inject[MemoryOptimizerProtocol],
    performance_cache: Inject[PerformanceCacheProtocol],
    debugger: Inject[DebugServiceProtocol],
    event_bus: Inject[WorkflowEventBus] | None = None,
    performance_benchmarks: Inject[PerformanceBenchmarkProtocol] | None = None,
) -> None:
    """Create the orchestrator with its ACB-injected collaborators.

    Args:
        console: Rich console for user-facing output
        logger: Structured logging service
        performance_monitor: Tracks per-workflow timing and outcomes
        memory_optimizer: Performs memory cleanup between runs
        performance_cache: Async cache started/stopped around workflows
        debugger: Debug service for workflow diagnostics
        event_bus: Optional event bus enabling event-driven execution
        performance_benchmarks: Optional performance benchmarking service
    """
    # Injected services.
    self.console = console
    self.logger = logger
    self.debugger = debugger
    self._performance_monitor = performance_monitor
    self._memory_optimizer = memory_optimizer
    self._cache = performance_cache
    self._event_bus = event_bus
    self._performance_benchmarks = performance_benchmarks

    # Workflow collaborators; populated later by set_workflow_components().
    self.session: SessionCoordinator | None = None
    self.phases: PhaseCoordinator | None = None
    self._session_controller: SessionController | None = None
    self._workflow_pipeline: t.Any = None  # reference to parent pipeline
def set_workflow_components(
    self,
    session: SessionCoordinator,
    phases: PhaseCoordinator,
    session_controller: SessionController,
    workflow_pipeline: t.Any,
) -> None:
    """Wire in the collaborating workflow objects after construction.

    Args:
        session: Session coordinator for workflow session management
        phases: Phase coordinator for workflow phase execution
        session_controller: Session controller for session operations
        workflow_pipeline: Parent pipeline, used for non-extracted methods
    """
    # Assignments are independent of one another; just record each reference.
    self._workflow_pipeline = workflow_pipeline
    self._session_controller = session_controller
    self.phases = phases
    self.session = session
+
# ========================================
|
|
100
|
+
# Event-Driven Workflow Execution
|
|
101
|
+
# ========================================
|
|
102
|
+
|
|
103
|
+
def _should_debug(self) -> bool:
|
|
104
|
+
"""Check if debug mode is enabled via environment variable."""
|
|
105
|
+
import os
|
|
106
|
+
|
|
107
|
+
return os.environ.get("AI_AGENT_DEBUG", "0") == "1"
|
|
108
|
+
|
|
109
|
+
@memory_optimized
async def run_complete_workflow(self, options: OptionsProtocol) -> bool:
    """Run the full workflow, publishing lifecycle events around it.

    Starts performance monitoring and the async cache, publishes
    WORKFLOW_STARTED, executes the workflow inside a logging context, and
    always cleans up resources on exit.

    Args:
        options: Workflow execution options

    Returns:
        True if workflow succeeded, False otherwise
    """
    wf_id = f"workflow_{int(time.time())}"
    ctx = self._workflow_context(wf_id, options)
    started_at = time.time()

    self._performance_monitor.start_workflow(wf_id)
    await self._cache.start()
    await self._publish_event(WorkflowEvent.WORKFLOW_STARTED, ctx)

    try:
        log_ctx = LoggingContext(
            "workflow_execution",
            testing=getattr(options, "test", False),
            skip_hooks=getattr(options, "skip_hooks", False),
        )
        with log_ctx:
            return await self._execute_workflow(options, wf_id, ctx, started_at)
    except KeyboardInterrupt:
        # Ctrl-C: record the interruption and delegate user-facing handling.
        return await self._handle_keyboard_interrupt(wf_id, ctx)
    except Exception as exc:
        return await self._handle_general_exception(exc, wf_id, ctx)
    finally:
        # Runs on success, failure, and interruption alike.
        await self._cleanup_workflow_resources()
|
145
|
+
async def _execute_workflow(
|
|
146
|
+
self,
|
|
147
|
+
options: OptionsProtocol,
|
|
148
|
+
workflow_id: str,
|
|
149
|
+
event_context: dict[str, t.Any],
|
|
150
|
+
start_time: float,
|
|
151
|
+
) -> bool:
|
|
152
|
+
"""Execute the workflow either event-driven or sequentially.
|
|
153
|
+
|
|
154
|
+
Args:
|
|
155
|
+
options: Workflow execution options
|
|
156
|
+
workflow_id: Unique workflow identifier
|
|
157
|
+
event_context: Event context data
|
|
158
|
+
start_time: Workflow start timestamp
|
|
159
|
+
|
|
160
|
+
Returns:
|
|
161
|
+
True if workflow succeeded, False otherwise
|
|
162
|
+
"""
|
|
163
|
+
if self._event_bus:
|
|
164
|
+
return await self._run_event_driven_workflow(
|
|
165
|
+
options, workflow_id, event_context, start_time
|
|
166
|
+
)
|
|
167
|
+
return await self._run_sequential_workflow(
|
|
168
|
+
options, workflow_id, event_context, start_time
|
|
169
|
+
)
|
|
170
|
+
|
|
171
|
+
async def _run_sequential_workflow(
    self,
    options: OptionsProtocol,
    workflow_id: str,
    event_context: dict[str, t.Any],
    start_time: float,
) -> bool:
    """Execute the workflow sequentially without an event bus.

    Publishes session-lifecycle events around initialization, runs the
    timed workflow body, then publishes the terminal event and closes
    performance monitoring.

    Args:
        options: Workflow execution options
        workflow_id: Unique workflow identifier
        event_context: Event context data
        start_time: Workflow start timestamp

    Returns:
        True if workflow succeeded, False otherwise
    """
    await self._publish_event(
        WorkflowEvent.WORKFLOW_SESSION_INITIALIZING, event_context
    )
    controller = self._session_controller
    if controller:
        controller.initialize(options)
    await self._publish_event(WorkflowEvent.WORKFLOW_SESSION_READY, event_context)

    success = await self._execute_workflow_with_timing(
        options, start_time, workflow_id
    )

    # Terminal event mirrors the boolean outcome of the run.
    outcome = (
        WorkflowEvent.WORKFLOW_COMPLETED
        if success
        else WorkflowEvent.WORKFLOW_FAILED
    )
    await self._publish_event(outcome, event_context | {"success": success})
    self._performance_monitor.end_workflow(workflow_id, success)
    return success
|
214
|
+
async def _handle_keyboard_interrupt(
    self, workflow_id: str, event_context: dict[str, t.Any]
) -> bool:
    """React to a Ctrl-C during workflow execution.

    Marks the workflow as failed in the performance monitor, publishes
    WORKFLOW_INTERRUPTED, then delegates to the user-interruption handler.

    Args:
        workflow_id: Unique workflow identifier
        event_context: Event context data

    Returns:
        False (workflow failed due to interruption)
    """
    self._performance_monitor.end_workflow(workflow_id, False)
    await self._publish_event(WorkflowEvent.WORKFLOW_INTERRUPTED, event_context)
    return self._handle_user_interruption()
|
233
|
+
async def _handle_general_exception(
    self, e: Exception, workflow_id: str, event_context: dict[str, t.Any]
) -> bool:
    """React to an unexpected exception during workflow execution.

    Marks the workflow as failed, publishes WORKFLOW_FAILED with the error
    details attached, then delegates to the workflow exception handler.

    Args:
        e: Exception that occurred
        workflow_id: Unique workflow identifier
        event_context: Event context data

    Returns:
        False (workflow failed due to exception)
    """
    self._performance_monitor.end_workflow(workflow_id, False)
    failure_details = {
        "error": str(e),
        "error_type": type(e).__name__,
    }
    await self._publish_event(
        WorkflowEvent.WORKFLOW_FAILED, event_context | failure_details
    )
    return self._handle_workflow_exception(e)
|
256
|
+
|
|
257
|
+
async def _cleanup_workflow_resources(self) -> None:
    """Release session, memory, and cache resources after a workflow run."""
    if self.session:
        self.session.cleanup_resources()
    self._memory_optimizer.optimize_memory()
    await self._cache.stop()
|
|
263
|
+
|
|
264
|
+
def _unsubscribe_all_subscriptions(self, subscriptions: list[str]) -> None:
    """Unsubscribe every listed event subscription, emptying the list in place.

    Args:
        subscriptions: Subscription IDs to unsubscribe; each is removed from
            the list as soon as its unsubscribe call succeeds.
    """
    # Iterate over a snapshot because the list is mutated inside the loop.
    for sid in list(subscriptions):
        if self._event_bus:
            self._event_bus.unsubscribe(sid)
            subscriptions.remove(sid)
|
|
274
|
+
|
|
275
|
+
async def _finalize_workflow(
    self,
    start_time: float,
    workflow_id: str,
    success: bool,
    completion_future: asyncio.Future[bool],
    subscriptions: list[str],
    payload: dict[str, t.Any] | None = None,
) -> EventHandlerResult:
    """Finalize workflow execution: log, benchmark, and resolve the future.

    Args:
        start_time: Workflow start timestamp
        workflow_id: Unique workflow identifier
        success: Whether workflow succeeded
        completion_future: Future to set with result
        subscriptions: List of event subscriptions to clean up
        payload: Optional event payload data
            (NOTE: currently accepted for interface symmetry but unused here)

    Returns:
        EventHandlerResult with success status
    """
    # Another handler already finalized this workflow run.
    if completion_future.done():
        return EventHandlerResult(success=success)

    if self.session:
        self.session.finalize_session(start_time, success)
    elapsed = time.time() - start_time
    self._log_workflow_completion(success, elapsed)
    self._log_workflow_completion_debug(success, elapsed)

    perf = self._performance_monitor.end_workflow(workflow_id, success)
    self.logger.info(
        f"Workflow performance: {perf.performance_score: .1f} score, "
        f"{perf.total_duration_seconds: .2f}s duration"
    )

    await self._generate_performance_benchmark_report(
        workflow_id, elapsed, success
    )

    self._unsubscribe_all_subscriptions(subscriptions)
    completion_future.set_result(success)

    return EventHandlerResult(success=success)
|
|
320
|
+
|
|
321
|
+
async def _publish_workflow_failure(
    self,
    event_context: dict[str, t.Any],
    stage: str,
    error: Exception | None = None,
) -> None:
    """Emit a WORKFLOW_FAILED event annotated with the failing stage.

    Args:
        event_context: Event context data
        stage: Workflow stage where failure occurred
        error: Optional exception that caused failure
    """
    details: dict[str, t.Any] = event_context | {"stage": stage}
    if error is not None:
        details["error"] = str(error)
        details["error_type"] = type(error).__name__

    await self._publish_event(WorkflowEvent.WORKFLOW_FAILED, details)
|
|
340
|
+
|
|
341
|
+
async def _handle_session_ready(
    self,
    event: Event,
    state_flags: dict[str, bool],
    workflow_id: str,
    options: OptionsProtocol,
) -> EventHandlerResult:
    """Run the configuration phase once the session becomes ready.

    Args:
        event: Session ready event
        state_flags: Workflow state tracking flags
        workflow_id: Unique workflow identifier
        options: Workflow execution options

    Returns:
        EventHandlerResult with configuration phase status
    """
    # Guard against duplicate delivery of the session-ready event.
    if state_flags["configuration"]:
        return EventHandlerResult(success=True)
    state_flags["configuration"] = True

    context = {"workflow_id": workflow_id}
    try:
        await self._publish_event(WorkflowEvent.CONFIG_PHASE_STARTED, context)
        # Configuration work is synchronous; run it off the event loop.
        config_ok = await asyncio.to_thread(
            self.phases.run_configuration_phase,  # type: ignore[union-attr]
            options,
        )
        await self._publish_event(
            WorkflowEvent.CONFIG_PHASE_COMPLETED,
            context | {"success": config_ok},
        )
        if not config_ok:
            await self._publish_workflow_failure(context, "configuration")
        return EventHandlerResult(success=config_ok)
    except Exception as exc:  # pragma: no cover - defensive
        await self._publish_workflow_failure(context, "configuration", exc)
        return EventHandlerResult(success=False, error_message=str(exc))
|
|
389
|
+
|
|
390
|
+
async def _handle_config_completed(
    self,
    event: Event,
    state_flags: dict[str, bool],
    workflow_id: str,
    options: OptionsProtocol,
) -> EventHandlerResult:
    """Kick off the quality phase after configuration completes successfully.

    Args:
        event: Configuration completed event
        state_flags: Workflow state tracking flags
        workflow_id: Unique workflow identifier
        options: Workflow execution options

    Returns:
        EventHandlerResult with quality phase status
    """
    if not event.payload.get("success", False):
        return EventHandlerResult(success=False)
    # Guard against duplicate delivery of the config-completed event.
    if state_flags["quality"]:
        return EventHandlerResult(success=True)
    state_flags["quality"] = True

    context = {"workflow_id": workflow_id}
    try:
        await self._publish_event(WorkflowEvent.QUALITY_PHASE_STARTED, context)
        quality_ok = await self._execute_quality_phase(options, workflow_id)
        await self._publish_event(
            WorkflowEvent.QUALITY_PHASE_COMPLETED,
            context | {"success": quality_ok},
        )
        if not quality_ok:
            await self._publish_workflow_failure(context, "quality")
        return EventHandlerResult(success=quality_ok)
    except Exception as exc:  # pragma: no cover - defensive
        await self._publish_workflow_failure(context, "quality", exc)
        return EventHandlerResult(success=False, error_message=str(exc))
|
|
437
|
+
|
|
438
|
+
async def _handle_quality_completed(
    self,
    event: Event,
    state_flags: dict[str, bool],
    workflow_id: str,
    options: OptionsProtocol,
    publish_requested: bool,
) -> EventHandlerResult:
    """Run (or skip) the publishing phase after the quality phase passes.

    Args:
        event: Quality completed event
        state_flags: Workflow state tracking flags
        workflow_id: Unique workflow identifier
        options: Workflow execution options
        publish_requested: Whether publishing was requested

    Returns:
        EventHandlerResult with publishing phase status
    """
    if not event.payload.get("success", False):
        return EventHandlerResult(success=False)
    # Guard against duplicate delivery of the quality-completed event.
    if state_flags["publishing"]:
        return EventHandlerResult(success=True)
    state_flags["publishing"] = True

    context = {"workflow_id": workflow_id}
    try:
        if not publish_requested:
            # Publishing was not requested: report the phase as skipped.
            await self._publish_event(
                WorkflowEvent.PUBLISH_PHASE_COMPLETED,
                context | {"success": True, "skipped": True},
            )
            return EventHandlerResult(success=True)

        await self._publish_event(WorkflowEvent.PUBLISH_PHASE_STARTED, context)
        publish_ok = await self._execute_publishing_workflow(
            options, workflow_id
        )
        await self._publish_event(
            WorkflowEvent.PUBLISH_PHASE_COMPLETED,
            context | {"success": publish_ok},
        )
        if not publish_ok:
            await self._publish_workflow_failure(context, "publishing")
            return EventHandlerResult(success=False)
        return EventHandlerResult(success=True)
    except Exception as exc:  # pragma: no cover - defensive
        await self._publish_workflow_failure(context, "publishing", exc)
        return EventHandlerResult(success=False, error_message=str(exc))
|
|
500
|
+
|
|
501
|
+
async def _handle_publish_completed(
    self,
    event: Event,
    state_flags: dict[str, bool],
    workflow_id: str,
    options: OptionsProtocol,
    commit_requested: bool,
    publish_requested: bool,
    event_context: dict[str, t.Any],
) -> EventHandlerResult:
    """Run (or skip) the commit phase, then mark the workflow completed.

    Args:
        event: Publishing completed event
        state_flags: Workflow state tracking flags
        workflow_id: Unique workflow identifier
        options: Workflow execution options
        commit_requested: Whether commit was requested
        publish_requested: Whether publishing was requested
        event_context: Event context data

    Returns:
        EventHandlerResult with commit phase status
    """
    # A failed publish only blocks progress when publishing was requested.
    if publish_requested and not event.payload.get("success", False):
        return EventHandlerResult(success=False)
    # Guard against duplicate delivery of the publish-completed event.
    if state_flags["commit"]:
        return EventHandlerResult(success=True)
    state_flags["commit"] = True

    context = {"workflow_id": workflow_id}
    try:
        if commit_requested:
            await self._publish_event(
                WorkflowEvent.COMMIT_PHASE_STARTED, context
            )
            commit_ok = await self._execute_commit_workflow(
                options, workflow_id
            )
            await self._publish_event(
                WorkflowEvent.COMMIT_PHASE_COMPLETED,
                context | {"success": commit_ok},
            )
            if not commit_ok:
                await self._publish_workflow_failure(context, "commit")
                return EventHandlerResult(success=False)
        else:
            # Commit was not requested: report the phase as skipped.
            await self._publish_event(
                WorkflowEvent.COMMIT_PHASE_COMPLETED,
                context | {"success": True, "skipped": True},
            )

        await self._publish_event(
            WorkflowEvent.WORKFLOW_COMPLETED,
            event_context | {"success": True},
        )
        return EventHandlerResult(success=True)
    except Exception as exc:  # pragma: no cover - defensive
        await self._publish_workflow_failure(context, "commit", exc)
        return EventHandlerResult(success=False, error_message=str(exc))
|
|
572
|
+
|
|
573
|
+
async def _handle_workflow_completed(
    self,
    event: Event,
    start_time: float,
    workflow_id: str,
    completion_future: asyncio.Future[bool],
    subscriptions: list[str],
) -> EventHandlerResult:
    """Finalize a successfully completed workflow.

    Args:
        event: Workflow completed event
        start_time: Workflow start timestamp
        workflow_id: Unique workflow identifier
        completion_future: Future to set with result
        subscriptions: List of event subscriptions to clean up

    Returns:
        EventHandlerResult with finalization status
    """
    return await self._finalize_workflow(
        start_time,
        workflow_id,
        True,  # workflow succeeded
        completion_future,
        subscriptions,
        event.payload,
    )
|
|
601
|
+
|
|
602
|
+
async def _handle_workflow_failed(
    self,
    event: Event,
    start_time: float,
    workflow_id: str,
    completion_future: asyncio.Future[bool],
    subscriptions: list[str],
) -> EventHandlerResult:
    """Finalize a workflow that ended in failure.

    Args:
        event: Workflow failed event
        start_time: Workflow start timestamp
        workflow_id: Unique workflow identifier
        completion_future: Future to set with result
        subscriptions: List of event subscriptions to clean up

    Returns:
        EventHandlerResult with finalization status
    """
    return await self._finalize_workflow(
        start_time,
        workflow_id,
        False,  # workflow failed
        completion_future,
        subscriptions,
        event.payload,
    )
|
|
630
|
+
|
|
631
|
+
async def _run_event_driven_workflow(
    self,
    options: OptionsProtocol,
    workflow_id: str,
    event_context: dict[str, t.Any],
    start_time: float,
) -> bool:
    """Execute the workflow by wiring phase handlers onto the event bus.

    Args:
        options: Workflow execution options
        workflow_id: Unique workflow identifier
        event_context: Event context data
        start_time: Workflow start timestamp

    Returns:
        True if workflow succeeded, False otherwise

    Raises:
        RuntimeError: If event bus is not configured
    """
    if not self._event_bus:
        raise RuntimeError("Workflow event bus is not configured.")

    completion_future: asyncio.Future[bool] = (
        asyncio.get_running_loop().create_future()
    )
    subscriptions: list[str] = []

    publish_requested = bool(
        getattr(options, "publish", False) or getattr(options, "all", False)
    )
    commit_requested = bool(getattr(options, "commit", False))

    # One guard flag per phase so duplicate events do not rerun a phase.
    state_flags = dict.fromkeys(
        ("configuration", "quality", "publishing", "commit"), False
    )

    # Thin closures binding the shared workflow state into the handlers.
    async def on_session_ready(event: Event) -> EventHandlerResult:
        return await self._handle_session_ready(
            event, state_flags, workflow_id, options
        )

    async def on_config_completed(event: Event) -> EventHandlerResult:
        return await self._handle_config_completed(
            event, state_flags, workflow_id, options
        )

    async def on_quality_completed(event: Event) -> EventHandlerResult:
        return await self._handle_quality_completed(
            event, state_flags, workflow_id, options, publish_requested
        )

    async def on_publish_completed(event: Event) -> EventHandlerResult:
        return await self._handle_publish_completed(
            event,
            state_flags,
            workflow_id,
            options,
            commit_requested,
            publish_requested,
            event_context,
        )

    async def on_workflow_completed(event: Event) -> EventHandlerResult:
        return await self._handle_workflow_completed(
            event, start_time, workflow_id, completion_future, subscriptions
        )

    async def on_workflow_failed(event: Event) -> EventHandlerResult:
        return await self._handle_workflow_failed(
            event, start_time, workflow_id, completion_future, subscriptions
        )

    handler_map = (
        (WorkflowEvent.WORKFLOW_SESSION_READY, on_session_ready),
        (WorkflowEvent.CONFIG_PHASE_COMPLETED, on_config_completed),
        (WorkflowEvent.QUALITY_PHASE_COMPLETED, on_quality_completed),
        (WorkflowEvent.PUBLISH_PHASE_COMPLETED, on_publish_completed),
        (WorkflowEvent.WORKFLOW_COMPLETED, on_workflow_completed),
        (WorkflowEvent.WORKFLOW_FAILED, on_workflow_failed),
    )
    subscriptions.extend(
        self._event_bus.subscribe(evt, handler) for evt, handler in handler_map
    )

    try:
        await self._publish_event(
            WorkflowEvent.WORKFLOW_SESSION_INITIALIZING, event_context
        )
        if self._session_controller:
            self._session_controller.initialize(options)
        await self._publish_event(
            WorkflowEvent.WORKFLOW_SESSION_READY, event_context
        )
    except Exception as exc:  # pragma: no cover - defensive
        await self._publish_workflow_failure(
            event_context, "session_initialization", exc
        )
        await self._finalize_workflow(
            start_time, workflow_id, False, completion_future, subscriptions
        )
        return False

    # The future resolves when a completed/failed handler finalizes the run.
    return await completion_future
|
|
758
|
+
|
|
759
|
+
# ========================================
|
|
760
|
+
# Logging and Performance Methods
|
|
761
|
+
# ========================================
|
|
762
|
+
|
|
763
|
+
def _log_workflow_startup_debug(self, options: OptionsProtocol) -> None:
    """Record workflow startup details when debug mode is active.

    Args:
        options: Workflow execution options
    """
    if not self._should_debug():
        return

    startup_flags = {
        key: getattr(options, attr, False)
        for key, attr in (
            ("testing", "test"),
            ("skip_hooks", "skip_hooks"),
            ("ai_agent", "ai_agent"),
        )
    }
    self.debugger.log_workflow_phase(
        "workflow_execution",
        "started",
        details=startup_flags,
    )
|
|
781
|
+
|
|
782
|
+
def _log_zuban_lsp_status(self) -> None:
    """Display current Zuban LSP server status during workflow startup."""
    from crackerjack.services.server_manager import find_zuban_lsp_processes

    try:
        running = find_zuban_lsp_processes()
        if running:
            proc = running[0]  # report only the first matching process
            self.logger.info(
                f"🔍 Zuban LSP server running (PID: {proc['pid']}, "
                f"CPU: {proc['cpu']}%, Memory: {proc['mem']}%)"
            )
        else:
            self.logger.info("🔍 Zuban LSP server not running")
    except Exception as e:
        # Status reporting is best-effort; never fail workflow startup.
        self.logger.debug(f"Failed to check Zuban LSP status: {e}")
|
|
800
|
+
|
|
801
|
+
async def _execute_workflow_with_timing(
    self, options: OptionsProtocol, start_time: float, workflow_id: str
) -> bool:
    """Run the pipeline's workflow phases and record timing and benchmarks.

    Args:
        options: Workflow execution options
        start_time: Workflow start timestamp
        workflow_id: Unique workflow identifier

    Returns:
        True if workflow succeeded, False otherwise
    """
    # Phase execution itself is delegated to the parent pipeline.
    outcome = await self._workflow_pipeline._execute_workflow_phases(
        options, workflow_id
    )
    if self.session:
        self.session.finalize_session(start_time, outcome)

    elapsed = time.time() - start_time
    self._log_workflow_completion(outcome, elapsed)
    self._log_workflow_completion_debug(outcome, elapsed)
    await self._generate_performance_benchmark_report(
        workflow_id, elapsed, outcome
    )

    return outcome
|
|
829
|
+
|
|
830
|
+
def _log_workflow_completion(self, success: bool, duration: float) -> None:
    """Emit a structured log entry for the finished workflow.

    Args:
        success: Whether workflow succeeded
        duration: Workflow execution duration in seconds
    """
    self.logger.info(
        "Workflow execution completed",
        success=success,
        duration_seconds=round(duration, 2),
    )
|
|
842
|
+
|
|
843
|
+
def _log_workflow_completion_debug(self, success: bool, duration: float) -> None:
    """Record completion details with the debugger when debugging is on.

    Args:
        success: Whether workflow succeeded
        duration: Workflow execution duration in seconds
    """
    if not self._should_debug():
        return

    self.debugger.set_workflow_success(success)
    outcome = "completed" if success else "failed"
    self.debugger.log_workflow_phase(
        "workflow_execution",
        outcome,
        duration=duration,
    )
|
|
859
|
+
|
|
860
|
+
async def _generate_performance_benchmark_report(
    self, workflow_id: str, duration: float, success: bool
) -> None:
    """Run the benchmark suite and print a compact performance summary.

    Best-effort: benchmark failures only print a dim warning and never
    abort workflow finalization.

    Args:
        workflow_id: Unique workflow identifier
        duration: Workflow execution duration in seconds
        success: Whether workflow succeeded
    """
    if not self._performance_benchmarks:
        return

    try:
        # NOTE(review): the gathered metrics are not passed to the suite —
        # confirm whether run_benchmark_suite consumes them elsewhere.
        self._gather_performance_metrics(workflow_id, duration, success)
        results = await self._performance_benchmarks.run_benchmark_suite()
        self._display_benchmark_results(results, duration)

    except Exception as e:
        self.console.print(
            f"[dim]⚠️ Performance benchmark failed: {str(e)[:50]}...[/dim]"
        )

    if self.debugger.enabled:
        self.debugger.print_debug_summary()
|
|
885
|
+
|
|
886
|
+
def _gather_performance_metrics(
    self, workflow_id: str, duration: float, success: bool
) -> dict[str, t.Any]:
    """Collect cache and memory statistics for the finished workflow.

    Args:
        workflow_id: Unique workflow identifier
        duration: Workflow execution duration in seconds
        success: Whether workflow succeeded

    Returns:
        Dictionary of performance metrics
    """
    cache_stats = self._cache.get_stats() if self._cache else {}
    # The memory optimizer may not expose statistics; fall back to empty.
    memory_stats = (
        self._memory_optimizer.get_stats()
        if hasattr(self._memory_optimizer, "get_stats")
        else {}
    )
    return {
        "workflow_id": workflow_id,
        "total_duration": duration,
        "success": success,
        "cache_metrics": cache_stats,
        "memory_metrics": memory_stats,
    }
|
|
908
|
+
|
|
909
|
+
def _display_benchmark_results(
    self, benchmark_results: t.Any, duration: float
) -> None:
    """Print the headline benchmark summary followed by key improvements.

    Args:
        benchmark_results: Performance benchmark results
        duration: Workflow execution duration in seconds
    """
    if not benchmark_results:
        return

    self.console.print("\n[cyan]📊 Performance Benchmark Summary[/cyan]")
    self.console.print(f"Workflow Duration: [bold]{duration:.2f}s[/bold]")
    self._show_performance_improvements(benchmark_results)
|
|
925
|
+
|
|
926
|
+
def _show_performance_improvements(self, benchmark_results: t.Any) -> None:
    """Show time and cache stats for the top three benchmark results.

    Args:
        benchmark_results: Performance benchmark results
    """
    top_entries = benchmark_results.results[:3]
    for entry in top_entries:
        self._display_time_improvement(entry)
        self._display_cache_efficiency(entry)
|
|
935
|
+
|
|
936
|
+
def _display_time_improvement(self, result: t.Any) -> None:
    """Print the speed-up percentage when a result shows improvement.

    Args:
        result: Individual benchmark result
    """
    if result.time_improvement_percentage <= 0:
        return
    self.console.print(
        f"[green]⚡[/green] {result.test_name}:"
        f" {result.time_improvement_percentage:.1f}% faster"
    )
|
|
947
|
+
|
|
948
|
+
def _display_cache_efficiency(self, result: t.Any) -> None:
    """Print the cache hit ratio when the result recorded cache activity.

    Args:
        result: Individual benchmark result
    """
    if result.cache_hit_ratio <= 0:
        return
    self.console.print(
        f"[blue]🎯[/blue] Cache efficiency: {result.cache_hit_ratio:.0%}"
    )
|
|
958
|
+
|
|
959
|
+
def _handle_user_interruption(self) -> bool:
    """Report a user interruption and mark the workflow task as failed.

    Returns:
        False (workflow failed due to interruption)
    """
    self.console.print("Interrupted by user")
    if self.session:
        self.session.fail_task("workflow", "Interrupted by user")
    self.logger.warning("Workflow interrupted by user")
    return False
|
|
970
|
+
|
|
971
|
+
def _handle_workflow_exception(self, error: Exception) -> bool:
    """Report a workflow exception to console, session, and logger.

    Args:
        error: Exception that occurred

    Returns:
        False (workflow failed due to exception)
    """
    self.console.print(f"Error: {error}")
    if self.session:
        self.session.fail_task("workflow", f"Unexpected error: {error}")
    # logger.exception also captures the active traceback.
    self.logger.exception(
        "Workflow execution failed",
        error=str(error),
        error_type=type(error).__name__,
    )
    return False
|
|
989
|
+
|
|
990
|
+
def _show_verbose_failure_details(
    self, testing_passed: bool, comprehensive_passed: bool
) -> None:
    """Print which quality sub-phases failed, in verbose form.

    Args:
        testing_passed: Whether testing phase passed
        comprehensive_passed: Whether comprehensive hooks passed
    """
    self.console.print(
        f"[yellow]⚠️ Quality phase results - testing_passed: {testing_passed}, comprehensive_passed: {comprehensive_passed}[/yellow]"
    )
    if not testing_passed:
        self.console.print("[yellow] → Tests reported failure[/yellow]")
    if not comprehensive_passed:
        self.console.print(
            "[yellow] → Comprehensive hooks reported failure[/yellow]"
        )
|
|
1008
|
+
|
|
1009
|
+
# ========================================
|
|
1010
|
+
# Workflow Context and Event Publishing
|
|
1011
|
+
# ========================================
|
|
1012
|
+
|
|
1013
|
+
def _workflow_context(
    self,
    workflow_id: str,
    options: OptionsProtocol,
) -> dict[str, t.Any]:
    """Build a consistent payload for workflow-level events.

    Args:
        workflow_id: Unique workflow identifier
        options: Workflow execution options

    Returns:
        Dictionary of workflow context data
    """
    # Key order is preserved to match the established payload layout.
    flag_map = (
        ("test_mode", "test"),
        ("skip_hooks", "skip_hooks"),
        ("publish", "publish"),
        ("all", "all"),
        ("commit", "commit"),
        ("ai_agent", "ai_agent"),
    )
    context: dict[str, t.Any] = {"workflow_id": workflow_id}
    for key, attr in flag_map:
        context[key] = getattr(options, attr, False)
    return context
|
|
1036
|
+
|
|
1037
|
+
async def _publish_event(
    self, event: WorkflowEvent, payload: dict[str, t.Any]
) -> None:
    """Publish a workflow event when a bus is attached; never raise.

    Args:
        event: Workflow event type to publish
        payload: Event payload data
    """
    if not getattr(self, "_event_bus", None):
        return

    try:
        await self._event_bus.publish(event, payload)  # type: ignore[union-attr]
    except Exception as exc:  # pragma: no cover - logging only
        # Publishing is best-effort: log and continue.
        self.logger.debug(
            "Failed to publish workflow event",
            extra={"event": event.value, "error": str(exc)},
        )
|
|
1056
|
+
|
|
1057
|
+
# ========================================
|
|
1058
|
+
# Phase Execution Delegates
|
|
1059
|
+
# ========================================
|
|
1060
|
+
|
|
1061
|
+
async def _execute_quality_phase(
    self, options: OptionsProtocol, workflow_id: str
) -> bool:
    """Delegate the quality phase to the parent workflow pipeline.

    Args:
        options: Workflow execution options
        workflow_id: Unique workflow identifier

    Returns:
        True if quality phase succeeded, False otherwise
    """
    pipeline = self._workflow_pipeline
    return await pipeline._execute_quality_phase(options, workflow_id)
|
|
1076
|
+
|
|
1077
|
+
async def _execute_publishing_workflow(
    self, options: OptionsProtocol, workflow_id: str
) -> bool:
    """Delegate the publishing workflow to the parent pipeline.

    Args:
        options: Workflow execution options
        workflow_id: Unique workflow identifier

    Returns:
        True if publishing succeeded, False otherwise
    """
    pipeline = self._workflow_pipeline
    return await pipeline._execute_publishing_workflow(options, workflow_id)
|
|
1092
|
+
|
|
1093
|
+
async def _execute_commit_workflow(
    self, options: OptionsProtocol, workflow_id: str
) -> bool:
    """Delegate the commit workflow to the parent pipeline.

    Args:
        options: Workflow execution options
        workflow_id: Unique workflow identifier

    Returns:
        True if commit succeeded, False otherwise
    """
    pipeline = self._workflow_pipeline
    return await pipeline._execute_commit_workflow(options, workflow_id)
|