crackerjack 0.37.9__py3-none-any.whl → 0.45.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crackerjack/README.md +19 -0
- crackerjack/__init__.py +30 -1
- crackerjack/__main__.py +342 -1263
- crackerjack/adapters/README.md +18 -0
- crackerjack/adapters/__init__.py +27 -5
- crackerjack/adapters/_output_paths.py +167 -0
- crackerjack/adapters/_qa_adapter_base.py +309 -0
- crackerjack/adapters/_tool_adapter_base.py +706 -0
- crackerjack/adapters/ai/README.md +65 -0
- crackerjack/adapters/ai/__init__.py +5 -0
- crackerjack/adapters/ai/claude.py +853 -0
- crackerjack/adapters/complexity/README.md +53 -0
- crackerjack/adapters/complexity/__init__.py +10 -0
- crackerjack/adapters/complexity/complexipy.py +641 -0
- crackerjack/adapters/dependency/__init__.py +22 -0
- crackerjack/adapters/dependency/pip_audit.py +418 -0
- crackerjack/adapters/format/README.md +72 -0
- crackerjack/adapters/format/__init__.py +11 -0
- crackerjack/adapters/format/mdformat.py +313 -0
- crackerjack/adapters/format/ruff.py +516 -0
- crackerjack/adapters/lint/README.md +47 -0
- crackerjack/adapters/lint/__init__.py +11 -0
- crackerjack/adapters/lint/codespell.py +273 -0
- crackerjack/adapters/lsp/README.md +49 -0
- crackerjack/adapters/lsp/__init__.py +27 -0
- crackerjack/adapters/{rust_tool_manager.py → lsp/_manager.py} +3 -3
- crackerjack/adapters/{skylos_adapter.py → lsp/skylos.py} +59 -7
- crackerjack/adapters/{zuban_adapter.py → lsp/zuban.py} +3 -6
- crackerjack/adapters/refactor/README.md +59 -0
- crackerjack/adapters/refactor/__init__.py +12 -0
- crackerjack/adapters/refactor/creosote.py +318 -0
- crackerjack/adapters/refactor/refurb.py +406 -0
- crackerjack/adapters/refactor/skylos.py +494 -0
- crackerjack/adapters/sast/README.md +132 -0
- crackerjack/adapters/sast/__init__.py +32 -0
- crackerjack/adapters/sast/_base.py +201 -0
- crackerjack/adapters/sast/bandit.py +423 -0
- crackerjack/adapters/sast/pyscn.py +405 -0
- crackerjack/adapters/sast/semgrep.py +241 -0
- crackerjack/adapters/security/README.md +111 -0
- crackerjack/adapters/security/__init__.py +17 -0
- crackerjack/adapters/security/gitleaks.py +339 -0
- crackerjack/adapters/type/README.md +52 -0
- crackerjack/adapters/type/__init__.py +12 -0
- crackerjack/adapters/type/pyrefly.py +402 -0
- crackerjack/adapters/type/ty.py +402 -0
- crackerjack/adapters/type/zuban.py +522 -0
- crackerjack/adapters/utility/README.md +51 -0
- crackerjack/adapters/utility/__init__.py +10 -0
- crackerjack/adapters/utility/checks.py +884 -0
- crackerjack/agents/README.md +264 -0
- crackerjack/agents/__init__.py +40 -12
- crackerjack/agents/base.py +1 -0
- crackerjack/agents/claude_code_bridge.py +641 -0
- crackerjack/agents/coordinator.py +49 -53
- crackerjack/agents/dry_agent.py +187 -3
- crackerjack/agents/enhanced_coordinator.py +279 -0
- crackerjack/agents/enhanced_proactive_agent.py +185 -0
- crackerjack/agents/error_middleware.py +53 -0
- crackerjack/agents/formatting_agent.py +6 -8
- crackerjack/agents/helpers/__init__.py +9 -0
- crackerjack/agents/helpers/performance/__init__.py +22 -0
- crackerjack/agents/helpers/performance/performance_ast_analyzer.py +357 -0
- crackerjack/agents/helpers/performance/performance_pattern_detector.py +909 -0
- crackerjack/agents/helpers/performance/performance_recommender.py +572 -0
- crackerjack/agents/helpers/refactoring/__init__.py +22 -0
- crackerjack/agents/helpers/refactoring/code_transformer.py +536 -0
- crackerjack/agents/helpers/refactoring/complexity_analyzer.py +344 -0
- crackerjack/agents/helpers/refactoring/dead_code_detector.py +437 -0
- crackerjack/agents/helpers/test_creation/__init__.py +19 -0
- crackerjack/agents/helpers/test_creation/test_ast_analyzer.py +216 -0
- crackerjack/agents/helpers/test_creation/test_coverage_analyzer.py +643 -0
- crackerjack/agents/helpers/test_creation/test_template_generator.py +1031 -0
- crackerjack/agents/performance_agent.py +121 -1152
- crackerjack/agents/refactoring_agent.py +156 -655
- crackerjack/agents/semantic_agent.py +479 -0
- crackerjack/agents/semantic_helpers.py +356 -0
- crackerjack/agents/test_creation_agent.py +19 -1605
- crackerjack/api.py +5 -7
- crackerjack/cli/README.md +394 -0
- crackerjack/cli/__init__.py +1 -1
- crackerjack/cli/cache_handlers.py +23 -18
- crackerjack/cli/cache_handlers_enhanced.py +1 -4
- crackerjack/cli/facade.py +70 -8
- crackerjack/cli/formatting.py +13 -0
- crackerjack/cli/handlers/__init__.py +85 -0
- crackerjack/cli/handlers/advanced.py +103 -0
- crackerjack/cli/handlers/ai_features.py +62 -0
- crackerjack/cli/handlers/analytics.py +479 -0
- crackerjack/cli/handlers/changelog.py +271 -0
- crackerjack/cli/handlers/config_handlers.py +16 -0
- crackerjack/cli/handlers/coverage.py +84 -0
- crackerjack/cli/handlers/documentation.py +280 -0
- crackerjack/cli/handlers/main_handlers.py +497 -0
- crackerjack/cli/handlers/monitoring.py +371 -0
- crackerjack/cli/handlers.py +249 -49
- crackerjack/cli/interactive.py +8 -5
- crackerjack/cli/options.py +203 -110
- crackerjack/cli/semantic_handlers.py +292 -0
- crackerjack/cli/version.py +19 -0
- crackerjack/code_cleaner.py +60 -24
- crackerjack/config/README.md +472 -0
- crackerjack/config/__init__.py +256 -0
- crackerjack/config/global_lock_config.py +191 -54
- crackerjack/config/hooks.py +188 -16
- crackerjack/config/loader.py +239 -0
- crackerjack/config/settings.py +141 -0
- crackerjack/config/tool_commands.py +331 -0
- crackerjack/core/README.md +393 -0
- crackerjack/core/async_workflow_orchestrator.py +79 -53
- crackerjack/core/autofix_coordinator.py +22 -9
- crackerjack/core/container.py +10 -9
- crackerjack/core/enhanced_container.py +9 -9
- crackerjack/core/performance.py +1 -1
- crackerjack/core/performance_monitor.py +5 -3
- crackerjack/core/phase_coordinator.py +1018 -634
- crackerjack/core/proactive_workflow.py +3 -3
- crackerjack/core/retry.py +275 -0
- crackerjack/core/service_watchdog.py +167 -23
- crackerjack/core/session_coordinator.py +187 -382
- crackerjack/core/timeout_manager.py +161 -44
- crackerjack/core/workflow/__init__.py +21 -0
- crackerjack/core/workflow/workflow_ai_coordinator.py +863 -0
- crackerjack/core/workflow/workflow_event_orchestrator.py +1107 -0
- crackerjack/core/workflow/workflow_issue_parser.py +714 -0
- crackerjack/core/workflow/workflow_phase_executor.py +1158 -0
- crackerjack/core/workflow/workflow_security_gates.py +400 -0
- crackerjack/core/workflow_orchestrator.py +1247 -953
- crackerjack/data/README.md +11 -0
- crackerjack/data/__init__.py +8 -0
- crackerjack/data/models.py +79 -0
- crackerjack/data/repository.py +210 -0
- crackerjack/decorators/README.md +180 -0
- crackerjack/decorators/__init__.py +35 -0
- crackerjack/decorators/error_handling.py +649 -0
- crackerjack/decorators/error_handling_decorators.py +334 -0
- crackerjack/decorators/helpers.py +58 -0
- crackerjack/decorators/patterns.py +281 -0
- crackerjack/decorators/utils.py +58 -0
- crackerjack/docs/README.md +11 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +1 -1
- crackerjack/documentation/README.md +11 -0
- crackerjack/documentation/ai_templates.py +1 -1
- crackerjack/documentation/dual_output_generator.py +11 -9
- crackerjack/documentation/reference_generator.py +104 -59
- crackerjack/dynamic_config.py +52 -61
- crackerjack/errors.py +1 -1
- crackerjack/events/README.md +11 -0
- crackerjack/events/__init__.py +16 -0
- crackerjack/events/telemetry.py +175 -0
- crackerjack/events/workflow_bus.py +346 -0
- crackerjack/exceptions/README.md +301 -0
- crackerjack/exceptions/__init__.py +5 -0
- crackerjack/exceptions/config.py +4 -0
- crackerjack/exceptions/tool_execution_error.py +245 -0
- crackerjack/executors/README.md +591 -0
- crackerjack/executors/__init__.py +2 -0
- crackerjack/executors/async_hook_executor.py +539 -77
- crackerjack/executors/cached_hook_executor.py +3 -3
- crackerjack/executors/hook_executor.py +967 -102
- crackerjack/executors/hook_lock_manager.py +31 -22
- crackerjack/executors/individual_hook_executor.py +66 -32
- crackerjack/executors/lsp_aware_hook_executor.py +136 -57
- crackerjack/executors/progress_hook_executor.py +282 -0
- crackerjack/executors/tool_proxy.py +23 -7
- crackerjack/hooks/README.md +485 -0
- crackerjack/hooks/lsp_hook.py +8 -9
- crackerjack/intelligence/README.md +557 -0
- crackerjack/interactive.py +37 -10
- crackerjack/managers/README.md +369 -0
- crackerjack/managers/async_hook_manager.py +41 -57
- crackerjack/managers/hook_manager.py +449 -79
- crackerjack/managers/publish_manager.py +81 -36
- crackerjack/managers/test_command_builder.py +290 -12
- crackerjack/managers/test_executor.py +93 -8
- crackerjack/managers/test_manager.py +1082 -75
- crackerjack/managers/test_progress.py +118 -26
- crackerjack/mcp/README.md +374 -0
- crackerjack/mcp/cache.py +25 -2
- crackerjack/mcp/client_runner.py +35 -18
- crackerjack/mcp/context.py +9 -9
- crackerjack/mcp/dashboard.py +24 -8
- crackerjack/mcp/enhanced_progress_monitor.py +34 -23
- crackerjack/mcp/file_monitor.py +27 -6
- crackerjack/mcp/progress_components.py +45 -34
- crackerjack/mcp/progress_monitor.py +6 -9
- crackerjack/mcp/rate_limiter.py +11 -7
- crackerjack/mcp/server.py +2 -0
- crackerjack/mcp/server_core.py +187 -55
- crackerjack/mcp/service_watchdog.py +12 -9
- crackerjack/mcp/task_manager.py +2 -2
- crackerjack/mcp/tools/README.md +27 -0
- crackerjack/mcp/tools/__init__.py +2 -0
- crackerjack/mcp/tools/core_tools.py +75 -52
- crackerjack/mcp/tools/execution_tools.py +87 -31
- crackerjack/mcp/tools/intelligence_tools.py +2 -2
- crackerjack/mcp/tools/proactive_tools.py +1 -1
- crackerjack/mcp/tools/semantic_tools.py +584 -0
- crackerjack/mcp/tools/utility_tools.py +180 -132
- crackerjack/mcp/tools/workflow_executor.py +87 -46
- crackerjack/mcp/websocket/README.md +31 -0
- crackerjack/mcp/websocket/app.py +11 -1
- crackerjack/mcp/websocket/event_bridge.py +188 -0
- crackerjack/mcp/websocket/jobs.py +27 -4
- crackerjack/mcp/websocket/monitoring/__init__.py +25 -0
- crackerjack/mcp/websocket/monitoring/api/__init__.py +19 -0
- crackerjack/mcp/websocket/monitoring/api/dependencies.py +141 -0
- crackerjack/mcp/websocket/monitoring/api/heatmap.py +154 -0
- crackerjack/mcp/websocket/monitoring/api/intelligence.py +199 -0
- crackerjack/mcp/websocket/monitoring/api/metrics.py +203 -0
- crackerjack/mcp/websocket/monitoring/api/telemetry.py +101 -0
- crackerjack/mcp/websocket/monitoring/dashboard.py +18 -0
- crackerjack/mcp/websocket/monitoring/factory.py +109 -0
- crackerjack/mcp/websocket/monitoring/filters.py +10 -0
- crackerjack/mcp/websocket/monitoring/metrics.py +64 -0
- crackerjack/mcp/websocket/monitoring/models.py +90 -0
- crackerjack/mcp/websocket/monitoring/utils.py +171 -0
- crackerjack/mcp/websocket/monitoring/websocket_manager.py +78 -0
- crackerjack/mcp/websocket/monitoring/websockets/__init__.py +17 -0
- crackerjack/mcp/websocket/monitoring/websockets/dependencies.py +126 -0
- crackerjack/mcp/websocket/monitoring/websockets/heatmap.py +176 -0
- crackerjack/mcp/websocket/monitoring/websockets/intelligence.py +291 -0
- crackerjack/mcp/websocket/monitoring/websockets/metrics.py +291 -0
- crackerjack/mcp/websocket/monitoring_endpoints.py +16 -2930
- crackerjack/mcp/websocket/server.py +1 -3
- crackerjack/mcp/websocket/websocket_handler.py +107 -6
- crackerjack/models/README.md +308 -0
- crackerjack/models/__init__.py +10 -1
- crackerjack/models/config.py +639 -22
- crackerjack/models/config_adapter.py +6 -6
- crackerjack/models/protocols.py +1167 -23
- crackerjack/models/pydantic_models.py +320 -0
- crackerjack/models/qa_config.py +145 -0
- crackerjack/models/qa_results.py +134 -0
- crackerjack/models/results.py +35 -0
- crackerjack/models/semantic_models.py +258 -0
- crackerjack/models/task.py +19 -3
- crackerjack/models/test_models.py +60 -0
- crackerjack/monitoring/README.md +11 -0
- crackerjack/monitoring/ai_agent_watchdog.py +5 -4
- crackerjack/monitoring/metrics_collector.py +4 -3
- crackerjack/monitoring/regression_prevention.py +4 -3
- crackerjack/monitoring/websocket_server.py +4 -241
- crackerjack/orchestration/README.md +340 -0
- crackerjack/orchestration/__init__.py +43 -0
- crackerjack/orchestration/advanced_orchestrator.py +20 -67
- crackerjack/orchestration/cache/README.md +312 -0
- crackerjack/orchestration/cache/__init__.py +37 -0
- crackerjack/orchestration/cache/memory_cache.py +338 -0
- crackerjack/orchestration/cache/tool_proxy_cache.py +340 -0
- crackerjack/orchestration/config.py +297 -0
- crackerjack/orchestration/coverage_improvement.py +13 -6
- crackerjack/orchestration/execution_strategies.py +6 -6
- crackerjack/orchestration/hook_orchestrator.py +1398 -0
- crackerjack/orchestration/strategies/README.md +401 -0
- crackerjack/orchestration/strategies/__init__.py +39 -0
- crackerjack/orchestration/strategies/adaptive_strategy.py +630 -0
- crackerjack/orchestration/strategies/parallel_strategy.py +237 -0
- crackerjack/orchestration/strategies/sequential_strategy.py +299 -0
- crackerjack/orchestration/test_progress_streamer.py +1 -1
- crackerjack/plugins/README.md +11 -0
- crackerjack/plugins/hooks.py +3 -2
- crackerjack/plugins/loader.py +3 -3
- crackerjack/plugins/managers.py +1 -1
- crackerjack/py313.py +191 -0
- crackerjack/security/README.md +11 -0
- crackerjack/services/README.md +374 -0
- crackerjack/services/__init__.py +8 -21
- crackerjack/services/ai/README.md +295 -0
- crackerjack/services/ai/__init__.py +7 -0
- crackerjack/services/ai/advanced_optimizer.py +878 -0
- crackerjack/services/{contextual_ai_assistant.py → ai/contextual_ai_assistant.py} +5 -3
- crackerjack/services/ai/embeddings.py +444 -0
- crackerjack/services/ai/intelligent_commit.py +328 -0
- crackerjack/services/ai/predictive_analytics.py +510 -0
- crackerjack/services/api_extractor.py +5 -3
- crackerjack/services/bounded_status_operations.py +45 -5
- crackerjack/services/cache.py +249 -318
- crackerjack/services/changelog_automation.py +7 -3
- crackerjack/services/command_execution_service.py +305 -0
- crackerjack/services/config_integrity.py +83 -39
- crackerjack/services/config_merge.py +9 -6
- crackerjack/services/config_service.py +198 -0
- crackerjack/services/config_template.py +13 -26
- crackerjack/services/coverage_badge_service.py +6 -4
- crackerjack/services/coverage_ratchet.py +53 -27
- crackerjack/services/debug.py +18 -7
- crackerjack/services/dependency_analyzer.py +4 -4
- crackerjack/services/dependency_monitor.py +13 -13
- crackerjack/services/documentation_generator.py +4 -2
- crackerjack/services/documentation_service.py +62 -33
- crackerjack/services/enhanced_filesystem.py +81 -27
- crackerjack/services/enterprise_optimizer.py +1 -1
- crackerjack/services/error_pattern_analyzer.py +10 -10
- crackerjack/services/file_filter.py +221 -0
- crackerjack/services/file_hasher.py +5 -7
- crackerjack/services/file_io_service.py +361 -0
- crackerjack/services/file_modifier.py +615 -0
- crackerjack/services/filesystem.py +80 -109
- crackerjack/services/git.py +99 -5
- crackerjack/services/health_metrics.py +4 -6
- crackerjack/services/heatmap_generator.py +12 -3
- crackerjack/services/incremental_executor.py +380 -0
- crackerjack/services/initialization.py +101 -49
- crackerjack/services/log_manager.py +2 -2
- crackerjack/services/logging.py +120 -68
- crackerjack/services/lsp_client.py +12 -12
- crackerjack/services/memory_optimizer.py +27 -22
- crackerjack/services/monitoring/README.md +30 -0
- crackerjack/services/monitoring/__init__.py +9 -0
- crackerjack/services/monitoring/dependency_monitor.py +678 -0
- crackerjack/services/monitoring/error_pattern_analyzer.py +676 -0
- crackerjack/services/monitoring/health_metrics.py +716 -0
- crackerjack/services/monitoring/metrics.py +587 -0
- crackerjack/services/{performance_benchmarks.py → monitoring/performance_benchmarks.py} +100 -14
- crackerjack/services/{performance_cache.py → monitoring/performance_cache.py} +21 -15
- crackerjack/services/{performance_monitor.py → monitoring/performance_monitor.py} +10 -6
- crackerjack/services/parallel_executor.py +166 -55
- crackerjack/services/patterns/__init__.py +142 -0
- crackerjack/services/patterns/agents.py +107 -0
- crackerjack/services/patterns/code/__init__.py +15 -0
- crackerjack/services/patterns/code/detection.py +118 -0
- crackerjack/services/patterns/code/imports.py +107 -0
- crackerjack/services/patterns/code/paths.py +159 -0
- crackerjack/services/patterns/code/performance.py +119 -0
- crackerjack/services/patterns/code/replacement.py +36 -0
- crackerjack/services/patterns/core.py +212 -0
- crackerjack/services/patterns/documentation/__init__.py +14 -0
- crackerjack/services/patterns/documentation/badges_markdown.py +96 -0
- crackerjack/services/patterns/documentation/comments_blocks.py +83 -0
- crackerjack/services/patterns/documentation/docstrings.py +89 -0
- crackerjack/services/patterns/formatting.py +226 -0
- crackerjack/services/patterns/operations.py +339 -0
- crackerjack/services/patterns/security/__init__.py +23 -0
- crackerjack/services/patterns/security/code_injection.py +122 -0
- crackerjack/services/patterns/security/credentials.py +190 -0
- crackerjack/services/patterns/security/path_traversal.py +221 -0
- crackerjack/services/patterns/security/unsafe_operations.py +216 -0
- crackerjack/services/patterns/templates.py +62 -0
- crackerjack/services/patterns/testing/__init__.py +18 -0
- crackerjack/services/patterns/testing/error_patterns.py +107 -0
- crackerjack/services/patterns/testing/pytest_output.py +126 -0
- crackerjack/services/patterns/tool_output/__init__.py +16 -0
- crackerjack/services/patterns/tool_output/bandit.py +72 -0
- crackerjack/services/patterns/tool_output/other.py +97 -0
- crackerjack/services/patterns/tool_output/pyright.py +67 -0
- crackerjack/services/patterns/tool_output/ruff.py +44 -0
- crackerjack/services/patterns/url_sanitization.py +114 -0
- crackerjack/services/patterns/utilities.py +42 -0
- crackerjack/services/patterns/utils.py +339 -0
- crackerjack/services/patterns/validation.py +46 -0
- crackerjack/services/patterns/versioning.py +62 -0
- crackerjack/services/predictive_analytics.py +21 -8
- crackerjack/services/profiler.py +280 -0
- crackerjack/services/quality/README.md +415 -0
- crackerjack/services/quality/__init__.py +11 -0
- crackerjack/services/quality/anomaly_detector.py +392 -0
- crackerjack/services/quality/pattern_cache.py +333 -0
- crackerjack/services/quality/pattern_detector.py +479 -0
- crackerjack/services/quality/qa_orchestrator.py +491 -0
- crackerjack/services/{quality_baseline.py → quality/quality_baseline.py} +163 -2
- crackerjack/services/{quality_baseline_enhanced.py → quality/quality_baseline_enhanced.py} +4 -1
- crackerjack/services/{quality_intelligence.py → quality/quality_intelligence.py} +180 -16
- crackerjack/services/regex_patterns.py +58 -2987
- crackerjack/services/regex_utils.py +55 -29
- crackerjack/services/secure_status_formatter.py +42 -15
- crackerjack/services/secure_subprocess.py +35 -2
- crackerjack/services/security.py +16 -8
- crackerjack/services/server_manager.py +40 -51
- crackerjack/services/smart_scheduling.py +46 -6
- crackerjack/services/status_authentication.py +3 -3
- crackerjack/services/thread_safe_status_collector.py +1 -0
- crackerjack/services/tool_filter.py +368 -0
- crackerjack/services/tool_version_service.py +9 -5
- crackerjack/services/unified_config.py +43 -351
- crackerjack/services/vector_store.py +689 -0
- crackerjack/services/version_analyzer.py +6 -4
- crackerjack/services/version_checker.py +14 -8
- crackerjack/services/zuban_lsp_service.py +5 -4
- crackerjack/slash_commands/README.md +11 -0
- crackerjack/slash_commands/init.md +2 -12
- crackerjack/slash_commands/run.md +84 -50
- crackerjack/tools/README.md +11 -0
- crackerjack/tools/__init__.py +30 -0
- crackerjack/tools/_git_utils.py +105 -0
- crackerjack/tools/check_added_large_files.py +139 -0
- crackerjack/tools/check_ast.py +105 -0
- crackerjack/tools/check_json.py +103 -0
- crackerjack/tools/check_jsonschema.py +297 -0
- crackerjack/tools/check_toml.py +103 -0
- crackerjack/tools/check_yaml.py +110 -0
- crackerjack/tools/codespell_wrapper.py +72 -0
- crackerjack/tools/end_of_file_fixer.py +202 -0
- crackerjack/tools/format_json.py +128 -0
- crackerjack/tools/mdformat_wrapper.py +114 -0
- crackerjack/tools/trailing_whitespace.py +198 -0
- crackerjack/tools/validate_regex_patterns.py +7 -3
- crackerjack/ui/README.md +11 -0
- crackerjack/ui/dashboard_renderer.py +28 -0
- crackerjack/ui/templates/README.md +11 -0
- crackerjack/utils/console_utils.py +13 -0
- crackerjack/utils/dependency_guard.py +230 -0
- crackerjack/utils/retry_utils.py +275 -0
- crackerjack/workflows/README.md +590 -0
- crackerjack/workflows/__init__.py +46 -0
- crackerjack/workflows/actions.py +811 -0
- crackerjack/workflows/auto_fix.py +444 -0
- crackerjack/workflows/container_builder.py +499 -0
- crackerjack/workflows/definitions.py +443 -0
- crackerjack/workflows/engine.py +177 -0
- crackerjack/workflows/event_bridge.py +242 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/METADATA +678 -98
- crackerjack-0.45.2.dist-info/RECORD +478 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/WHEEL +1 -1
- crackerjack/managers/test_manager_backup.py +0 -1075
- crackerjack/mcp/tools/execution_tools_backup.py +0 -1011
- crackerjack/mixins/__init__.py +0 -3
- crackerjack/mixins/error_handling.py +0 -145
- crackerjack/services/config.py +0 -358
- crackerjack/ui/server_panels.py +0 -125
- crackerjack-0.37.9.dist-info/RECORD +0 -231
- /crackerjack/adapters/{rust_tool_adapter.py → lsp/_base.py} +0 -0
- /crackerjack/adapters/{lsp_client.py → lsp/_client.py} +0 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.37.9.dist-info → crackerjack-0.45.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,2935 +1,21 @@
|
|
|
1
|
-
"""
|
|
1
|
+
"""Backward compatibility wrapper for refactored monitoring endpoints.
|
|
2
2
|
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
from datetime import datetime
|
|
7
|
-
from pathlib import Path
|
|
3
|
+
This module re-exports the main factory function from the new monitoring/ directory
|
|
4
|
+
to maintain backward compatibility with existing code that imports from
|
|
5
|
+
crackerjack.mcp.websocket.monitoring_endpoints.
|
|
8
6
|
|
|
9
|
-
|
|
10
|
-
|
|
7
|
+
The actual implementation has been split into organized modules:
|
|
8
|
+
- monitoring/models.py - Pydantic data models
|
|
9
|
+
- monitoring/websocket_manager.py - WebSocket connection management
|
|
10
|
+
- monitoring/utils.py - Utility functions
|
|
11
|
+
- monitoring/dashboard.py - Dashboard HTML rendering
|
|
12
|
+
- monitoring/websockets/ - WebSocket endpoint modules (metrics, intelligence, dependencies, heatmap)
|
|
13
|
+
- monitoring/api/ - REST API endpoint modules (telemetry, metrics, intelligence, dependencies, heatmap)
|
|
14
|
+
- monitoring/factory.py - Endpoint registration orchestration
|
|
11
15
|
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
DependencyAnalyzer,
|
|
15
|
-
DependencyGraph,
|
|
16
|
-
)
|
|
17
|
-
from crackerjack.services.error_pattern_analyzer import (
|
|
18
|
-
ErrorPatternAnalyzer,
|
|
19
|
-
)
|
|
20
|
-
from crackerjack.services.quality_baseline_enhanced import (
|
|
21
|
-
EnhancedQualityBaselineService,
|
|
22
|
-
QualityAlert,
|
|
23
|
-
SystemHealthStatus,
|
|
24
|
-
TrendDirection,
|
|
25
|
-
UnifiedMetrics,
|
|
26
|
-
)
|
|
27
|
-
from crackerjack.services.quality_intelligence import (
|
|
28
|
-
QualityIntelligenceService,
|
|
29
|
-
)
|
|
16
|
+
All imports should work exactly as before via the main factory function.
|
|
17
|
+
"""
|
|
30
18
|
|
|
31
|
-
from .
|
|
19
|
+
from .monitoring import MonitoringWebSocketManager, create_monitoring_endpoints
|
|
32
20
|
|
|
33
|
-
|
|
34
|
-
class MonitoringWebSocketManager:
|
|
35
|
-
"""Manages WebSocket connections for real-time monitoring."""
|
|
36
|
-
|
|
37
|
-
def __init__(self) -> None:
|
|
38
|
-
self.active_connections: dict[str, WebSocket] = {}
|
|
39
|
-
self.metrics_subscribers: set[WebSocket] = set()
|
|
40
|
-
self.alerts_subscribers: set[WebSocket] = set()
|
|
41
|
-
|
|
42
|
-
async def connect_metrics(self, websocket: WebSocket, client_id: str) -> None:
|
|
43
|
-
"""Connect a client for metrics streaming."""
|
|
44
|
-
await websocket.accept()
|
|
45
|
-
self.active_connections[client_id] = websocket
|
|
46
|
-
self.metrics_subscribers.add(websocket)
|
|
47
|
-
|
|
48
|
-
async def connect_alerts(self, websocket: WebSocket, client_id: str) -> None:
|
|
49
|
-
"""Connect a client for alert notifications."""
|
|
50
|
-
await websocket.accept()
|
|
51
|
-
self.active_connections[client_id] = websocket
|
|
52
|
-
self.alerts_subscribers.add(websocket)
|
|
53
|
-
|
|
54
|
-
def disconnect(self, websocket: WebSocket, client_id: str) -> None:
|
|
55
|
-
"""Disconnect a client."""
|
|
56
|
-
if client_id in self.active_connections:
|
|
57
|
-
del self.active_connections[client_id]
|
|
58
|
-
self.metrics_subscribers.discard(websocket)
|
|
59
|
-
self.alerts_subscribers.discard(websocket)
|
|
60
|
-
|
|
61
|
-
async def broadcast_metrics(self, metrics: UnifiedMetrics) -> None:
|
|
62
|
-
"""Broadcast metrics to all connected metrics subscribers."""
|
|
63
|
-
message = {
|
|
64
|
-
"type": "metrics_update",
|
|
65
|
-
"data": metrics.to_dict(),
|
|
66
|
-
"timestamp": datetime.now().isoformat(),
|
|
67
|
-
}
|
|
68
|
-
|
|
69
|
-
disconnected = []
|
|
70
|
-
for websocket in self.metrics_subscribers:
|
|
71
|
-
try:
|
|
72
|
-
await websocket.send_text(json.dumps(message))
|
|
73
|
-
except Exception:
|
|
74
|
-
disconnected.append(websocket)
|
|
75
|
-
|
|
76
|
-
# Clean up disconnected clients
|
|
77
|
-
self.metrics_subscribers.difference_update(disconnected)
|
|
78
|
-
|
|
79
|
-
async def broadcast_alert(self, alert: QualityAlert) -> None:
|
|
80
|
-
"""Broadcast alert to all connected alert subscribers."""
|
|
81
|
-
message = {
|
|
82
|
-
"type": "alert",
|
|
83
|
-
"data": alert.to_dict(),
|
|
84
|
-
"timestamp": datetime.now().isoformat(),
|
|
85
|
-
}
|
|
86
|
-
|
|
87
|
-
disconnected = []
|
|
88
|
-
for websocket in self.alerts_subscribers:
|
|
89
|
-
try:
|
|
90
|
-
await websocket.send_text(json.dumps(message))
|
|
91
|
-
except Exception:
|
|
92
|
-
disconnected.append(websocket)
|
|
93
|
-
|
|
94
|
-
# Clean up disconnected clients
|
|
95
|
-
self.alerts_subscribers.difference_update(disconnected)
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
def create_monitoring_endpoints(
|
|
99
|
-
app: FastAPI,
|
|
100
|
-
job_manager: JobManager,
|
|
101
|
-
progress_dir: Path,
|
|
102
|
-
ws_manager: MonitoringWebSocketManager,
|
|
103
|
-
) -> None:
|
|
104
|
-
"""Add monitoring endpoints to the FastAPI app."""
|
|
105
|
-
services = _initialize_monitoring_services(progress_dir)
|
|
106
|
-
|
|
107
|
-
_register_websocket_endpoints(app, job_manager, ws_manager, services)
|
|
108
|
-
_register_rest_api_endpoints(app, job_manager, services)
|
|
109
|
-
_register_dashboard_endpoint(app)
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
def _initialize_monitoring_services(progress_dir: Path) -> dict[str, t.Any]:
|
|
113
|
-
"""Initialize all monitoring services."""
|
|
114
|
-
cache = CrackerjackCache()
|
|
115
|
-
quality_service = EnhancedQualityBaselineService(cache=cache)
|
|
116
|
-
intelligence_service = QualityIntelligenceService(quality_service)
|
|
117
|
-
dependency_analyzer = DependencyAnalyzer(progress_dir.parent)
|
|
118
|
-
error_analyzer = ErrorPatternAnalyzer(progress_dir.parent)
|
|
119
|
-
|
|
120
|
-
return {
|
|
121
|
-
"cache": cache,
|
|
122
|
-
"quality_service": quality_service,
|
|
123
|
-
"intelligence_service": intelligence_service,
|
|
124
|
-
"dependency_analyzer": dependency_analyzer,
|
|
125
|
-
"error_analyzer": error_analyzer,
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
def _register_websocket_endpoints(
|
|
130
|
-
app: FastAPI,
|
|
131
|
-
job_manager: JobManager,
|
|
132
|
-
ws_manager: MonitoringWebSocketManager,
|
|
133
|
-
services: dict[str, t.Any],
|
|
134
|
-
) -> None:
|
|
135
|
-
"""Register all WebSocket endpoints."""
|
|
136
|
-
_register_metrics_websockets(app, job_manager, ws_manager, services)
|
|
137
|
-
_register_intelligence_websockets(app, ws_manager, services)
|
|
138
|
-
_register_dependency_websockets(app, ws_manager, services)
|
|
139
|
-
_register_heatmap_websockets(app, ws_manager, services)
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
def _register_metrics_websockets(
|
|
143
|
-
app: FastAPI,
|
|
144
|
-
job_manager: JobManager,
|
|
145
|
-
ws_manager: MonitoringWebSocketManager,
|
|
146
|
-
services: dict[str, t.Any],
|
|
147
|
-
) -> None:
|
|
148
|
-
"""Register metrics-related WebSocket endpoints."""
|
|
149
|
-
quality_service = services["quality_service"]
|
|
150
|
-
|
|
151
|
-
@app.websocket("/ws/metrics/live")
|
|
152
|
-
async def websocket_metrics_live(websocket: WebSocket) -> None:
|
|
153
|
-
"""WebSocket endpoint for live metrics streaming."""
|
|
154
|
-
await _handle_live_metrics_websocket(
|
|
155
|
-
websocket, ws_manager, quality_service, job_manager
|
|
156
|
-
)
|
|
157
|
-
|
|
158
|
-
@app.websocket("/ws/metrics/historical/{days}")
|
|
159
|
-
async def websocket_metrics_historical(websocket: WebSocket, days: int) -> None:
|
|
160
|
-
"""WebSocket endpoint for historical metrics data."""
|
|
161
|
-
await _handle_historical_metrics_websocket(
|
|
162
|
-
websocket, ws_manager, quality_service, days
|
|
163
|
-
)
|
|
164
|
-
|
|
165
|
-
@app.websocket("/ws/alerts/subscribe")
|
|
166
|
-
async def websocket_alerts_subscribe(websocket: WebSocket) -> None:
|
|
167
|
-
"""WebSocket endpoint for alert subscriptions."""
|
|
168
|
-
await _handle_alerts_websocket(websocket, ws_manager)
|
|
169
|
-
|
|
170
|
-
@app.websocket("/ws/dashboard/overview")
|
|
171
|
-
async def websocket_dashboard_overview(websocket: WebSocket) -> None:
|
|
172
|
-
"""WebSocket endpoint for comprehensive dashboard data."""
|
|
173
|
-
await _handle_dashboard_websocket(
|
|
174
|
-
websocket, ws_manager, quality_service, job_manager
|
|
175
|
-
)
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
async def _handle_live_metrics_websocket(
|
|
179
|
-
websocket: WebSocket,
|
|
180
|
-
ws_manager: MonitoringWebSocketManager,
|
|
181
|
-
quality_service: EnhancedQualityBaselineService,
|
|
182
|
-
job_manager: JobManager,
|
|
183
|
-
) -> None:
|
|
184
|
-
"""Handle live metrics WebSocket connection."""
|
|
185
|
-
client_id = f"metrics_{datetime.now().timestamp()}"
|
|
186
|
-
await ws_manager.connect_metrics(websocket, client_id)
|
|
187
|
-
|
|
188
|
-
try:
|
|
189
|
-
# Send initial metrics
|
|
190
|
-
current_metrics = await get_current_metrics(quality_service, job_manager)
|
|
191
|
-
await websocket.send_text(
|
|
192
|
-
json.dumps(
|
|
193
|
-
{
|
|
194
|
-
"type": "initial_metrics",
|
|
195
|
-
"data": current_metrics.to_dict(),
|
|
196
|
-
"timestamp": datetime.now().isoformat(),
|
|
197
|
-
}
|
|
198
|
-
)
|
|
199
|
-
)
|
|
200
|
-
|
|
201
|
-
# Keep connection alive and handle client messages
|
|
202
|
-
while True:
|
|
203
|
-
try:
|
|
204
|
-
message = await asyncio.wait_for(websocket.receive_text(), timeout=30.0)
|
|
205
|
-
data = json.loads(message)
|
|
206
|
-
|
|
207
|
-
if data.get("type") == "request_update":
|
|
208
|
-
metrics = await get_current_metrics(quality_service, job_manager)
|
|
209
|
-
await websocket.send_text(
|
|
210
|
-
json.dumps(
|
|
211
|
-
{
|
|
212
|
-
"type": "metrics_update",
|
|
213
|
-
"data": metrics.to_dict(),
|
|
214
|
-
"timestamp": datetime.now().isoformat(),
|
|
215
|
-
}
|
|
216
|
-
)
|
|
217
|
-
)
|
|
218
|
-
|
|
219
|
-
except TimeoutError:
|
|
220
|
-
metrics = await get_current_metrics(quality_service, job_manager)
|
|
221
|
-
await ws_manager.broadcast_metrics(metrics)
|
|
222
|
-
|
|
223
|
-
except WebSocketDisconnect:
|
|
224
|
-
ws_manager.disconnect(websocket, client_id)
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
async def _handle_historical_metrics_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    quality_service: EnhancedQualityBaselineService,
    days: int,
) -> None:
    """Handle a historical-metrics WebSocket connection.

    Validates the requested window, converts stored baselines into
    unified metrics, streams them to the client in chunks, then blocks
    on the socket until the client disconnects.
    """
    if days > 365:
        # 1008 = WebSocket "policy violation" close code.
        await websocket.close(code=1008, reason="Days parameter too large")
        return

    client_id = f"historical_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        # NOTE(review): `days` is passed as a record-count `limit`, not a
        # time window — confirm get_recent_baselines semantics.
        historical_data = _convert_baselines_to_metrics(
            quality_service.get_recent_baselines(limit=days)
        )

        await _send_historical_data_chunks(websocket, historical_data)

        # Keep connection open for updates; this socket is read-only from
        # the server's perspective after the initial stream.
        while True:
            await websocket.receive_text()

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
def _convert_baselines_to_metrics(
    baselines: list[t.Any],
) -> list[UnifiedMetrics]:
    """Map stored quality baselines onto ``UnifiedMetrics`` records.

    Hook duration and active-job count are not recorded per baseline, so
    they are zeroed; the error count aggregates the four issue counters.
    """
    converted: list[UnifiedMetrics] = []
    for entry in baselines:
        total_errors = (
            entry.hook_failures
            + entry.security_issues
            + entry.type_errors
            + entry.linting_issues
        )
        converted.append(
            UnifiedMetrics(
                timestamp=entry.timestamp,
                quality_score=entry.quality_score,
                test_coverage=entry.coverage_percent,
                hook_duration=0.0,
                active_jobs=0,
                error_count=total_errors,
                trend_direction=TrendDirection.STABLE,
                predictions={},
            )
        )
    return converted
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
async def _send_historical_data_chunks(
    websocket: WebSocket, historical_data: list[UnifiedMetrics]
) -> None:
    """Stream historical metrics to the client in fixed-size chunks.

    Sends 100 records per "historical_chunk" message with a short pause
    between chunks so slow clients are not flooded, then finishes with a
    "historical_complete" message carrying the total record count.
    """
    chunk_size = 100
    total_chunks = (len(historical_data) + chunk_size - 1) // chunk_size

    for index in range(total_chunks):
        start = index * chunk_size
        chunk = historical_data[start : start + chunk_size]
        payload = {
            "type": "historical_chunk",
            "data": [record.to_dict() for record in chunk],
            "chunk_index": index,
            "total_chunks": total_chunks,
            "timestamp": datetime.now().isoformat(),
        }
        await websocket.send_text(json.dumps(payload))
        # Brief pause between chunks to avoid overwhelming the client.
        await asyncio.sleep(0.1)

    # Completion signal lets the client stop showing a loading state.
    await websocket.send_text(
        json.dumps(
            {
                "type": "historical_complete",
                "total_records": len(historical_data),
                "timestamp": datetime.now().isoformat(),
            }
        )
    )
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
async def _handle_alerts_websocket(
    websocket: WebSocket, ws_manager: MonitoringWebSocketManager
) -> None:
    """Handle an alerts WebSocket connection.

    Sends the active-alert list once (currently always empty), then
    blocks reading client messages so the registration with the manager
    stays alive for broadcast alerts until the client disconnects.
    """
    client_id = f"alerts_{datetime.now().timestamp()}"
    await ws_manager.connect_alerts(websocket, client_id)

    try:
        # Send current active alerts - would need to track these separately.
        active_alerts = []  # For now, empty list
        await websocket.send_text(
            json.dumps(
                {
                    "type": "active_alerts",
                    "data": [alert.to_dict() for alert in active_alerts],
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Keep connection alive; incoming messages are intentionally ignored.
        while True:
            await websocket.receive_text()

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
async def _handle_dashboard_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    quality_service: EnhancedQualityBaselineService,
    job_manager: JobManager,
) -> None:
    """Handle a dashboard-overview WebSocket connection.

    Pushes a full dashboard state snapshot every 10 seconds until the
    client disconnects; the client never sends messages on this socket.
    """
    client_id = f"dashboard_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        while True:
            current_metrics = await get_current_metrics(quality_service, job_manager)

            # Reduce the unified snapshot to the plain mapping the quality
            # service expects when building dashboard state.
            metrics_dict = _create_dashboard_metrics_dict(current_metrics)

            dashboard_state = quality_service.create_dashboard_state(
                current_metrics=metrics_dict,
                active_job_count=len(job_manager.active_connections),
                historical_days=7,
            )

            await websocket.send_text(
                json.dumps(
                    {
                        "type": "dashboard_update",
                        "data": dashboard_state.to_dict(),
                        "timestamp": datetime.now().isoformat(),
                    }
                )
            )

            # Fixed 10-second refresh cadence for the overview.
            await asyncio.sleep(10)

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
def _create_dashboard_metrics_dict(current_metrics: UnifiedMetrics) -> dict[str, t.Any]:
    """Build the minimal metrics mapping consumed by ``create_dashboard_state``.

    Only test coverage is carried over from the unified snapshot; the
    remaining fields are fixed placeholders the dashboard state builder
    expects to be present.
    """
    snapshot: dict[str, t.Any] = {"coverage_percent": current_metrics.test_coverage}
    snapshot["test_count"] = 0
    snapshot["test_pass_rate"] = 100.0
    for zero_field in (
        "hook_failures",
        "complexity_violations",
        "security_issues",
        "type_errors",
        "linting_issues",
    ):
        snapshot[zero_field] = 0
    return snapshot
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
def _register_intelligence_websockets(
    app: FastAPI,
    ws_manager: MonitoringWebSocketManager,
    services: dict[str, t.Any],
) -> None:
    """Register intelligence-related WebSocket endpoints.

    Wires three streaming endpoints (anomalies, predictions, patterns)
    onto the app; each route delegates to its dedicated handler
    coroutine with the shared intelligence service.
    """
    intelligence_service = services["intelligence_service"]

    @app.websocket("/ws/intelligence/anomalies")
    async def websocket_anomaly_detection(websocket: WebSocket) -> None:
        """WebSocket endpoint for real-time anomaly detection."""
        await _handle_anomaly_detection_websocket(
            websocket, ws_manager, intelligence_service
        )

    @app.websocket("/ws/intelligence/predictions")
    async def websocket_predictions(websocket: WebSocket) -> None:
        """WebSocket endpoint for quality predictions."""
        await _handle_predictions_websocket(websocket, ws_manager, intelligence_service)

    @app.websocket("/ws/intelligence/patterns")
    async def websocket_pattern_analysis(websocket: WebSocket) -> None:
        """WebSocket endpoint for pattern recognition and correlation analysis."""
        await _handle_pattern_analysis_websocket(
            websocket, ws_manager, intelligence_service
        )
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
async def _handle_anomaly_detection_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    intelligence_service: QualityIntelligenceService,
) -> None:
    """Handle an anomaly-detection WebSocket connection.

    Sends a 7-day anomaly scan on connect, serves "request_analysis"
    messages on demand, and runs a 1-day re-scan whenever the client is
    silent for 60 seconds. Runs until the client disconnects.
    """
    client_id = f"anomalies_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        # Send initial anomaly analysis over a one-week window.
        anomalies = intelligence_service.detect_anomalies(days=7)
        await websocket.send_text(
            json.dumps(
                {
                    "type": "anomalies_initial",
                    "data": [anomaly.to_dict() for anomaly in anomalies],
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Stream ongoing anomaly detection.
        while True:
            try:
                message = await asyncio.wait_for(websocket.receive_text(), timeout=60.0)
                data = json.loads(message)

                if data.get("type") == "request_analysis":
                    await _handle_anomaly_request(websocket, intelligence_service, data)

            except TimeoutError:
                # Quiet for 60s: run a short periodic check on recent data.
                await _send_periodic_anomaly_check(websocket, intelligence_service)

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
async def _handle_anomaly_request(
    websocket: WebSocket,
    intelligence_service: QualityIntelligenceService,
    data: dict[str, t.Any],
) -> None:
    """Run an on-demand anomaly scan and push the results to the client.

    The request may carry "days" (default 7) and an optional "metrics"
    filter, both forwarded verbatim to the intelligence service.
    """
    window_days = data.get("days", 7)
    requested_metrics = data.get("metrics")

    found = intelligence_service.detect_anomalies(
        days=window_days, metrics=requested_metrics
    )
    payload = {
        "type": "anomalies_update",
        "data": [item.to_dict() for item in found],
        "timestamp": datetime.now().isoformat(),
    }
    await websocket.send_text(json.dumps(payload))
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
async def _send_periodic_anomaly_check(
    websocket: WebSocket, intelligence_service: QualityIntelligenceService
) -> None:
    """Push a fresh one-day anomaly scan, but only when something was found.

    Used as the idle-timeout action of the anomaly WebSocket loop; a
    clean scan produces no traffic at all.
    """
    recent = intelligence_service.detect_anomalies(days=1)
    if not recent:
        return
    message = json.dumps(
        {
            "type": "anomalies_alert",
            "data": [item.to_dict() for item in recent],
            "timestamp": datetime.now().isoformat(),
        }
    )
    await websocket.send_text(message)
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
async def _handle_predictions_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    intelligence_service: QualityIntelligenceService,
) -> None:
    """Handle a predictions WebSocket connection.

    Sends a 30-day insight snapshot on connect, serves
    "request_predictions" messages on demand, and pushes a short-window
    refresh whenever the client is silent for 5 minutes.
    """
    client_id = f"predictions_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        # Send initial predictions from a 30-day analysis window.
        insights = intelligence_service.generate_comprehensive_insights(days=30)
        await websocket.send_text(
            json.dumps(
                {
                    "type": "predictions_initial",
                    "data": insights.to_dict(),
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Stream prediction updates.
        while True:
            try:
                message = await asyncio.wait_for(
                    websocket.receive_text(), timeout=300.0
                )
                data = json.loads(message)

                if data.get("type") == "request_predictions":
                    await _handle_prediction_request(
                        websocket, intelligence_service, data
                    )

            except TimeoutError:
                # Five quiet minutes: push a periodic short-window update.
                await _send_periodic_prediction_update(websocket, intelligence_service)

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
async def _handle_prediction_request(
    websocket: WebSocket,
    intelligence_service: QualityIntelligenceService,
    data: dict[str, t.Any],
) -> None:
    """Answer a client prediction request with insights plus per-metric forecasts.

    Reads "days" (default 30) for the insight window and "horizon"
    (default 7) for the forecast horizon, then sends one
    "predictions_update" message combining both.
    """
    window_days = data.get("days", 30)
    forecast_horizon = data.get("horizon", 7)

    insights = intelligence_service.generate_comprehensive_insights(days=window_days)

    # Collect the forecast for each metric the dashboard charts; metrics
    # without a forecast are simply omitted.
    forecasts = intelligence_service.generate_advanced_predictions(
        horizon_days=forecast_horizon
    )
    selected: dict[str, t.Any] = {}
    for metric_name in ("quality_score", "test_coverage", "hook_duration"):
        match = next((f for f in forecasts if f.metric_name == metric_name), None)
        if match:
            selected[metric_name] = match.to_dict()

    reply = {
        "type": "predictions_update",
        "data": {
            "insights": insights.to_dict(),
            "predictions": selected,
        },
        "timestamp": datetime.now().isoformat(),
    }
    await websocket.send_text(json.dumps(reply))
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
async def _send_periodic_prediction_update(
    websocket: WebSocket, intelligence_service: QualityIntelligenceService
) -> None:
    """Push a periodic short-window (7-day) insight refresh to the client.

    Used as the idle-timeout action of the predictions WebSocket loop.
    """
    refreshed = intelligence_service.generate_comprehensive_insights(days=7)
    message = json.dumps(
        {
            "type": "predictions_periodic",
            "data": refreshed.to_dict(),
            "timestamp": datetime.now().isoformat(),
        }
    )
    await websocket.send_text(message)
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
async def _handle_pattern_analysis_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    intelligence_service: QualityIntelligenceService,
) -> None:
    """Handle a pattern-analysis WebSocket connection.

    Sends a 30-day pattern analysis on connect, serves
    "request_patterns" messages on demand, and pushes a 7-day refresh
    whenever the client is silent for 3 minutes.
    """
    client_id = f"patterns_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        # Send initial pattern analysis over a 30-day window.
        patterns = intelligence_service.identify_patterns(days=30)
        await websocket.send_text(
            json.dumps(
                {
                    "type": "patterns_initial",
                    "data": patterns,
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Stream pattern updates.
        while True:
            try:
                message = await asyncio.wait_for(
                    websocket.receive_text(), timeout=180.0
                )
                data = json.loads(message)

                if data.get("type") == "request_patterns":
                    await _handle_pattern_request(websocket, intelligence_service, data)

            except TimeoutError:
                # Three quiet minutes: push a periodic short-window update.
                await _send_periodic_pattern_update(websocket, intelligence_service)

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
async def _handle_pattern_request(
    websocket: WebSocket,
    intelligence_service: QualityIntelligenceService,
    data: dict[str, t.Any],
) -> None:
    """Serve an on-demand pattern analysis over the requested window.

    The request may carry "days" (default 30); the raw analysis result
    is forwarded to the client as a "patterns_update" message.
    """
    window_days = data.get("days", 30)
    analysis = intelligence_service.identify_patterns(days=window_days)

    reply = {
        "type": "patterns_update",
        "data": analysis,
        "timestamp": datetime.now().isoformat(),
    }
    await websocket.send_text(json.dumps(reply))
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
async def _send_periodic_pattern_update(
    websocket: WebSocket, intelligence_service: QualityIntelligenceService
) -> None:
    """Push a periodic short-window (7-day) pattern refresh to the client.

    Used as the idle-timeout action of the pattern-analysis WebSocket loop.
    """
    refreshed = intelligence_service.identify_patterns(days=7)
    message = json.dumps(
        {
            "type": "patterns_periodic",
            "data": refreshed,
            "timestamp": datetime.now().isoformat(),
        }
    )
    await websocket.send_text(message)
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
def _register_dependency_websockets(
    app: FastAPI,
    ws_manager: MonitoringWebSocketManager,
    services: dict[str, t.Any],
) -> None:
    """Register dependency-related WebSocket endpoints.

    Currently exposes a single graph-streaming endpoint backed by the
    "dependency_analyzer" entry from the service registry.
    """
    dependency_analyzer = services["dependency_analyzer"]

    @app.websocket("/ws/dependencies/graph")
    async def websocket_dependency_graph(websocket: WebSocket) -> None:
        """WebSocket endpoint for dependency graph data."""
        await _handle_dependency_graph_websocket(
            websocket, ws_manager, dependency_analyzer
        )
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
async def _handle_dependency_graph_websocket(
    websocket: WebSocket,
    ws_manager: MonitoringWebSocketManager,
    dependency_analyzer: DependencyAnalyzer,
) -> None:
    """Handle a dependency-graph WebSocket connection.

    Announces analysis start, runs one project analysis and sends the
    complete graph, then serves filter/refresh requests; sends a
    keepalive whenever the client is silent for 30 seconds.
    """
    client_id = f"dependencies_{datetime.now().timestamp()}"
    await ws_manager.connect_metrics(websocket, client_id)

    try:
        # Send initial message so the client can show progress while the
        # (potentially slow) analysis runs.
        await websocket.send_text(
            json.dumps(
                {
                    "type": "analysis_started",
                    "message": "Starting dependency analysis...",
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Generate dependency graph.
        graph = dependency_analyzer.analyze_project()

        # Send the complete graph data.
        await websocket.send_text(
            json.dumps(
                {
                    "type": "graph_data",
                    "data": graph.to_dict(),
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

        # Listen for client requests; the cached `graph` is reused for
        # filtering so only explicit refreshes re-run the analysis.
        while True:
            try:
                message = await asyncio.wait_for(websocket.receive_text(), timeout=30.0)
                data = json.loads(message)

                await _handle_dependency_request(
                    websocket, dependency_analyzer, graph, data
                )

            except TimeoutError:
                # 30s of silence: emit a keepalive so intermediaries do not
                # drop the idle connection.
                await websocket.send_text(
                    json.dumps(
                        {
                            "type": "keepalive",
                            "timestamp": datetime.now().isoformat(),
                        }
                    )
                )

    except WebSocketDisconnect:
        ws_manager.disconnect(websocket, client_id)
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
async def _handle_dependency_request(
    websocket: WebSocket,
    dependency_analyzer: DependencyAnalyzer,
    graph: DependencyGraph,
    data: dict[str, t.Any],
) -> None:
    """Dispatch a dependency-graph client message to the matching action.

    "filter_request" filters the cached graph; "refresh_request" re-runs
    the full project analysis. Unknown message types are ignored.
    """
    request_kind = data.get("type")

    if request_kind == "filter_request":
        # Filter the already-computed graph rather than re-analyzing.
        filtered = await _apply_graph_filters(graph, data.get("filters", {}))
        reply = {
            "type": "filtered_graph",
            "data": filtered.to_dict(),
            "timestamp": datetime.now().isoformat(),
        }
        await websocket.send_text(json.dumps(reply))
    elif request_kind == "refresh_request":
        # Explicit refresh: run a fresh full analysis.
        refreshed = dependency_analyzer.analyze_project()
        reply = {
            "type": "graph_data",
            "data": refreshed.to_dict(),
            "timestamp": datetime.now().isoformat(),
        }
        await websocket.send_text(json.dumps(reply))
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
def _register_rest_api_endpoints(
    app: FastAPI, job_manager: JobManager, services: dict[str, t.Any]
) -> None:
    """Register all REST API endpoints.

    Fans out to the per-domain registrars (metrics, intelligence,
    dependencies, heatmaps); only the metrics group needs the job manager.
    """
    _register_metrics_api_endpoints(app, job_manager, services)
    _register_intelligence_api_endpoints(app, services)
    _register_dependency_api_endpoints(app, services)
    _register_heatmap_api_endpoints(app, services)
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
def _register_metrics_api_endpoints(
    app: FastAPI, job_manager: JobManager, services: dict[str, t.Any]
) -> None:
    """Register metrics-related REST API endpoints.

    Args:
        app: FastAPI application to attach the routes to.
        job_manager: Source of active-job information for the summary.
        services: Service registry; must contain "quality_service".
    """
    quality_service = services["quality_service"]

    # Fix: these handlers return responses, so the previous `-> None`
    # annotations were wrong and could confuse FastAPI's response-model
    # inference; annotating a Response subclass disables model validation.
    @app.get("/api/metrics/summary")
    async def get_metrics_summary() -> JSONResponse:
        """Get the current system summary."""
        try:
            current_metrics = await get_current_metrics(quality_service, job_manager)
            return JSONResponse(
                {
                    "status": "success",
                    "data": current_metrics.to_dict(),
                    "timestamp": datetime.now().isoformat(),
                }
            )
        except Exception as e:
            # Chain the cause so the original traceback is preserved.
            raise HTTPException(status_code=500, detail=str(e)) from e

    @app.get("/api/trends/quality")
    async def get_quality_trends(days: int = 30) -> JSONResponse:
        """Get quality trend analysis for the last ``days`` days."""
        return await _handle_quality_trends_request(quality_service, days)

    @app.get("/api/alerts/configure")
    async def get_alert_configuration() -> JSONResponse:
        """Get the current alert configuration."""
        return await _handle_get_alert_configuration(quality_service)

    @app.post("/api/alerts/configure")
    async def update_alert_configuration(config: dict) -> JSONResponse:
        """Update the alert configuration from a metric -> threshold mapping."""
        return await _handle_update_alert_configuration(quality_service, config)

    @app.get("/api/export/data")
    async def export_data(days: int = 30, format: str = "json") -> t.Any:
        """Export historical data for external analysis.

        NOTE: ``format`` shadows the builtin, but it is the public query
        parameter name, so it is kept for API compatibility.
        """
        return await _handle_export_data_request(quality_service, days, format)
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
async def _handle_quality_trends_request(
    quality_service: EnhancedQualityBaselineService, days: int
) -> JSONResponse:
    """Handle the quality-trends API request.

    Args:
        quality_service: Baseline service used for trend analysis.
        days: Look-back window; rejected with HTTP 400 when > 365.

    Returns:
        JSONResponse wrapping the serialized trend analysis.

    Raises:
        HTTPException: 400 for an oversized window, 500 for unexpected errors.
    """
    try:
        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        trends = quality_service.analyze_quality_trend(days=days)
        return JSONResponse(
            {
                "status": "success",
                "data": trends.to_dict(),
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: the broad handler below previously caught the deliberate
        # 400 above and re-reported it as a 500. Let it propagate unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
async def _handle_get_alert_configuration(
    quality_service: EnhancedQualityBaselineService,
) -> JSONResponse:
    """Handle the get-alert-configuration API request.

    Returns:
        JSONResponse carrying the configured alert thresholds.

    Raises:
        HTTPException: 500 when the service lookup fails.
    """
    try:
        config = quality_service.get_alert_thresholds()
        return JSONResponse(
            {
                "status": "success",
                "data": config,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the cause so the original traceback survives the 500.
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
async def _handle_update_alert_configuration(
    quality_service: EnhancedQualityBaselineService, config: dict
) -> JSONResponse:
    """Handle the update-alert-configuration API request.

    Args:
        quality_service: Service whose thresholds are updated.
        config: Mapping of metric name -> new threshold value.

    Returns:
        JSONResponse confirming the update.

    Raises:
        HTTPException: 500 when any threshold update fails.
    """
    try:
        # Update each threshold individually; the service owns validation.
        for metric, threshold in config.items():
            quality_service.set_alert_threshold(metric, threshold)
        return JSONResponse(
            {
                "status": "success",
                "message": "Alert configuration updated",
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the cause so the original traceback survives the 500.
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
async def _handle_export_data_request(
    quality_service: EnhancedQualityBaselineService, days: int, format_type: str
) -> JSONResponse | t.Any:
    """Handle the export-data API request.

    Args:
        quality_service: Source of the historical baselines.
        days: Look-back window; rejected with HTTP 400 when > 365.
        format_type: "json" or "csv"; anything else is a 400.

    Returns:
        JSONResponse for JSON exports, or a CSV attachment response.

    Raises:
        HTTPException: 400 for invalid parameters, 500 for unexpected errors.
    """
    try:
        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        if format_type not in ("json", "csv"):
            raise HTTPException(
                status_code=400, detail="Format must be 'json' or 'csv'"
            )

        # NOTE(review): `days` is passed as a record-count `limit`, not a
        # time window — confirm get_recent_baselines semantics.
        historical_baselines = quality_service.get_recent_baselines(limit=days)

        if format_type == "csv":
            return _export_csv_data(historical_baselines, days)

        data = [baseline.to_dict() for baseline in historical_baselines]
        return JSONResponse(
            {
                "status": "success",
                "data": data,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: the deliberate 400s above were previously swallowed by
        # the broad handler below and converted into 500s.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
def _export_csv_data(historical_baselines: list[t.Any], days: int) -> t.Any:
    """Render the baseline history as a downloadable CSV attachment.

    Args:
        historical_baselines: Baseline records exposing the listed attributes.
        days: Window size, used only in the suggested download filename.

    Returns:
        A ``fastapi`` ``Response`` carrying ``text/csv`` content.
    """
    import csv
    from io import StringIO

    from fastapi.responses import Response

    # Column order doubles as the attribute lookup order for each record
    # (timestamp is special-cased because it needs ISO formatting).
    columns = (
        "timestamp",
        "git_hash",
        "quality_score",
        "coverage_percent",
        "test_count",
        "test_pass_rate",
        "hook_failures",
        "complexity_violations",
        "security_issues",
        "type_errors",
        "linting_issues",
    )

    buffer = StringIO()
    writer = csv.writer(buffer)
    writer.writerow(list(columns))

    for record in historical_baselines:
        row: list[t.Any] = [record.timestamp.isoformat()]
        row.extend(getattr(record, name) for name in columns[1:])
        writer.writerow(row)

    return Response(
        content=buffer.getvalue(),
        media_type="text/csv",
        headers={
            "Content-Disposition": (
                f"attachment; filename=crackerjack_metrics_{days}d.csv"
            )
        },
    )
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
def _register_intelligence_api_endpoints(
    app: FastAPI, services: dict[str, t.Any]
) -> None:
    """Register intelligence-related REST API endpoints.

    Args:
        app: FastAPI application to attach the routes to.
        services: Service registry; must contain "intelligence_service".
    """
    intelligence_service = services["intelligence_service"]

    # Fixes: `metrics: str = None` was a mistyped optional (should be
    # `str | None`), and all handlers were annotated `-> None` despite
    # returning responses from their delegate coroutines.
    @app.get("/api/intelligence/anomalies")
    async def get_anomalies(days: int = 7, metrics: str | None = None) -> JSONResponse:
        """Get anomaly detection results; ``metrics`` is a comma-separated filter."""
        return await _handle_anomalies_request(intelligence_service, days, metrics)

    @app.get("/api/intelligence/predictions/{metric}")
    async def get_metric_prediction(metric: str, horizon_days: int = 7) -> JSONResponse:
        """Get the prediction for a specific metric."""
        return await _handle_metric_prediction_request(
            intelligence_service, metric, horizon_days
        )

    @app.get("/api/intelligence/insights")
    async def get_quality_insights(days: int = 30) -> JSONResponse:
        """Get comprehensive quality insights."""
        return await _handle_quality_insights_request(intelligence_service, days)

    @app.get("/api/intelligence/patterns")
    async def get_pattern_analysis(days: int = 30) -> JSONResponse:
        """Get pattern recognition analysis."""
        return await _handle_pattern_analysis_request(intelligence_service, days)

    @app.post("/api/intelligence/analyze")
    async def run_comprehensive_analysis(request: dict) -> JSONResponse:
        """Run a comprehensive intelligence analysis."""
        return await _handle_comprehensive_analysis_request(
            intelligence_service, request
        )
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
async def _handle_anomalies_request(
    intelligence_service: QualityIntelligenceService, days: int, metrics: str | None
) -> JSONResponse:
    """Handle the anomalies API request.

    Args:
        intelligence_service: Service performing the detection.
        days: Look-back window; rejected with HTTP 400 when > 365.
        metrics: Optional comma-separated metric-name filter.

    Returns:
        JSONResponse carrying the serialized anomalies.

    Raises:
        HTTPException: 400 for an oversized window, 500 for unexpected errors.
    """
    try:
        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        metrics_list = metrics.split(",") if metrics else None
        anomalies = intelligence_service.detect_anomalies(
            days=days, metrics=metrics_list
        )

        return JSONResponse(
            {
                "status": "success",
                "data": [anomaly.to_dict() for anomaly in anomalies],
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: previously the 400 above was caught by the broad handler
        # below and re-reported as a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
async def _handle_metric_prediction_request(
    intelligence_service: QualityIntelligenceService, metric: str, horizon_days: int
) -> JSONResponse:
    """Handle the per-metric prediction API request.

    Args:
        intelligence_service: Service generating the forecasts.
        metric: Name of the metric to forecast.
        horizon_days: Forecast horizon; rejected with HTTP 400 when > 30.

    Returns:
        JSONResponse carrying the serialized prediction.

    Raises:
        HTTPException: 400 for an oversized horizon, 404 when no prediction
            exists for ``metric``, 500 for unexpected errors.
    """
    try:
        if horizon_days > 30:
            raise HTTPException(status_code=400, detail="Horizon too far in the future")

        all_predictions = intelligence_service.generate_advanced_predictions(
            horizon_days
        )
        prediction = next((p for p in all_predictions if p.metric_name == metric), None)
        if not prediction:
            raise HTTPException(status_code=404, detail="Prediction not available")

        return JSONResponse(
            {
                "status": "success",
                "data": prediction.to_dict(),
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: previously the deliberate 400/404 above were caught by
        # the broad handler below and re-reported as 500s.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
async def _handle_quality_insights_request(
    intelligence_service: QualityIntelligenceService, days: int
) -> JSONResponse:
    """Handle the quality-insights API request.

    Args:
        intelligence_service: Service generating the insights.
        days: Look-back window; rejected with HTTP 400 when > 365.

    Returns:
        JSONResponse carrying the serialized insights.

    Raises:
        HTTPException: 400 for an oversized window, 500 for unexpected errors.
    """
    try:
        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        insights = intelligence_service.generate_comprehensive_insights(days=days)

        return JSONResponse(
            {
                "status": "success",
                "data": insights.to_dict(),
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: previously the 400 above was caught by the broad handler
        # below and re-reported as a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
async def _handle_pattern_analysis_request(
    intelligence_service: QualityIntelligenceService, days: int
) -> JSONResponse:
    """Handle pattern analysis API request.

    Args:
        intelligence_service: Service identifying quality patterns.
        days: Analysis window in days (max 365).

    Returns:
        JSONResponse with the identified patterns and a timestamp.

    Raises:
        HTTPException: 400 when ``days`` exceeds 365, 500 on failure.
    """
    try:
        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        patterns = intelligence_service.identify_patterns(days=days)

        return JSONResponse(
            {
                "status": "success",
                "data": patterns,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: keep the intentional 400 from being re-raised as a 500.
        raise
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
async def _handle_comprehensive_analysis_request(
    intelligence_service: QualityIntelligenceService, request: dict
) -> JSONResponse:
    """Handle comprehensive analysis API request.

    Args:
        intelligence_service: Service used to build the analysis.
        request: Parsed request body; honors ``days`` (default 30) plus the
            ``include_*`` flags consumed by the results builder.

    Returns:
        JSONResponse with the combined analysis results and a timestamp.

    Raises:
        HTTPException: 400 when ``days`` exceeds 365, 500 on failure.
    """
    try:
        days = request.get("days", 30)

        if days > 365:
            raise HTTPException(status_code=400, detail="Days parameter too large")

        results = await _build_comprehensive_analysis_results(
            intelligence_service, request, days
        )

        return JSONResponse(
            {
                "status": "success",
                "data": results,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except HTTPException:
        # Bug fix: keep the intentional 400 from being re-raised as a 500.
        raise
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
async def _build_comprehensive_analysis_results(
    intelligence_service: QualityIntelligenceService, request: dict, days: int
) -> dict[str, t.Any]:
    """Assemble the comprehensive analysis payload selected by ``request``.

    Each ``include_*`` flag defaults to True, so an empty request body
    yields the full analysis.
    """
    results: dict[str, t.Any] = {}

    if request.get("include_anomalies", True):
        detected = intelligence_service.detect_anomalies(days=days)
        results["anomalies"] = [anomaly.to_dict() for anomaly in detected]

    if request.get("include_predictions", True):
        results["insights"] = intelligence_service.generate_comprehensive_insights(
            days=days
        ).to_dict()

        # Per-metric 7-day forecasts.
        # NOTE(review): other handlers call generate_advanced_predictions with
        # a single horizon argument and treat the result as a list; here it is
        # called as (metric, horizon_days=7) and treated as one prediction —
        # confirm the service's actual signature.
        predictions: dict[str, t.Any] = {}
        for metric in ("quality_score", "test_coverage", "hook_duration"):
            pred = intelligence_service.generate_advanced_predictions(
                metric, horizon_days=7
            )
            if pred:
                predictions[metric] = pred.to_dict()
        results["predictions"] = predictions

    if request.get("include_patterns", True):
        results["patterns"] = intelligence_service.identify_patterns(days=days)

    return results
|
|
1150
|
-
|
|
1151
|
-
|
|
1152
|
-
def _register_dependency_api_endpoints(
    app: FastAPI, services: dict[str, t.Any]
) -> None:
    """Register dependency-related REST API endpoints on ``app``.

    Args:
        app: FastAPI application to attach routes to.
        services: Service registry; must contain ``dependency_analyzer``.
    """
    dependency_analyzer = services["dependency_analyzer"]

    @app.get("/api/dependencies/graph")
    async def get_dependency_graph(
        # Fixed annotation: the default is None, so the type must be optional.
        filter_type: str | None = None,
        max_nodes: int = 1000,
        include_external: bool = False,
    ) -> JSONResponse:
        """Get dependency graph data."""
        return await _handle_dependency_graph_request(
            dependency_analyzer, filter_type, max_nodes, include_external
        )

    @app.get("/api/dependencies/metrics")
    async def get_dependency_metrics() -> JSONResponse:
        """Get dependency graph metrics."""
        return await _handle_dependency_metrics_request(dependency_analyzer)

    @app.get("/api/dependencies/clusters")
    async def get_dependency_clusters() -> JSONResponse:
        """Get dependency graph clusters."""
        return await _handle_dependency_clusters_request(dependency_analyzer)

    @app.post("/api/dependencies/analyze")
    async def trigger_dependency_analysis(request: dict) -> JSONResponse:
        """Trigger fresh dependency analysis."""
        return await _handle_dependency_analysis_request(dependency_analyzer, request)
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
async def _handle_dependency_graph_request(
    dependency_analyzer: DependencyAnalyzer,
    filter_type: str | None,
    max_nodes: int,
    include_external: bool,
) -> JSONResponse:
    """Handle dependency graph API request.

    Args:
        dependency_analyzer: Analyzer producing the project graph.
        filter_type: Optional node-type filter (e.g. "module").
        max_nodes: Upper bound on the number of nodes returned.
        include_external: Whether external (site-packages) nodes are kept.

    Returns:
        JSONResponse with the (possibly filtered) graph and a timestamp.

    Raises:
        HTTPException: 500 when analysis or filtering fails.
    """
    try:
        graph = dependency_analyzer.analyze_project()

        # Only pay the filtering cost when a filter is requested or the
        # graph exceeds the node budget.
        if filter_type or max_nodes < len(graph.nodes):
            filters = {
                "type": filter_type,
                "max_nodes": max_nodes,
                "include_external": include_external,
            }
            graph = await _apply_graph_filters(graph, filters)

        return JSONResponse(
            {
                "status": "success",
                "data": graph.to_dict(),
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
async def _handle_dependency_metrics_request(
    dependency_analyzer: DependencyAnalyzer,
) -> JSONResponse:
    """Handle dependency metrics API request.

    Returns:
        JSONResponse with the graph's metrics dict and a timestamp.

    Raises:
        HTTPException: 500 when project analysis fails.
    """
    try:
        graph = dependency_analyzer.analyze_project()

        return JSONResponse(
            {
                "status": "success",
                "data": graph.metrics,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1231
|
-
|
|
1232
|
-
|
|
1233
|
-
async def _handle_dependency_clusters_request(
    dependency_analyzer: DependencyAnalyzer,
) -> JSONResponse:
    """Handle dependency clusters API request.

    Returns:
        JSONResponse with the graph's cluster mapping and a timestamp.

    Raises:
        HTTPException: 500 when project analysis fails.
    """
    try:
        graph = dependency_analyzer.analyze_project()

        return JSONResponse(
            {
                "status": "success",
                "data": graph.clusters,
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
async def _handle_dependency_analysis_request(
    dependency_analyzer: DependencyAnalyzer, request: dict
) -> JSONResponse:
    """Handle dependency analysis trigger API request.

    Args:
        dependency_analyzer: Analyzer to reset and re-run.
        request: Parsed request body (currently unused; kept for the
            endpoint signature).

    Returns:
        JSONResponse summarizing the fresh graph (node/edge/cluster counts
        and metrics) plus a timestamp.

    Raises:
        HTTPException: 500 when analysis fails.
    """
    try:
        # Drop any cached graph so analyze_project() starts from scratch.
        dependency_analyzer.dependency_graph = DependencyGraph()
        graph = dependency_analyzer.analyze_project()

        return JSONResponse(
            {
                "status": "success",
                "message": "Dependency analysis completed",
                "data": {
                    "nodes": len(graph.nodes),
                    "edges": len(graph.edges),
                    "clusters": len(graph.clusters),
                    "metrics": graph.metrics,
                },
                "timestamp": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
def _register_heatmap_websockets(
    app: FastAPI,
    ws_manager: MonitoringWebSocketManager,
    services: dict[str, t.Any],
) -> None:
    """Register heatmap-related WebSocket endpoints on ``app``.

    Args:
        app: FastAPI application to attach the WebSocket route to.
        ws_manager: WebSocket manager. Currently unused here — the endpoint
            manages its own connection — but kept for signature parity with
            the other ``_register_*_websockets`` helpers.
        services: Service registry; must contain ``error_analyzer``.
    """
    error_analyzer = services["error_analyzer"]

    @app.websocket("/ws/heatmap/errors")
    async def websocket_error_heatmap(websocket: WebSocket) -> None:
        """WebSocket endpoint for real-time error heat map streaming."""
        await _handle_error_heatmap_websocket(websocket, error_analyzer)
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
async def _handle_error_heatmap_websocket(
    websocket: WebSocket, error_analyzer: ErrorPatternAnalyzer
) -> None:
    """Handle an error-heatmap WebSocket connection.

    Accepts the socket, pushes an initial snapshot of all heat maps, then
    serves client requests until disconnect. A 30s receive timeout doubles
    as a heartbeat interval.

    Args:
        websocket: Accepted-on-entry client connection.
        error_analyzer: Analyzer backing the heat map data.
    """
    from contextlib import suppress

    await websocket.accept()

    try:
        # Analyze error patterns and push the initial snapshot.
        error_patterns = error_analyzer.analyze_error_patterns(days=30)
        await _send_initial_heatmap_data(websocket, error_analyzer, error_patterns)

        # Serve client messages; a 30s silence triggers a heartbeat frame.
        while True:
            try:
                message = await asyncio.wait_for(websocket.receive_text(), timeout=30.0)
                data = json.loads(message)

                await _handle_heatmap_request(websocket, error_analyzer, data)

            except TimeoutError:
                await websocket.send_text(
                    json.dumps(
                        {
                            "type": "heartbeat",
                            "timestamp": datetime.now().isoformat(),
                        }
                    )
                )

    except WebSocketDisconnect:
        pass
    except Exception as e:
        # Best-effort error report: the socket may already be closed, and a
        # failed send here must not mask the original exception.
        with suppress(Exception):
            await websocket.send_text(
                json.dumps(
                    {
                        "type": "error",
                        "message": str(e),
                        "timestamp": datetime.now().isoformat(),
                    }
                )
            )
|
|
1332
|
-
|
|
1333
|
-
|
|
1334
|
-
async def _send_initial_heatmap_data(
    websocket: WebSocket,
    error_analyzer: ErrorPatternAnalyzer,
    error_patterns: list[t.Any],
) -> None:
    """Send the initial heat-map snapshot to a newly connected client.

    Pushes four frames in order: file heat map, temporal heat map, function
    heat map, and the error-patterns summary. Every frame uses the same
    ``{type, data, timestamp}`` envelope.

    Args:
        websocket: Accepted client connection.
        error_analyzer: Analyzer producing the heat maps.
        error_patterns: Pre-computed patterns from analyze_error_patterns().
    """

    async def _send(frame_type: str, payload: t.Any) -> None:
        # Single envelope builder — the four frames differed only in
        # type and payload, so the repetition is factored out here.
        await websocket.send_text(
            json.dumps(
                {
                    "type": frame_type,
                    "data": payload,
                    "timestamp": datetime.now().isoformat(),
                }
            )
        )

    await _send("file_heatmap", error_analyzer.generate_file_error_heatmap().to_dict())
    await _send("temporal_heatmap", error_analyzer.generate_temporal_heatmap().to_dict())
    await _send(
        "function_heatmap", error_analyzer.generate_function_error_heatmap().to_dict()
    )
    await _send("error_patterns", [pattern.to_dict() for pattern in error_patterns])
|
|
1387
|
-
|
|
1388
|
-
|
|
1389
|
-
async def _handle_heatmap_request(
    websocket: WebSocket,
    error_analyzer: ErrorPatternAnalyzer,
    data: dict[str, t.Any],
) -> None:
    """Dispatch a single heat-map client message.

    ``refresh_heatmap`` triggers a fresh heat map push; ``keepalive`` is
    answered with a ``pong`` frame. Unknown message types are ignored.
    """
    message_type = data.get("type")

    if message_type == "refresh_heatmap":
        await _handle_heatmap_refresh(websocket, error_analyzer, data)
        return

    if message_type == "keepalive":
        pong = {
            "type": "pong",
            "timestamp": datetime.now().isoformat(),
        }
        await websocket.send_text(json.dumps(pong))
|
|
1406
|
-
|
|
1407
|
-
|
|
1408
|
-
async def _handle_heatmap_refresh(
    websocket: WebSocket,
    error_analyzer: ErrorPatternAnalyzer,
    data: dict[str, t.Any],
) -> None:
    """Re-run pattern analysis and push one refreshed heat map.

    The request may carry ``days`` (analysis window, default 30),
    ``heatmap_type`` ("file" | "temporal" | "function", default "file")
    and, for temporal maps, ``time_buckets`` (default 24). An unknown
    heat map type is silently ignored.
    """
    error_analyzer.analyze_error_patterns(days=data.get("days", 30))

    kind = data.get("heatmap_type", "file")

    if kind == "file":
        refreshed = error_analyzer.generate_file_error_heatmap()
    elif kind == "temporal":
        refreshed = error_analyzer.generate_temporal_heatmap(
            time_buckets=data.get("time_buckets", 24)
        )
    elif kind == "function":
        refreshed = error_analyzer.generate_function_error_heatmap()
    else:
        # Unknown type: nothing to send.
        return

    frame = {
        "type": f"{kind}_heatmap_refresh",
        "data": refreshed.to_dict(),
        "timestamp": datetime.now().isoformat(),
    }
    await websocket.send_text(json.dumps(frame))
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
def _register_heatmap_api_endpoints(app: FastAPI, services: dict[str, t.Any]) -> None:
    """Register heatmap-related REST API endpoints on ``app``.

    Args:
        app: FastAPI application to attach routes to.
        services: Service registry; must contain ``error_analyzer`` and
            ``cache``.
    """
    error_analyzer = services["error_analyzer"]
    cache = services["cache"]

    @app.get("/api/heatmap/file_errors")
    async def get_file_error_heatmap() -> JSONResponse:
        """Get error heat map by file."""
        return await _handle_file_error_heatmap_request(error_analyzer)

    @app.get("/api/heatmap/temporal_errors")
    async def get_temporal_error_heatmap(time_buckets: int = 24) -> JSONResponse:
        """Get error heat map over time."""
        return await _handle_temporal_error_heatmap_request(
            error_analyzer, time_buckets
        )

    @app.get("/api/heatmap/function_errors")
    async def get_function_error_heatmap() -> JSONResponse:
        """Get error heat map by function."""
        return await _handle_function_error_heatmap_request(error_analyzer)

    @app.get("/api/error_patterns")
    async def get_error_patterns(
        days: int = 30, min_occurrences: int = 2, severity: str | None = None
    ) -> JSONResponse:
        """Get analyzed error patterns."""
        return await _handle_error_patterns_request(
            error_analyzer, days, min_occurrences, severity
        )

    @app.post("/api/trigger_error_analysis")
    async def trigger_error_analysis(request: dict) -> JSONResponse:
        """Trigger fresh error pattern analysis."""
        return await _handle_trigger_error_analysis_request(
            error_analyzer, cache, request
        )
|
|
1477
|
-
|
|
1478
|
-
|
|
1479
|
-
async def _handle_file_error_heatmap_request(
    error_analyzer: ErrorPatternAnalyzer,
) -> JSONResponse:
    """Handle file error heatmap API request.

    Runs a fresh 30-day pattern analysis before generating the map.

    Raises:
        HTTPException: 500 when analysis or generation fails.
    """
    try:
        error_analyzer.analyze_error_patterns(days=30)
        heatmap = error_analyzer.generate_file_error_heatmap()
        return JSONResponse(heatmap.to_dict())
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
async def _handle_temporal_error_heatmap_request(
    error_analyzer: ErrorPatternAnalyzer, time_buckets: int
) -> JSONResponse:
    """Handle temporal error heatmap API request.

    Args:
        error_analyzer: Analyzer backing the heat map.
        time_buckets: Number of time buckets for the temporal axis.

    Raises:
        HTTPException: 500 when analysis or generation fails.
    """
    try:
        error_analyzer.analyze_error_patterns(days=30)
        heatmap = error_analyzer.generate_temporal_heatmap(time_buckets=time_buckets)
        return JSONResponse(heatmap.to_dict())
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
async def _handle_function_error_heatmap_request(
    error_analyzer: ErrorPatternAnalyzer,
) -> JSONResponse:
    """Handle function error heatmap API request.

    Runs a fresh 30-day pattern analysis before generating the map.

    Raises:
        HTTPException: 500 when analysis or generation fails.
    """
    try:
        error_analyzer.analyze_error_patterns(days=30)
        heatmap = error_analyzer.generate_function_error_heatmap()
        return JSONResponse(heatmap.to_dict())
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1513
|
-
|
|
1514
|
-
|
|
1515
|
-
async def _handle_error_patterns_request(
    error_analyzer: ErrorPatternAnalyzer,
    days: int,
    min_occurrences: int,
    severity: str | None,
) -> JSONResponse:
    """Handle error patterns API request.

    Args:
        error_analyzer: Analyzer producing the patterns.
        days: Analysis window in days.
        min_occurrences: Minimum occurrences for a pattern to be reported.
        severity: Optional severity filter applied after analysis.

    Returns:
        JSONResponse with the (optionally filtered) patterns, their count,
        the analysis window, and a generation timestamp.

    Raises:
        HTTPException: 500 when analysis fails.
    """
    try:
        patterns = error_analyzer.analyze_error_patterns(
            days=days, min_occurrences=min_occurrences
        )

        # Severity filtering happens client-side of the analyzer so the
        # analysis itself stays cacheable across severities.
        if severity:
            patterns = [p for p in patterns if p.severity == severity]

        return JSONResponse(
            {
                "patterns": [pattern.to_dict() for pattern in patterns],
                "total_count": len(patterns),
                "analysis_period_days": days,
                "generated_at": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1541
|
-
|
|
1542
|
-
|
|
1543
|
-
async def _handle_trigger_error_analysis_request(
    error_analyzer: ErrorPatternAnalyzer, cache: CrackerjackCache, request: dict
) -> JSONResponse:
    """Handle trigger error analysis API request.

    Runs a fresh pattern analysis, caches the serialized results for 30
    minutes, and returns a summary including a per-severity breakdown.

    Args:
        error_analyzer: Analyzer to run.
        cache: Cache receiving the serialized patterns.
        request: Parsed body; honors ``days`` (default 30) and
            ``min_occurrences`` (default 2).

    Raises:
        HTTPException: 500 when analysis or caching fails.
    """
    try:
        days = request.get("days", 30)
        min_occurrences = request.get("min_occurrences", 2)

        # Perform fresh analysis.
        patterns = error_analyzer.analyze_error_patterns(
            days=days, min_occurrences=min_occurrences
        )

        # Cache serialized results for 30 minutes, keyed by window size.
        cache_key = f"error_patterns_{days}d"
        cache.set(cache_key, [p.to_dict() for p in patterns], ttl_seconds=1800)

        severity_breakdown = {
            severity: len([p for p in patterns if p.severity == severity])
            for severity in ("low", "medium", "high", "critical")
        }

        return JSONResponse(
            {
                "status": "success",
                "message": "Error pattern analysis completed",
                "patterns_found": len(patterns),
                "analysis_period_days": days,
                "severity_breakdown": severity_breakdown,
                "generated_at": datetime.now().isoformat(),
            }
        )
    except Exception as e:
        # Chain the original cause for debuggability (PEP 3134 / ruff B904).
        raise HTTPException(status_code=500, detail=str(e)) from e
|
|
1577
|
-
|
|
1578
|
-
|
|
1579
|
-
def _register_dashboard_endpoint(app: FastAPI) -> None:
    """Register the dashboard HTML endpoint on ``app``."""

    @app.get("/dashboard")
    async def get_dashboard_html() -> HTMLResponse:
        # Fixed return annotation: the endpoint returns an HTMLResponse,
        # not None (FastAPI inspects this annotation).
        """Serve the monitoring dashboard HTML."""
        return HTMLResponse(_get_dashboard_html())
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
async def get_current_metrics(
    quality_service: EnhancedQualityBaselineService, job_manager: JobManager
) -> UnifiedMetrics:
    """Get current unified metrics.

    Builds metrics from the quality baseline when one exists; otherwise —
    or when the quality service raises — returns a zeroed fallback that
    still reflects the live job count.

    Args:
        quality_service: Source of the quality baseline and metric builder.
        job_manager: Provides the active-connection count.
    """

    def _fallback_metrics() -> UnifiedMetrics:
        # Single definition of the zeroed fallback; the original duplicated
        # this construction in the no-baseline and exception paths.
        return UnifiedMetrics(
            timestamp=datetime.now(),
            quality_score=0,
            test_coverage=0.0,
            hook_duration=0.0,
            active_jobs=len(job_manager.active_connections),
            error_count=0,
            trend_direction=TrendDirection.STABLE,
            predictions={},
        )

    try:
        # Get baseline from quality service.
        baseline = quality_service.get_baseline()
        if not baseline:
            return _fallback_metrics()

        # Create metrics dict from baseline.
        current_metrics = {
            "coverage_percent": baseline.coverage_percent,
            "test_count": baseline.test_count,
            "test_pass_rate": baseline.test_pass_rate,
            "hook_failures": baseline.hook_failures,
            "complexity_violations": baseline.complexity_violations,
            "security_issues": baseline.security_issues,
            "type_errors": baseline.type_errors,
            "linting_issues": baseline.linting_issues,
            "hook_duration": 0.0,  # Would need to be tracked separately
        }

        return quality_service.create_unified_metrics(
            current_metrics, active_job_count=len(job_manager.active_connections)
        )
    except Exception:
        # Fallback to basic metrics if the service fails.
        return _fallback_metrics()
|
|
1636
|
-
|
|
1637
|
-
|
|
1638
|
-
async def get_system_health_status(
    quality_service: EnhancedQualityBaselineService,
) -> SystemHealthStatus:
    """Return the current system health from the quality service."""
    health = quality_service.get_system_health()
    return health
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
async def _apply_graph_filters(
    graph: DependencyGraph, filters: dict[str, t.Any]
) -> DependencyGraph:
    """Return a copy of ``graph`` narrowed by the given filters.

    Supported filter keys: ``type`` (node type), ``max_nodes`` (budget,
    default 1000; excess nodes are dropped lowest-priority first, where
    priority = complexity x edge degree), and ``include_external``
    (keep site-packages nodes; default False). Edges and clusters are
    trimmed to reference only surviving nodes.
    """
    result = DependencyGraph(
        generated_at=graph.generated_at,
        metrics=graph.metrics.copy(),
        clusters=graph.clusters.copy(),
    )

    wanted_type = filters.get("type")
    node_budget = filters.get("max_nodes", 1000)
    keep_external = filters.get("include_external", False)

    # Start from every node, then narrow step by step.
    kept = list(graph.nodes.values())

    if wanted_type:
        kept = [node for node in kept if node.type == wanted_type]

    if not keep_external:
        # External dependencies are identified by a site-packages path.
        kept = [
            node
            for node in kept
            if not node.file_path or "site-packages" not in node.file_path
        ]

    if len(kept) > node_budget:
        # Over budget: keep the most "important" nodes, ranked by
        # complexity weighted by how connected they are.
        def _priority(node: t.Any) -> int:
            degree = sum(
                1 for edge in graph.edges if node.id in (edge.source, edge.target)
            )
            return int(node.complexity * degree)

        kept = sorted(kept, key=_priority, reverse=True)[:node_budget]

    surviving_ids = {node.id for node in kept}
    for node in kept:
        result.nodes[node.id] = node

    # Keep only edges whose both endpoints survived.
    result.edges.extend(
        edge
        for edge in graph.edges
        if edge.source in surviving_ids and edge.target in surviving_ids
    )

    # Rebuild clusters against the surviving node set, dropping clusters
    # that end up empty.
    trimmed_clusters: dict[str, list[t.Any]] = {}
    for cluster_name, member_ids in graph.clusters.items():
        survivors = [node_id for node_id in member_ids if node_id in surviving_ids]
        if survivors:
            trimmed_clusters[cluster_name] = survivors

    result.clusters = trimmed_clusters

    return result
|
|
1711
|
-
|
|
1712
|
-
|
|
1713
|
-
def _get_dashboard_html() -> str:
|
|
1714
|
-
"""Generate the monitoring dashboard HTML."""
|
|
1715
|
-
return """<!DOCTYPE html>
|
|
1716
|
-
<html lang="en">
|
|
1717
|
-
<head>
|
|
1718
|
-
<meta charset="UTF-8">
|
|
1719
|
-
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
|
1720
|
-
<title>Crackerjack Monitoring Dashboard</title>
|
|
1721
|
-
<script src="https://d3js.org/d3.v7.min.js"></script>
|
|
1722
|
-
<script src="https://unpkg.com/react@18/umd/react.development.js"></script>
|
|
1723
|
-
<script src="https://unpkg.com/react-dom@18/umd/react-dom.development.js"></script>
|
|
1724
|
-
<style>
|
|
1725
|
-
body {{
|
|
1726
|
-
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI',
|
|
1727
|
-
'Roboto', sans-serif;
|
|
1728
|
-
margin: 0;
|
|
1729
|
-
padding: 20px;
|
|
1730
|
-
background-color: #f5f5f5;
|
|
1731
|
-
}}
|
|
1732
|
-
.dashboard-container {{
|
|
1733
|
-
max-width: 1400px;
|
|
1734
|
-
margin: 0 auto;
|
|
1735
|
-
}}
|
|
1736
|
-
.metric-card {{
|
|
1737
|
-
background: white;
|
|
1738
|
-
border-radius: 8px;
|
|
1739
|
-
padding: 20px;
|
|
1740
|
-
margin: 10px;
|
|
1741
|
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
|
1742
|
-
display: inline-block;
|
|
1743
|
-
min-width: 200px;
|
|
1744
|
-
}}
|
|
1745
|
-
.metric-value {{
|
|
1746
|
-
font-size: 2em;
|
|
1747
|
-
font-weight: bold;
|
|
1748
|
-
color: #333;
|
|
1749
|
-
}}
|
|
1750
|
-
.metric-label {{
|
|
1751
|
-
color: #666;
|
|
1752
|
-
font-size: 0.9em;
|
|
1753
|
-
margin-top: 5px;
|
|
1754
|
-
}}
|
|
1755
|
-
.trend-indicator {{
|
|
1756
|
-
font-size: 0.8em;
|
|
1757
|
-
padding: 2px 8px;
|
|
1758
|
-
border-radius: 12px;
|
|
1759
|
-
margin-left: 10px;
|
|
1760
|
-
}}
|
|
1761
|
-
.trend-improving {{ background-color: #d4edda; color: #155724; }}
|
|
1762
|
-
.trend-declining {{ background-color: #f8d7da; color: #721c24; }}
|
|
1763
|
-
.trend-stable {{ background-color: #d1ecf1; color: #0c5460; }}
|
|
1764
|
-
.chart-container {{
|
|
1765
|
-
background: white;
|
|
1766
|
-
border-radius: 8px;
|
|
1767
|
-
padding: 20px;
|
|
1768
|
-
margin: 20px 0;
|
|
1769
|
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
|
1770
|
-
}}
|
|
1771
|
-
.status-indicator {{
|
|
1772
|
-
display: inline-block;
|
|
1773
|
-
width: 12px;
|
|
1774
|
-
height: 12px;
|
|
1775
|
-
border-radius: 50%;
|
|
1776
|
-
margin-right: 8px;
|
|
1777
|
-
}}
|
|
1778
|
-
.status-healthy {{ background-color: #28a745; }}
|
|
1779
|
-
.status-warning {{ background-color: #ffc107; }}
|
|
1780
|
-
.status-error {{ background-color: #dc3545; }}
|
|
1781
|
-
#connection-status {{
|
|
1782
|
-
position: fixed;
|
|
1783
|
-
top: 20px;
|
|
1784
|
-
right: 20px;
|
|
1785
|
-
padding: 10px 15px;
|
|
1786
|
-
border-radius: 5px;
|
|
1787
|
-
color: white;
|
|
1788
|
-
font-weight: bold;
|
|
1789
|
-
}}
|
|
1790
|
-
.connected {{ background-color: #28a745; }}
|
|
1791
|
-
.disconnected {{ background-color: #dc3545; }}
|
|
1792
|
-
</style>
|
|
1793
|
-
</head>
|
|
1794
|
-
<body>
|
|
1795
|
-
<div class="dashboard-container">
|
|
1796
|
-
<div id="connection-status" class="disconnected">Connecting...</div>
|
|
1797
|
-
|
|
1798
|
-
<h1>🚀 Crackerjack Monitoring Dashboard</h1>
|
|
1799
|
-
|
|
1800
|
-
<div id="metrics-cards"></div>
|
|
1801
|
-
|
|
1802
|
-
<div class="chart-container">
|
|
1803
|
-
<h3>Quality Score Trend (7 Days)</h3>
|
|
1804
|
-
<div id="quality-chart"></div>
|
|
1805
|
-
</div>
|
|
1806
|
-
|
|
1807
|
-
<div class="chart-container">
|
|
1808
|
-
<h3>Test Coverage & Performance</h3>
|
|
1809
|
-
<div id="coverage-chart"></div>
|
|
1810
|
-
</div>
|
|
1811
|
-
|
|
1812
|
-
<div class="chart-container">
|
|
1813
|
-
<h3>System Health</h3>
|
|
1814
|
-
<div id="health-chart"></div>
|
|
1815
|
-
</div>
|
|
1816
|
-
|
|
1817
|
-
<div class="chart-container">
|
|
1818
|
-
<h3>Active Alerts</h3>
|
|
1819
|
-
<div id="alerts-panel"></div>
|
|
1820
|
-
</div>
|
|
1821
|
-
|
|
1822
|
-
<div class="chart-container">
|
|
1823
|
-
<h3>🧠 ML Anomaly Detection</h3>
|
|
1824
|
-
<div id="anomalies-panel"></div>
|
|
1825
|
-
</div>
|
|
1826
|
-
|
|
1827
|
-
<div class="chart-container">
|
|
1828
|
-
<h3>🔮 Quality Predictions</h3>
|
|
1829
|
-
<div id="predictions-panel"></div>
|
|
1830
|
-
</div>
|
|
1831
|
-
|
|
1832
|
-
<div class="chart-container">
|
|
1833
|
-
<h3>📊 Pattern Analysis</h3>
|
|
1834
|
-
<div id="patterns-panel"></div>
|
|
1835
|
-
</div>
|
|
1836
|
-
|
|
1837
|
-
<div class="chart-container">
|
|
1838
|
-
<h3>🕸️ Code Dependencies Network</h3>
|
|
1839
|
-
<div style="margin-bottom: 10px;">
|
|
1840
|
-
<button id="load-dependency-graph" onclick="loadDependencyGraph()">
|
|
1841
|
-
Load Dependency Graph
|
|
1842
|
-
</button>
|
|
1843
|
-
<button id="refresh-graph" onclick="refreshDependencyGraph()" disabled>
|
|
1844
|
-
Refresh
|
|
1845
|
-
</button>
|
|
1846
|
-
<select id="graph-filter" onchange="applyGraphFilter()" disabled>
|
|
1847
|
-
<option value="">All Types</option>
|
|
1848
|
-
<option value="module">Modules Only</option>
|
|
1849
|
-
<option value="class">Classes Only</option>
|
|
1850
|
-
<option value="function">Functions Only</option>
|
|
1851
|
-
</select>
|
|
1852
|
-
<label>
|
|
1853
|
-
<input type="checkbox" id="include-external"
|
|
1854
|
-
onchange="applyGraphFilter()" disabled>
|
|
1855
|
-
Include External Dependencies
|
|
1856
|
-
</label>
|
|
1857
|
-
</div>
|
|
1858
|
-
<div id="dependency-graph"
|
|
1859
|
-
style="width: 100%; height: 600px; border: 1px solid #ddd;">
|
|
1860
|
-
</div>
|
|
1861
|
-
</div>
|
|
1862
|
-
</div>
|
|
1863
|
-
|
|
1864
|
-
<!-- Error Pattern Heat Map Section -->
|
|
1865
|
-
<div class="section">
|
|
1866
|
-
<h2>Error Pattern Heat Maps</h2>
|
|
1867
|
-
<div class="controls">
|
|
1868
|
-
<label>Heat Map Type:</label>
|
|
1869
|
-
<select id="heatmap-type" onchange="updateHeatMap()">
|
|
1870
|
-
<option value="file">By File</option>
|
|
1871
|
-
<option value="temporal">Over Time</option>
|
|
1872
|
-
<option value="function">By Function</option>
|
|
1873
|
-
</select>
|
|
1874
|
-
<button id="load-heatmap" onclick="loadErrorHeatMap()">
|
|
1875
|
-
Load Heat Map
|
|
1876
|
-
</button>
|
|
1877
|
-
<label>
|
|
1878
|
-
<input type="number" id="analysis-days" value="30" min="1" max="365"
|
|
1879
|
-
onchange="updateHeatMap()">
|
|
1880
|
-
Analysis Days
|
|
1881
|
-
</label>
|
|
1882
|
-
<label>
|
|
1883
|
-
<input type="number" id="time-buckets" value="24" min="6" max="48"
|
|
1884
|
-
onchange="updateTemporalHeatMap()" disabled>
|
|
1885
|
-
Time Buckets
|
|
1886
|
-
</label>
|
|
1887
|
-
</div>
|
|
1888
|
-
<div class="tab-container">
|
|
1889
|
-
<div class="tab-buttons">
|
|
1890
|
-
<button class="tab-button active" onclick="showHeatMapTab('heatmap')">
|
|
1891
|
-
Heat Map
|
|
1892
|
-
</button>
|
|
1893
|
-
<button class="tab-button" onclick="showHeatMapTab('patterns')">
|
|
1894
|
-
Error Patterns
|
|
1895
|
-
</button>
|
|
1896
|
-
<button class="tab-button" onclick="showHeatMapTab('severity')">
|
|
1897
|
-
Severity Analysis
|
|
1898
|
-
</button>
|
|
1899
|
-
</div>
|
|
1900
|
-
<div id="heatmap-tab" class="tab-content active">
|
|
1901
|
-
<div id="error-heatmap" style="width: 100%; height: 600px; border: 1px solid #ddd; overflow: auto;"></div>
|
|
1902
|
-
</div>
|
|
1903
|
-
<div id="patterns-tab" class="tab-content">
|
|
1904
|
-
<div id="error-patterns-list" style="max-height: 600px; overflow-y: auto;"></div>
|
|
1905
|
-
</div>
|
|
1906
|
-
<div id="severity-tab" class="tab-content">
|
|
1907
|
-
<div id="severity-breakdown" style="max-height: 600px; overflow-y: auto;"></div>
|
|
1908
|
-
</div>
|
|
1909
|
-
</div>
|
|
1910
|
-
</div>
|
|
1911
|
-
|
|
1912
|
-
<script>
|
|
1913
|
-
// WebSocket connection management
|
|
1914
|
-
let ws = null;
|
|
1915
|
-
let reconnectInterval = 5000;
|
|
1916
|
-
let isConnected = false;
|
|
1917
|
-
|
|
1918
|
-
// Dashboard state
|
|
1919
|
-
let currentMetrics = {{}};
|
|
1920
|
-
let historicalData = [];
|
|
1921
|
-
let activeAlerts = [];
|
|
1922
|
-
let anomalies = [];
|
|
1923
|
-
let predictions = {{}};
|
|
1924
|
-
let patterns = {{}};
|
|
1925
|
-
let dependencyGraph = null;
|
|
1926
|
-
let dependencyWs = null;
|
|
1927
|
-
|
|
1928
|
-
// Intelligence WebSocket connections
|
|
1929
|
-
let anomaliesWs = null;
|
|
1930
|
-
let predictionsWs = null;
|
|
1931
|
-
let patternsWs = null;
|
|
1932
|
-
|
|
1933
|
-
// Heat map state
|
|
1934
|
-
let heatMapWs = null;
|
|
1935
|
-
let currentHeatMapData = null;
|
|
1936
|
-
let errorPatterns = [];
|
|
1937
|
-
let severityBreakdown = {{}};
|
|
1938
|
-
|
|
1939
|
-
function connect() {{
|
|
1940
|
-
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
|
|
1941
|
-
const wsUrl = `${{protocol}}//${{window.location.host}}/ws/dashboard/overview`;
|
|
1942
|
-
|
|
1943
|
-
ws = new WebSocket(wsUrl);
|
|
1944
|
-
|
|
1945
|
-
// Connect intelligence WebSockets
|
|
1946
|
-
connectIntelligenceStreams();
|
|
1947
|
-
|
|
1948
|
-
ws.onopen = function() {{
|
|
1949
|
-
isConnected = true;
|
|
1950
|
-
updateConnectionStatus();
|
|
1951
|
-
console.log('Connected to monitoring dashboard');
|
|
1952
|
-
}};
|
|
1953
|
-
|
|
1954
|
-
ws.onmessage = function(event) {{
|
|
1955
|
-
const message = JSON.parse(event.data);
|
|
1956
|
-
handleMessage(message);
|
|
1957
|
-
}};
|
|
1958
|
-
|
|
1959
|
-
ws.onclose = function() {{
|
|
1960
|
-
isConnected = false;
|
|
1961
|
-
updateConnectionStatus();
|
|
1962
|
-
console.log('Disconnected from monitoring dashboard');
|
|
1963
|
-
setTimeout(connect, reconnectInterval);
|
|
1964
|
-
}};
|
|
1965
|
-
|
|
1966
|
-
ws.onerror = function(error) {{
|
|
1967
|
-
console.error('WebSocket error:', error);
|
|
1968
|
-
}};
|
|
1969
|
-
}}
|
|
1970
|
-
|
|
1971
|
-
function handleMessage(message) {{
|
|
1972
|
-
switch(message.type) {{
|
|
1973
|
-
case 'dashboard_update':
|
|
1974
|
-
updateDashboard(message.data);
|
|
1975
|
-
break;
|
|
1976
|
-
case 'metrics_update':
|
|
1977
|
-
updateMetrics(message.data);
|
|
1978
|
-
break;
|
|
1979
|
-
case 'alert':
|
|
1980
|
-
handleAlert(message.data);
|
|
1981
|
-
break;
|
|
1982
|
-
}}
|
|
1983
|
-
}}
|
|
1984
|
-
|
|
1985
|
-
function updateConnectionStatus() {{
|
|
1986
|
-
const statusEl = document.getElementById('connection-status');
|
|
1987
|
-
if (isConnected) {{
|
|
1988
|
-
statusEl.textContent = 'Connected';
|
|
1989
|
-
statusEl.className = 'connected';
|
|
1990
|
-
}} else {{
|
|
1991
|
-
statusEl.textContent = 'Disconnected';
|
|
1992
|
-
statusEl.className = 'disconnected';
|
|
1993
|
-
}}
|
|
1994
|
-
}}
|
|
1995
|
-
|
|
1996
|
-
function updateDashboard(data) {{
|
|
1997
|
-
currentMetrics = data.current_metrics;
|
|
1998
|
-
historicalData = data.historical_data || [];
|
|
1999
|
-
activeAlerts = data.active_alerts || [];
|
|
2000
|
-
|
|
2001
|
-
renderMetricsCards();
|
|
2002
|
-
renderQualityChart();
|
|
2003
|
-
renderCoverageChart();
|
|
2004
|
-
renderHealthChart();
|
|
2005
|
-
renderAlertsPanel();
|
|
2006
|
-
renderAnomaliesPanel();
|
|
2007
|
-
renderPredictionsPanel();
|
|
2008
|
-
renderPatternsPanel();
|
|
2009
|
-
}}
|
|
2010
|
-
|
|
2011
|
-
function updateMetrics(data) {{
|
|
2012
|
-
currentMetrics = data;
|
|
2013
|
-
renderMetricsCards();
|
|
2014
|
-
}}
|
|
2015
|
-
|
|
2016
|
-
function handleAlert(alert) {{
|
|
2017
|
-
activeAlerts.unshift(alert);
|
|
2018
|
-
renderAlertsPanel();
|
|
2019
|
-
|
|
2020
|
-
// Show browser notification if permissions granted
|
|
2021
|
-
if (Notification.permission === 'granted') {{
|
|
2022
|
-
new Notification('Crackerjack Alert', {{
|
|
2023
|
-
body: alert.message,
|
|
2024
|
-
icon: '/favicon.ico'
|
|
2025
|
-
}});
|
|
2026
|
-
}}
|
|
2027
|
-
}}
|
|
2028
|
-
|
|
2029
|
-
function renderMetricsCards() {{
|
|
2030
|
-
const container = document.getElementById('metrics-cards');
|
|
2031
|
-
const metrics = currentMetrics;
|
|
2032
|
-
|
|
2033
|
-
if (!metrics) return;
|
|
2034
|
-
|
|
2035
|
-
const cards = [
|
|
2036
|
-
{{
|
|
2037
|
-
label: 'Quality Score',
|
|
2038
|
-
value: metrics.quality_score || 0,
|
|
2039
|
-
trend: metrics.trend_direction || 'stable'
|
|
2040
|
-
}},
|
|
2041
|
-
{{
|
|
2042
|
-
label: 'Test Coverage',
|
|
2043
|
-
value: `${{(metrics.test_coverage || 0).toFixed(1)}}%`,
|
|
2044
|
-
trend: metrics.test_coverage > 90 ? 'improving' : 'stable'
|
|
2045
|
-
}},
|
|
2046
|
-
{{
|
|
2047
|
-
label: 'Hook Duration',
|
|
2048
|
-
value: `${{(metrics.hook_duration || 0).toFixed(1)}}s`,
|
|
2049
|
-
trend: metrics.hook_duration < 60 ? 'improving' : 'declining'
|
|
2050
|
-
}},
|
|
2051
|
-
{{
|
|
2052
|
-
label: 'Active Jobs',
|
|
2053
|
-
value: metrics.active_jobs || 0,
|
|
2054
|
-
trend: 'stable'
|
|
2055
|
-
}},
|
|
2056
|
-
{{
|
|
2057
|
-
label: 'Error Count',
|
|
2058
|
-
value: metrics.error_count || 0,
|
|
2059
|
-
trend: metrics.error_count === 0 ? 'improving' : 'declining'
|
|
2060
|
-
}}
|
|
2061
|
-
];
|
|
2062
|
-
|
|
2063
|
-
container.innerHTML = cards.map(card => `
|
|
2064
|
-
<div class="metric-card">
|
|
2065
|
-
<div class="metric-value">${{card.value}}</div>
|
|
2066
|
-
<div class="metric-label">
|
|
2067
|
-
${{card.label}}
|
|
2068
|
-
<span class="trend-indicator trend-${{card.trend}}">
|
|
2069
|
-
${{card.trend === 'improving' ? '↗' : card.trend === 'declining' ? '↘' : '→'}}
|
|
2070
|
-
</span>
|
|
2071
|
-
</div>
|
|
2072
|
-
</div>
|
|
2073
|
-
`).join('');
|
|
2074
|
-
}}
|
|
2075
|
-
|
|
2076
|
-
function renderQualityChart() {{
|
|
2077
|
-
// D3.js quality score chart implementation
|
|
2078
|
-
const data = historicalData.map(d => ({{
|
|
2079
|
-
date: new Date(d.timestamp),
|
|
2080
|
-
score: d.quality_score
|
|
2081
|
-
}}));
|
|
2082
|
-
|
|
2083
|
-
if (data.length === 0) return;
|
|
2084
|
-
|
|
2085
|
-
const container = d3.select('#quality-chart');
|
|
2086
|
-
container.selectAll('*').remove();
|
|
2087
|
-
|
|
2088
|
-
const margin = {{top: 20, right: 30, bottom: 40, left: 50}};
|
|
2089
|
-
const width = 800 - margin.left - margin.right;
|
|
2090
|
-
const height = 300 - margin.top - margin.bottom;
|
|
2091
|
-
|
|
2092
|
-
const svg = container
|
|
2093
|
-
.append('svg')
|
|
2094
|
-
.attr('width', width + margin.left + margin.right)
|
|
2095
|
-
.attr('height', height + margin.top + margin.bottom);
|
|
2096
|
-
|
|
2097
|
-
const g = svg.append('g')
|
|
2098
|
-
.attr('transform', `translate(${{margin.left}},${{margin.top}})`);
|
|
2099
|
-
|
|
2100
|
-
const x = d3.scaleTime()
|
|
2101
|
-
.domain(d3.extent(data, d => d.date))
|
|
2102
|
-
.range([0, width]);
|
|
2103
|
-
|
|
2104
|
-
const y = d3.scaleLinear()
|
|
2105
|
-
.domain([0, 100])
|
|
2106
|
-
.range([height, 0]);
|
|
2107
|
-
|
|
2108
|
-
const line = d3.line()
|
|
2109
|
-
.x(d => x(d.date))
|
|
2110
|
-
.y(d => y(d.score))
|
|
2111
|
-
.curve(d3.curveMonotoneX);
|
|
2112
|
-
|
|
2113
|
-
g.append('g')
|
|
2114
|
-
.attr('transform', `translate(0,${{height}})`)
|
|
2115
|
-
.call(d3.axisBottom(x));
|
|
2116
|
-
|
|
2117
|
-
g.append('g')
|
|
2118
|
-
.call(d3.axisLeft(y));
|
|
2119
|
-
|
|
2120
|
-
g.append('path')
|
|
2121
|
-
.datum(data)
|
|
2122
|
-
.attr('fill', 'none')
|
|
2123
|
-
.attr('stroke', '#007bff')
|
|
2124
|
-
.attr('stroke-width', 2)
|
|
2125
|
-
.attr('d', line);
|
|
2126
|
-
|
|
2127
|
-
g.selectAll('.dot')
|
|
2128
|
-
.data(data)
|
|
2129
|
-
.enter().append('circle')
|
|
2130
|
-
.attr('class', 'dot')
|
|
2131
|
-
.attr('cx', d => x(d.date))
|
|
2132
|
-
.attr('cy', d => y(d.score))
|
|
2133
|
-
.attr('r', 3)
|
|
2134
|
-
.attr('fill', '#007bff');
|
|
2135
|
-
}}
|
|
2136
|
-
|
|
2137
|
-
function renderCoverageChart() {{
|
|
2138
|
-
// Similar D3.js implementation for coverage chart
|
|
2139
|
-
const container = document.getElementById('coverage-chart');
|
|
2140
|
-
container.innerHTML = '<p>Coverage and performance charts will be rendered here</p>';
|
|
2141
|
-
}}
|
|
2142
|
-
|
|
2143
|
-
function renderHealthChart() {{
|
|
2144
|
-
// System health visualization
|
|
2145
|
-
const container = document.getElementById('health-chart');
|
|
2146
|
-
container.innerHTML = '<p>System health metrics will be rendered here</p>';
|
|
2147
|
-
}}
|
|
2148
|
-
|
|
2149
|
-
function renderAlertsPanel() {{
|
|
2150
|
-
const container = document.getElementById('alerts-panel');
|
|
2151
|
-
|
|
2152
|
-
if (activeAlerts.length === 0) {{
|
|
2153
|
-
container.innerHTML = '<p style="color: #28a745;">✅ No active alerts</p>';
|
|
2154
|
-
return;
|
|
2155
|
-
}}
|
|
2156
|
-
|
|
2157
|
-
container.innerHTML = activeAlerts.slice(0, 10).map(alert => `
|
|
2158
|
-
<div style="padding: 10px; margin: 5px 0; border-left: 4px solid ${{
|
|
2159
|
-
alert.severity === 'critical' ? '#dc3545' :
|
|
2160
|
-
alert.severity === 'warning' ? '#ffc107' : '#17a2b8'
|
|
2161
|
-
}}; background-color: #f8f9fa;">
|
|
2162
|
-
<strong>${{alert.severity.toUpperCase()}}</strong>: ${{alert.message}}
|
|
2163
|
-
<br>
|
|
2164
|
-
<small>${{new Date(alert.timestamp).toLocaleString()}}</small>
|
|
2165
|
-
</div>
|
|
2166
|
-
`).join('');
|
|
2167
|
-
}}
|
|
2168
|
-
|
|
2169
|
-
function connectIntelligenceStreams() {{
|
|
2170
|
-
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
|
|
2171
|
-
const baseUrl = `${{protocol}}//${{window.location.host}}`;
|
|
2172
|
-
|
|
2173
|
-
// Anomaly detection stream
|
|
2174
|
-
anomaliesWs = new WebSocket(`${{baseUrl}}/ws/intelligence/anomalies`);
|
|
2175
|
-
anomaliesWs.onmessage = function(event) {{
|
|
2176
|
-
const message = JSON.parse(event.data);
|
|
2177
|
-
if (message.type.startsWith('anomalies_')) {{
|
|
2178
|
-
anomalies = message.data;
|
|
2179
|
-
renderAnomaliesPanel();
|
|
2180
|
-
}}
|
|
2181
|
-
}};
|
|
2182
|
-
|
|
2183
|
-
// Predictions stream
|
|
2184
|
-
predictionsWs = new WebSocket(`${{baseUrl}}/ws/intelligence/predictions`);
|
|
2185
|
-
predictionsWs.onmessage = function(event) {{
|
|
2186
|
-
const message = JSON.parse(event.data);
|
|
2187
|
-
if (message.type.startsWith('predictions_')) {{
|
|
2188
|
-
if (message.data.insights) {{
|
|
2189
|
-
predictions = message.data.insights;
|
|
2190
|
-
}} else {{
|
|
2191
|
-
predictions = message.data;
|
|
2192
|
-
}}
|
|
2193
|
-
renderPredictionsPanel();
|
|
2194
|
-
}}
|
|
2195
|
-
}};
|
|
2196
|
-
|
|
2197
|
-
// Patterns stream
|
|
2198
|
-
patternsWs = new WebSocket(`${{baseUrl}}/ws/intelligence/patterns`);
|
|
2199
|
-
patternsWs.onmessage = function(event) {{
|
|
2200
|
-
const message = JSON.parse(event.data);
|
|
2201
|
-
if (message.type.startsWith('patterns_')) {{
|
|
2202
|
-
patterns = message.data;
|
|
2203
|
-
renderPatternsPanel();
|
|
2204
|
-
}}
|
|
2205
|
-
}};
|
|
2206
|
-
|
|
2207
|
-
// Heat map stream
|
|
2208
|
-
heatMapWs = new WebSocket(`${{baseUrl}}/ws/heatmap/errors`);
|
|
2209
|
-
heatMapWs.onmessage = function(event) {{
|
|
2210
|
-
const message = JSON.parse(event.data);
|
|
2211
|
-
|
|
2212
|
-
if (message.type.includes('heatmap')) {{
|
|
2213
|
-
currentHeatMapData = message.data;
|
|
2214
|
-
renderHeatMap(currentHeatMapData, message.type);
|
|
2215
|
-
}} else if (message.type === 'error_patterns') {{
|
|
2216
|
-
errorPatterns = message.data;
|
|
2217
|
-
renderErrorPatterns();
|
|
2218
|
-
calculateSeverityBreakdown();
|
|
2219
|
-
}}
|
|
2220
|
-
}};
|
|
2221
|
-
}}
|
|
2222
|
-
|
|
2223
|
-
function renderAnomaliesPanel() {{
|
|
2224
|
-
const container = document.getElementById('anomalies-panel');
|
|
2225
|
-
|
|
2226
|
-
if (!anomalies || anomalies.length === 0) {{
|
|
2227
|
-
container.innerHTML = '<p style="color: #28a745;">✅ No anomalies detected</p>';
|
|
2228
|
-
return;
|
|
2229
|
-
}}
|
|
2230
|
-
|
|
2231
|
-
container.innerHTML = anomalies.slice(0, 5).map(anomaly => `
|
|
2232
|
-
<div style="padding: 10px; margin: 5px 0; border-left: 4px solid ${{
|
|
2233
|
-
anomaly.severity === 'critical' ? '#dc3545' :
|
|
2234
|
-
anomaly.severity === 'warning' ? '#ffc107' : '#17a2b8'
|
|
2235
|
-
}}; background-color: #f8f9fa;">
|
|
2236
|
-
<strong>🚨 ${{anomaly.metric}}</strong>: ${{anomaly.description}}
|
|
2237
|
-
<br>
|
|
2238
|
-
<small>Z-score: ${{anomaly.z_score.toFixed(2)}} | ${{new Date(anomaly.timestamp).toLocaleString()}}</small>
|
|
2239
|
-
</div>
|
|
2240
|
-
`).join('');
|
|
2241
|
-
}}
|
|
2242
|
-
|
|
2243
|
-
function renderPredictionsPanel() {{
|
|
2244
|
-
const container = document.getElementById('predictions-panel');
|
|
2245
|
-
|
|
2246
|
-
if (!predictions || Object.keys(predictions).length === 0) {{
|
|
2247
|
-
container.innerHTML = '<p>Loading predictions...</p>';
|
|
2248
|
-
return;
|
|
2249
|
-
}}
|
|
2250
|
-
|
|
2251
|
-
let content = '';
|
|
2252
|
-
|
|
2253
|
-
if (predictions.summary) {{
|
|
2254
|
-
content += `<div style="padding: 10px; background-color: #e7f3ff; border-radius: 5px; margin-bottom: 10px;">
|
|
2255
|
-
<strong>📈 Summary:</strong> ${{predictions.summary}}
|
|
2256
|
-
</div>`;
|
|
2257
|
-
}}
|
|
2258
|
-
|
|
2259
|
-
if (predictions.recommendations && predictions.recommendations.length > 0) {{
|
|
2260
|
-
content += '<h4>🎯 Recommendations:</h4><ul>';
|
|
2261
|
-
predictions.recommendations.slice(0, 3).forEach(rec => {{
|
|
2262
|
-
content += `<li>${{rec}}</li>`;
|
|
2263
|
-
}});
|
|
2264
|
-
content += '</ul>';
|
|
2265
|
-
}}
|
|
2266
|
-
|
|
2267
|
-
if (predictions.risk_factors && predictions.risk_factors.length > 0) {{
|
|
2268
|
-
content += '<h4>⚠️ Risk Factors:</h4><ul>';
|
|
2269
|
-
predictions.risk_factors.slice(0, 3).forEach(risk => {{
|
|
2270
|
-
content += `<li style="color: #dc3545;">${{risk}}</li>`;
|
|
2271
|
-
}});
|
|
2272
|
-
content += '</ul>';
|
|
2273
|
-
}}
|
|
2274
|
-
|
|
2275
|
-
container.innerHTML = content || '<p>No prediction data available</p>';
|
|
2276
|
-
}}
|
|
2277
|
-
|
|
2278
|
-
function renderPatternsPanel() {{
|
|
2279
|
-
const container = document.getElementById('patterns-panel');
|
|
2280
|
-
|
|
2281
|
-
if (!patterns || Object.keys(patterns).length === 0) {{
|
|
2282
|
-
container.innerHTML = '<p>Loading pattern analysis...</p>';
|
|
2283
|
-
return;
|
|
2284
|
-
}}
|
|
2285
|
-
|
|
2286
|
-
let content = '';
|
|
2287
|
-
|
|
2288
|
-
if (patterns.correlations && patterns.correlations.length > 0) {{
|
|
2289
|
-
content += '<h4>🔗 Strong Correlations:</h4><ul>';
|
|
2290
|
-
patterns.correlations.slice(0, 3).forEach(corr => {{
|
|
2291
|
-
const strength = Math.abs(corr.correlation) > 0.7 ? 'Strong' : 'Moderate';
|
|
2292
|
-
const direction = corr.correlation > 0 ? 'Positive' : 'Negative';
|
|
2293
|
-
content += `<li>${{corr.metric1}} ↔ ${{corr.metric2}}: ${{strength}} ${{direction}} (${{corr.correlation.toFixed(3)}})</li>`;
|
|
2294
|
-
}});
|
|
2295
|
-
content += '</ul>';
|
|
2296
|
-
}}
|
|
2297
|
-
|
|
2298
|
-
if (patterns.trends && patterns.trends.length > 0) {{
|
|
2299
|
-
content += '<h4>📊 Trending Patterns:</h4><ul>';
|
|
2300
|
-
patterns.trends.slice(0, 3).forEach(trend => {{
|
|
2301
|
-
content += `<li>${{trend}}</li>`;
|
|
2302
|
-
}});
|
|
2303
|
-
content += '</ul>';
|
|
2304
|
-
}}
|
|
2305
|
-
|
|
2306
|
-
container.innerHTML = content || '<p>No significant patterns detected</p>';
|
|
2307
|
-
}}
|
|
2308
|
-
|
|
2309
|
-
// Dependency Graph Functions
|
|
2310
|
-
function loadDependencyGraph() {{
|
|
2311
|
-
const button = document.getElementById('load-dependency-graph');
|
|
2312
|
-
button.textContent = 'Loading...';
|
|
2313
|
-
button.disabled = true;
|
|
2314
|
-
|
|
2315
|
-
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
|
|
2316
|
-
const wsUrl = `${{protocol}}//${{window.location.host}}/ws/dependencies/graph`;
|
|
2317
|
-
|
|
2318
|
-
dependencyWs = new WebSocket(wsUrl);
|
|
2319
|
-
|
|
2320
|
-
dependencyWs.onopen = function() {{
|
|
2321
|
-
console.log('Connected to dependency analysis');
|
|
2322
|
-
}};
|
|
2323
|
-
|
|
2324
|
-
dependencyWs.onmessage = function(event) {{
|
|
2325
|
-
const message = JSON.parse(event.data);
|
|
2326
|
-
|
|
2327
|
-
switch(message.type) {{
|
|
2328
|
-
case 'analysis_started':
|
|
2329
|
-
updateGraphStatus(message.message);
|
|
2330
|
-
break;
|
|
2331
|
-
case 'graph_data':
|
|
2332
|
-
case 'filtered_graph':
|
|
2333
|
-
dependencyGraph = message.data;
|
|
2334
|
-
renderDependencyGraph(dependencyGraph);
|
|
2335
|
-
enableGraphControls();
|
|
2336
|
-
updateGraphStatus(`Graph loaded: ${{dependencyGraph.nodes.length}} nodes, ${{dependencyGraph.edges.length}} edges`);
|
|
2337
|
-
break;
|
|
2338
|
-
case 'keepalive':
|
|
2339
|
-
// Connection alive
|
|
2340
|
-
break;
|
|
2341
|
-
}}
|
|
2342
|
-
}};
|
|
2343
|
-
|
|
2344
|
-
dependencyWs.onclose = function() {{
|
|
2345
|
-
console.log('Dependency analysis connection closed');
|
|
2346
|
-
button.textContent = 'Load Dependency Graph';
|
|
2347
|
-
button.disabled = false;
|
|
2348
|
-
}};
|
|
2349
|
-
|
|
2350
|
-
dependencyWs.onerror = function(error) {{
|
|
2351
|
-
console.error('Dependency WebSocket error:', error);
|
|
2352
|
-
updateGraphStatus('Error loading dependency graph');
|
|
2353
|
-
button.textContent = 'Load Dependency Graph';
|
|
2354
|
-
button.disabled = false;
|
|
2355
|
-
}};
|
|
2356
|
-
}}
|
|
2357
|
-
|
|
2358
|
-
function refreshDependencyGraph() {{
|
|
2359
|
-
if (dependencyWs && dependencyWs.readyState === WebSocket.OPEN) {{
|
|
2360
|
-
dependencyWs.send(JSON.stringify({{ type: 'refresh_request' }}));
|
|
2361
|
-
updateGraphStatus('Refreshing dependency analysis...');
|
|
2362
|
-
}}
|
|
2363
|
-
}}
|
|
2364
|
-
|
|
2365
|
-
function applyGraphFilter() {{
|
|
2366
|
-
if (!dependencyWs || dependencyWs.readyState !== WebSocket.OPEN) return;
|
|
2367
|
-
|
|
2368
|
-
const filterType = document.getElementById('graph-filter').value;
|
|
2369
|
-
const includeExternal = document.getElementById('include-external').checked;
|
|
2370
|
-
|
|
2371
|
-
const filters = {{
|
|
2372
|
-
type: filterType || null,
|
|
2373
|
-
include_external: includeExternal,
|
|
2374
|
-
max_nodes: 500 // Limit for performance
|
|
2375
|
-
}};
|
|
2376
|
-
|
|
2377
|
-
dependencyWs.send(JSON.stringify({{
|
|
2378
|
-
type: 'filter_request',
|
|
2379
|
-
filters: filters
|
|
2380
|
-
}}));
|
|
2381
|
-
|
|
2382
|
-
updateGraphStatus('Applying filters...');
|
|
2383
|
-
}}
|
|
2384
|
-
|
|
2385
|
-
function enableGraphControls() {{
|
|
2386
|
-
document.getElementById('refresh-graph').disabled = false;
|
|
2387
|
-
document.getElementById('graph-filter').disabled = false;
|
|
2388
|
-
document.getElementById('include-external').disabled = false;
|
|
2389
|
-
}}
|
|
2390
|
-
|
|
2391
|
-
function updateGraphStatus(message) {{
|
|
2392
|
-
const container = document.getElementById('dependency-graph');
|
|
2393
|
-
if (!dependencyGraph) {{
|
|
2394
|
-
container.innerHTML = `<p style="padding: 20px; text-align: center;">${{message}}</p>`;
|
|
2395
|
-
}}
|
|
2396
|
-
}}
|
|
2397
|
-
|
|
2398
|
-
function renderDependencyGraph(graphData) {{
|
|
2399
|
-
const container = d3.select('#dependency-graph');
|
|
2400
|
-
container.selectAll('*').remove();
|
|
2401
|
-
|
|
2402
|
-
if (!graphData || !graphData.nodes || graphData.nodes.length === 0) {{
|
|
2403
|
-
container.append('p')
|
|
2404
|
-
.style('padding', '20px')
|
|
2405
|
-
.style('text-align', 'center')
|
|
2406
|
-
.text('No dependency data available');
|
|
2407
|
-
return;
|
|
2408
|
-
}}
|
|
2409
|
-
|
|
2410
|
-
const width = 800;
|
|
2411
|
-
const height = 600;
|
|
2412
|
-
|
|
2413
|
-
const svg = container
|
|
2414
|
-
.append('svg')
|
|
2415
|
-
.attr('width', width)
|
|
2416
|
-
.attr('height', height);
|
|
2417
|
-
|
|
2418
|
-
// Create color scale for node types
|
|
2419
|
-
const colorScale = d3.scaleOrdinal()
|
|
2420
|
-
.domain(['module', 'class', 'function', 'method'])
|
|
2421
|
-
.range(['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728']);
|
|
2422
|
-
|
|
2423
|
-
// Create force simulation
|
|
2424
|
-
const simulation = d3.forceSimulation(graphData.nodes)
|
|
2425
|
-
.force('link', d3.forceLink(graphData.edges)
|
|
2426
|
-
.id(d => d.id)
|
|
2427
|
-
.distance(d => 50 + d.weight * 20))
|
|
2428
|
-
.force('charge', d3.forceManyBody()
|
|
2429
|
-
.strength(d => -100 - d.size * 10))
|
|
2430
|
-
.force('center', d3.forceCenter(width / 2, height / 2))
|
|
2431
|
-
.force('collision', d3.forceCollide(d => Math.max(5, d.size * 2)));
|
|
2432
|
-
|
|
2433
|
-
// Add zoom behavior
|
|
2434
|
-
const zoom = d3.zoom()
|
|
2435
|
-
.scaleExtent([0.1, 4])
|
|
2436
|
-
.on('zoom', function(event) {{
|
|
2437
|
-
g.attr('transform', event.transform);
|
|
2438
|
-
}});
|
|
2439
|
-
|
|
2440
|
-
svg.call(zoom);
|
|
2441
|
-
|
|
2442
|
-
const g = svg.append('g');
|
|
2443
|
-
|
|
2444
|
-
// Add links
|
|
2445
|
-
const link = g.append('g')
|
|
2446
|
-
.selectAll('line')
|
|
2447
|
-
.data(graphData.edges)
|
|
2448
|
-
.join('line')
|
|
2449
|
-
.attr('stroke', d => {{
|
|
2450
|
-
switch(d.type) {{
|
|
2451
|
-
case 'import': return '#999';
|
|
2452
|
-
case 'inheritance': return '#ff7f0e';
|
|
2453
|
-
case 'call': return '#2ca02c';
|
|
2454
|
-
default: return '#ccc';
|
|
2455
|
-
}}
|
|
2456
|
-
}})
|
|
2457
|
-
.attr('stroke-width', d => Math.max(1, d.weight * 2))
|
|
2458
|
-
.attr('stroke-opacity', 0.6);
|
|
2459
|
-
|
|
2460
|
-
// Add nodes
|
|
2461
|
-
const node = g.append('g')
|
|
2462
|
-
.selectAll('circle')
|
|
2463
|
-
.data(graphData.nodes)
|
|
2464
|
-
.join('circle')
|
|
2465
|
-
.attr('r', d => Math.max(3, Math.min(20, d.size + d.complexity)))
|
|
2466
|
-
.attr('fill', d => colorScale(d.type))
|
|
2467
|
-
.attr('stroke', '#fff')
|
|
2468
|
-
.attr('stroke-width', 1.5)
|
|
2469
|
-
.call(d3.drag()
|
|
2470
|
-
.on('start', function(event, d) {{
|
|
2471
|
-
if (!event.active) simulation.alphaTarget(0.3).restart();
|
|
2472
|
-
d.fx = d.x;
|
|
2473
|
-
d.fy = d.y;
|
|
2474
|
-
}})
|
|
2475
|
-
.on('drag', function(event, d) {{
|
|
2476
|
-
d.fx = event.x;
|
|
2477
|
-
d.fy = event.y;
|
|
2478
|
-
}})
|
|
2479
|
-
.on('end', function(event, d) {{
|
|
2480
|
-
if (!event.active) simulation.alphaTarget(0);
|
|
2481
|
-
d.fx = null;
|
|
2482
|
-
d.fy = null;
|
|
2483
|
-
}}));
|
|
2484
|
-
|
|
2485
|
-
// Add labels for important nodes
|
|
2486
|
-
const label = g.append('g')
|
|
2487
|
-
.selectAll('text')
|
|
2488
|
-
.data(graphData.nodes.filter(d => d.size > 5 || d.complexity > 10))
|
|
2489
|
-
.join('text')
|
|
2490
|
-
.text(d => d.name.split('.').pop()) // Show just the last part of the name
|
|
2491
|
-
.attr('font-size', '10px')
|
|
2492
|
-
.attr('font-family', 'Arial, sans-serif')
|
|
2493
|
-
.attr('fill', '#333')
|
|
2494
|
-
.attr('text-anchor', 'middle')
|
|
2495
|
-
.attr('dy', '0.3em');
|
|
2496
|
-
|
|
2497
|
-
// Add tooltips
|
|
2498
|
-
node.append('title')
|
|
2499
|
-
.text(d => `${{d.name}}\\nType: ${{d.type}}\\nComplexity: ${{d.complexity}}\\nFile: ${{d.file_path}}`);
|
|
2500
|
-
|
|
2501
|
-
link.append('title')
|
|
2502
|
-
.text(d => `${{d.source.id}} → ${{d.target.id}}\\nType: ${{d.type}}\\nWeight: ${{d.weight}}`);
|
|
2503
|
-
|
|
2504
|
-
// Update positions on simulation tick
|
|
2505
|
-
simulation.on('tick', function() {{
|
|
2506
|
-
link
|
|
2507
|
-
.attr('x1', d => d.source.x)
|
|
2508
|
-
.attr('y1', d => d.source.y)
|
|
2509
|
-
.attr('x2', d => d.target.x)
|
|
2510
|
-
.attr('y2', d => d.target.y);
|
|
2511
|
-
|
|
2512
|
-
node
|
|
2513
|
-
.attr('cx', d => d.x)
|
|
2514
|
-
.attr('cy', d => d.y);
|
|
2515
|
-
|
|
2516
|
-
label
|
|
2517
|
-
.attr('x', d => d.x)
|
|
2518
|
-
.attr('y', d => d.y);
|
|
2519
|
-
}});
|
|
2520
|
-
|
|
2521
|
-
// Add legend
|
|
2522
|
-
const legend = svg.append('g')
|
|
2523
|
-
.attr('transform', 'translate(20, 20)');
|
|
2524
|
-
|
|
2525
|
-
const legendData = [
|
|
2526
|
-
{{ type: 'module', color: colorScale('module') }},
|
|
2527
|
-
{{ type: 'class', color: colorScale('class') }},
|
|
2528
|
-
{{ type: 'function', color: colorScale('function') }},
|
|
2529
|
-
{{ type: 'method', color: colorScale('method') }}
|
|
2530
|
-
];
|
|
2531
|
-
|
|
2532
|
-
legend.selectAll('g')
|
|
2533
|
-
.data(legendData)
|
|
2534
|
-
.join('g')
|
|
2535
|
-
.attr('transform', (d, i) => `translate(0, ${{i * 20}})`)
|
|
2536
|
-
.each(function(d) {{
|
|
2537
|
-
const g = d3.select(this);
|
|
2538
|
-
g.append('circle')
|
|
2539
|
-
.attr('r', 6)
|
|
2540
|
-
.attr('fill', d.color);
|
|
2541
|
-
g.append('text')
|
|
2542
|
-
.attr('x', 15)
|
|
2543
|
-
.attr('y', 0)
|
|
2544
|
-
.attr('dy', '0.3em')
|
|
2545
|
-
.attr('font-size', '12px')
|
|
2546
|
-
.attr('fill', '#333')
|
|
2547
|
-
.text(d.type);
|
|
2548
|
-
}});
|
|
2549
|
-
}}
|
|
2550
|
-
|
|
2551
|
-
// Heat map functions
|
|
2552
|
-
function loadErrorHeatMap() {{
|
|
2553
|
-
const button = document.getElementById('load-heatmap');
|
|
2554
|
-
button.textContent = 'Loading...';
|
|
2555
|
-
button.disabled = true;
|
|
2556
|
-
|
|
2557
|
-
// WebSocket will handle the data loading
|
|
2558
|
-
setTimeout(() => {{
|
|
2559
|
-
button.textContent = 'Load Heat Map';
|
|
2560
|
-
button.disabled = false;
|
|
2561
|
-
}}, 2000);
|
|
2562
|
-
}}
|
|
2563
|
-
|
|
2564
|
-
function updateHeatMap() {{
|
|
2565
|
-
if (!heatMapWs || heatMapWs.readyState !== WebSocket.OPEN) return;
|
|
2566
|
-
|
|
2567
|
-
const heatmapType = document.getElementById('heatmap-type').value;
|
|
2568
|
-
const days = parseInt(document.getElementById('analysis-days').value);
|
|
2569
|
-
const timeBuckets = document.getElementById('time-buckets').value;
|
|
2570
|
-
|
|
2571
|
-
// Enable/disable time buckets input based on type
|
|
2572
|
-
document.getElementById('time-buckets').disabled = heatmapType !== 'temporal';
|
|
2573
|
-
|
|
2574
|
-
const message = {{
|
|
2575
|
-
type: 'refresh_heatmap',
|
|
2576
|
-
heatmap_type: heatmapType,
|
|
2577
|
-
days: days,
|
|
2578
|
-
time_buckets: heatmapType === 'temporal' ? parseInt(timeBuckets) : 24
|
|
2579
|
-
}};
|
|
2580
|
-
|
|
2581
|
-
heatMapWs.send(JSON.stringify(message));
|
|
2582
|
-
}}
|
|
2583
|
-
|
|
2584
|
-
function updateTemporalHeatMap() {{
|
|
2585
|
-
if (document.getElementById('heatmap-type').value === 'temporal') {{
|
|
2586
|
-
updateHeatMap();
|
|
2587
|
-
}}
|
|
2588
|
-
}}
|
|
2589
|
-
|
|
2590
|
-
function showHeatMapTab(tabName) {{
|
|
2591
|
-
// Hide all tab contents
|
|
2592
|
-
document.querySelectorAll('.tab-content').forEach(tab => {{
|
|
2593
|
-
tab.classList.remove('active');
|
|
2594
|
-
}});
|
|
2595
|
-
|
|
2596
|
-
// Show selected tab content
|
|
2597
|
-
document.getElementById(tabName + '-tab').classList.add('active');
|
|
2598
|
-
|
|
2599
|
-
// Update tab buttons
|
|
2600
|
-
document.querySelectorAll('.tab-button').forEach(button => {{
|
|
2601
|
-
button.classList.remove('active');
|
|
2602
|
-
}});
|
|
2603
|
-
event.target.classList.add('active');
|
|
2604
|
-
}}
|
|
2605
|
-
|
|
2606
|
-
function renderHeatMap(heatmapData, type) {{
|
|
2607
|
-
if (!heatmapData || !heatmapData.cells || heatmapData.cells.length === 0) {{
|
|
2608
|
-
document.getElementById('error-heatmap').innerHTML =
|
|
2609
|
-
'<div style="text-align: center; padding: 20px; color: #28a745;">✅ No error patterns found</div>';
|
|
2610
|
-
return;
|
|
2611
|
-
}}
|
|
2612
|
-
|
|
2613
|
-
const container = d3.select('#error-heatmap');
|
|
2614
|
-
container.selectAll('*').remove();
|
|
2615
|
-
|
|
2616
|
-
const margin = {{ top: 80, right: 100, bottom: 100, left: 200 }};
|
|
2617
|
-
const width = 1000 - margin.left - margin.right;
|
|
2618
|
-
const height = 600 - margin.bottom - margin.top;
|
|
2619
|
-
|
|
2620
|
-
// Create SVG
|
|
2621
|
-
const svg = container.append('svg')
|
|
2622
|
-
.attr('width', width + margin.left + margin.right)
|
|
2623
|
-
.attr('height', height + margin.bottom + margin.top);
|
|
2624
|
-
|
|
2625
|
-
const g = svg.append('g')
|
|
2626
|
-
.attr('transform', `translate(${{margin.left}},${{margin.top}})`);
|
|
2627
|
-
|
|
2628
|
-
// Create scales
|
|
2629
|
-
const xScale = d3.scaleBand()
|
|
2630
|
-
.domain(heatmapData.x_labels)
|
|
2631
|
-
.range([0, width])
|
|
2632
|
-
.padding(0.1);
|
|
2633
|
-
|
|
2634
|
-
const yScale = d3.scaleBand()
|
|
2635
|
-
.domain(heatmapData.y_labels)
|
|
2636
|
-
.range([height, 0])
|
|
2637
|
-
.padding(0.1);
|
|
2638
|
-
|
|
2639
|
-
// Color scale based on severity
|
|
2640
|
-
const colorScale = d3.scaleOrdinal()
|
|
2641
|
-
.domain(['low', 'medium', 'high', 'critical'])
|
|
2642
|
-
.range(['#fffbd4', '#ffeaa7', '#fd79a8', '#e84393']);
|
|
2643
|
-
|
|
2644
|
-
// Intensity scale for opacity
|
|
2645
|
-
const intensityScale = d3.scaleLinear()
|
|
2646
|
-
.domain([0, 1])
|
|
2647
|
-
.range([0.3, 1]);
|
|
2648
|
-
|
|
2649
|
-
// Create heat map cells
|
|
2650
|
-
const cells = g.selectAll('.heatmap-cell')
|
|
2651
|
-
.data(heatmapData.cells)
|
|
2652
|
-
.join('rect')
|
|
2653
|
-
.attr('class', 'heatmap-cell')
|
|
2654
|
-
.attr('x', d => xScale(d.x))
|
|
2655
|
-
.attr('y', d => yScale(d.y))
|
|
2656
|
-
.attr('width', xScale.bandwidth())
|
|
2657
|
-
.attr('height', yScale.bandwidth())
|
|
2658
|
-
.attr('fill', d => colorScale(d.severity))
|
|
2659
|
-
.attr('opacity', d => intensityScale(d.color_intensity))
|
|
2660
|
-
.attr('stroke', '#fff')
|
|
2661
|
-
.attr('stroke-width', 1);
|
|
2662
|
-
|
|
2663
|
-
// Add tooltips
|
|
2664
|
-
cells.append('title')
|
|
2665
|
-
.text(d => {{
|
|
2666
|
-
const tooltip = d.tooltip_data;
|
|
2667
|
-
return `${{tooltip.file || tooltip.time || tooltip.function}}
|
|
2668
|
-
Error: ${{tooltip.error_type}}
|
|
2669
|
-
Count: ${{tooltip.count}}
|
|
2670
|
-
Severity: ${{tooltip.severity}}`;
|
|
2671
|
-
}});
|
|
2672
|
-
|
|
2673
|
-
// Add value labels for high-intensity cells
|
|
2674
|
-
g.selectAll('.cell-label')
|
|
2675
|
-
.data(heatmapData.cells.filter(d => d.color_intensity > 0.6))
|
|
2676
|
-
.join('text')
|
|
2677
|
-
.attr('class', 'cell-label')
|
|
2678
|
-
.attr('x', d => xScale(d.x) + xScale.bandwidth() / 2)
|
|
2679
|
-
.attr('y', d => yScale(d.y) + yScale.bandwidth() / 2)
|
|
2680
|
-
.attr('dy', '0.35em')
|
|
2681
|
-
.attr('text-anchor', 'middle')
|
|
2682
|
-
.attr('fill', 'white')
|
|
2683
|
-
.attr('font-size', '10px')
|
|
2684
|
-
.attr('font-weight', 'bold')
|
|
2685
|
-
.text(d => d.value);
|
|
2686
|
-
|
|
2687
|
-
// Add axes
|
|
2688
|
-
const xAxis = g.append('g')
|
|
2689
|
-
.attr('transform', `translate(0, ${{height}})`)
|
|
2690
|
-
.call(d3.axisBottom(xScale))
|
|
2691
|
-
.selectAll('text')
|
|
2692
|
-
.style('text-anchor', 'end')
|
|
2693
|
-
.attr('dx', '-.8em')
|
|
2694
|
-
.attr('dy', '.15em')
|
|
2695
|
-
.attr('transform', 'rotate(-45)');
|
|
2696
|
-
|
|
2697
|
-
const yAxis = g.append('g')
|
|
2698
|
-
.call(d3.axisLeft(yScale));
|
|
2699
|
-
|
|
2700
|
-
// Add title
|
|
2701
|
-
svg.append('text')
|
|
2702
|
-
.attr('x', (width + margin.left + margin.right) / 2)
|
|
2703
|
-
.attr('y', margin.top / 2)
|
|
2704
|
-
.attr('text-anchor', 'middle')
|
|
2705
|
-
.attr('font-size', '16px')
|
|
2706
|
-
.attr('font-weight', 'bold')
|
|
2707
|
-
.text(heatmapData.title);
|
|
2708
|
-
|
|
2709
|
-
// Add subtitle
|
|
2710
|
-
svg.append('text')
|
|
2711
|
-
.attr('x', (width + margin.left + margin.right) / 2)
|
|
2712
|
-
.attr('y', margin.top * 0.7)
|
|
2713
|
-
.attr('text-anchor', 'middle')
|
|
2714
|
-
.attr('font-size', '12px')
|
|
2715
|
-
.attr('fill', '#666')
|
|
2716
|
-
.text(heatmapData.subtitle);
|
|
2717
|
-
|
|
2718
|
-
// Add legend
|
|
2719
|
-
const legend = svg.append('g')
|
|
2720
|
-
.attr('transform', `translate(${{width + margin.left + 20}}, ${{margin.top}})`);
|
|
2721
|
-
|
|
2722
|
-
const legendData = [
|
|
2723
|
-
{{ severity: 'low', color: colorScale('low') }},
|
|
2724
|
-
{{ severity: 'medium', color: colorScale('medium') }},
|
|
2725
|
-
{{ severity: 'high', color: colorScale('high') }},
|
|
2726
|
-
{{ severity: 'critical', color: colorScale('critical') }}
|
|
2727
|
-
];
|
|
2728
|
-
|
|
2729
|
-
legend.selectAll('g')
|
|
2730
|
-
.data(legendData)
|
|
2731
|
-
.join('g')
|
|
2732
|
-
.attr('transform', (d, i) => `translate(0, ${{i * 25}})`)
|
|
2733
|
-
.each(function(d) {{
|
|
2734
|
-
const g = d3.select(this);
|
|
2735
|
-
g.append('rect')
|
|
2736
|
-
.attr('width', 15)
|
|
2737
|
-
.attr('height', 15)
|
|
2738
|
-
.attr('fill', d.color);
|
|
2739
|
-
g.append('text')
|
|
2740
|
-
.attr('x', 20)
|
|
2741
|
-
.attr('y', 12)
|
|
2742
|
-
.attr('font-size', '12px')
|
|
2743
|
-
.text(d.severity);
|
|
2744
|
-
}});
|
|
2745
|
-
}}
|
|
2746
|
-
|
|
2747
|
-
function renderErrorPatterns() {{
|
|
2748
|
-
const container = document.getElementById('error-patterns-list');
|
|
2749
|
-
|
|
2750
|
-
if (!errorPatterns || errorPatterns.length === 0) {{
|
|
2751
|
-
container.innerHTML = '<p style="color: #28a745;">✅ No error patterns found</p>';
|
|
2752
|
-
return;
|
|
2753
|
-
}}
|
|
2754
|
-
|
|
2755
|
-
// Sort by severity and count
|
|
2756
|
-
const sortedPatterns = [...errorPatterns].sort((a, b) => {{
|
|
2757
|
-
const severityOrder = {{ critical: 4, high: 3, medium: 2, low: 1 }};
|
|
2758
|
-
if (severityOrder[b.severity] !== severityOrder[a.severity]) {{
|
|
2759
|
-
return severityOrder[b.severity] - severityOrder[a.severity];
|
|
2760
|
-
}}
|
|
2761
|
-
return b.count - a.count;
|
|
2762
|
-
}});
|
|
2763
|
-
|
|
2764
|
-
container.innerHTML = sortedPatterns.map(pattern => {{
|
|
2765
|
-
const severityColor = {{
|
|
2766
|
-
critical: '#e84393',
|
|
2767
|
-
high: '#fd79a8',
|
|
2768
|
-
medium: '#ffeaa7',
|
|
2769
|
-
low: '#fffbd4'
|
|
2770
|
-
}};
|
|
2771
|
-
|
|
2772
|
-
const trendIcon = {{
|
|
2773
|
-
increasing: '📈',
|
|
2774
|
-
stable: '➡️',
|
|
2775
|
-
decreasing: '📉'
|
|
2776
|
-
}};
|
|
2777
|
-
|
|
2778
|
-
return `
|
|
2779
|
-
<div style="
|
|
2780
|
-
padding: 15px;
|
|
2781
|
-
margin: 10px 0;
|
|
2782
|
-
border: 1px solid #ddd;
|
|
2783
|
-
border-radius: 8px;
|
|
2784
|
-
border-left: 4px solid ${{severityColor[pattern.severity]}};
|
|
2785
|
-
background-color: ${{pattern.severity === 'critical' ? '#fff5f5' : '#fafafa'}};
|
|
2786
|
-
">
|
|
2787
|
-
<div style="display: flex; justify-content: space-between; align-items: center; margin-bottom: 10px;">
|
|
2788
|
-
<h4 style="margin: 0; color: #333;">${{pattern.error_type}}</h4>
|
|
2789
|
-
<div style="display: flex; align-items: center; gap: 10px;">
|
|
2790
|
-
<span style="
|
|
2791
|
-
background: ${{severityColor[pattern.severity]}};
|
|
2792
|
-
padding: 2px 8px;
|
|
2793
|
-
border-radius: 12px;
|
|
2794
|
-
font-size: 12px;
|
|
2795
|
-
font-weight: bold;
|
|
2796
|
-
color: ${{pattern.severity === 'low' ? '#333' : '#fff'}};
|
|
2797
|
-
">${{pattern.severity.toUpperCase()}}</span>
|
|
2798
|
-
<span style="font-size: 14px;">${{trendIcon[pattern.trend]}} ${{pattern.trend}}</span>
|
|
2799
|
-
<span style="font-weight: bold; color: #e74c3c;">×${{pattern.count}}</span>
|
|
2800
|
-
</div>
|
|
2801
|
-
</div>
|
|
2802
|
-
<p style="margin: 5px 0; color: #666; font-family: monospace; background: #f8f9fa; padding: 5px; border-radius: 4px; font-size: 12px;">
|
|
2803
|
-
${{pattern.message}}
|
|
2804
|
-
</p>
|
|
2805
|
-
<div style="display: flex; justify-content: space-between; font-size: 12px; color: #888; margin-top: 10px;">
|
|
2806
|
-
<span>📁 ${{pattern.file_path}}${{pattern.function_name ? ':' + pattern.function_name : ''}}</span>
|
|
2807
|
-
<span>🕒 Last seen: ${{new Date(pattern.last_seen).toLocaleDateString()}}</span>
|
|
2808
|
-
</div>
|
|
2809
|
-
<div style="margin-top: 8px;">
|
|
2810
|
-
<div style="background: #e9ecef; height: 4px; border-radius: 2px; overflow: hidden;">
|
|
2811
|
-
<div style="
|
|
2812
|
-
background: ${{severityColor[pattern.severity]}};
|
|
2813
|
-
height: 100%;
|
|
2814
|
-
width: ${{Math.min(pattern.confidence * 100, 100)}}%;
|
|
2815
|
-
transition: width 0.3s ease;
|
|
2816
|
-
"></div>
|
|
2817
|
-
</div>
|
|
2818
|
-
<span style="font-size: 10px; color: #999;">Confidence: ${{Math.round(pattern.confidence * 100)}}%</span>
|
|
2819
|
-
</div>
|
|
2820
|
-
</div>
|
|
2821
|
-
`;
|
|
2822
|
-
}}).join('');
|
|
2823
|
-
}}
|
|
2824
|
-
|
|
2825
|
-
function calculateSeverityBreakdown() {{
|
|
2826
|
-
if (!errorPatterns) return;
|
|
2827
|
-
|
|
2828
|
-
severityBreakdown = errorPatterns.reduce((acc, pattern) => {{
|
|
2829
|
-
acc[pattern.severity] = (acc[pattern.severity] || 0) + 1;
|
|
2830
|
-
return acc;
|
|
2831
|
-
}}, {{}});
|
|
2832
|
-
|
|
2833
|
-
renderSeverityBreakdown();
|
|
2834
|
-
}}
|
|
2835
|
-
|
|
2836
|
-
function renderSeverityBreakdown() {{
|
|
2837
|
-
const container = document.getElementById('severity-breakdown');
|
|
2838
|
-
|
|
2839
|
-
if (!severityBreakdown || Object.keys(severityBreakdown).length === 0) {{
|
|
2840
|
-
container.innerHTML = '<p style="color: #28a745;">✅ No severity data available</p>';
|
|
2841
|
-
return;
|
|
2842
|
-
}}
|
|
2843
|
-
|
|
2844
|
-
const total = Object.values(severityBreakdown).reduce((sum, count) => sum + count, 0);
|
|
2845
|
-
const severityColors = {{
|
|
2846
|
-
critical: '#e84393',
|
|
2847
|
-
high: '#fd79a8',
|
|
2848
|
-
medium: '#ffeaa7',
|
|
2849
|
-
low: '#fffbd4'
|
|
2850
|
-
}};
|
|
2851
|
-
|
|
2852
|
-
const severityOrder = ['critical', 'high', 'medium', 'low'];
|
|
2853
|
-
|
|
2854
|
-
container.innerHTML = `
|
|
2855
|
-
<div style="padding: 20px;">
|
|
2856
|
-
<h3 style="margin-bottom: 20px;">Severity Distribution (${{total}} total patterns)</h3>
|
|
2857
|
-
${{severityOrder.map(severity => {{
|
|
2858
|
-
const count = severityBreakdown[severity] || 0;
|
|
2859
|
-
const percentage = total > 0 ? Math.round((count / total) * 100) : 0;
|
|
2860
|
-
|
|
2861
|
-
return `
|
|
2862
|
-
<div style="margin-bottom: 15px;">
|
|
2863
|
-
<div style="
|
|
2864
|
-
display: flex;
|
|
2865
|
-
justify-content: space-between;
|
|
2866
|
-
align-items: center;
|
|
2867
|
-
margin-bottom: 5px;
|
|
2868
|
-
">
|
|
2869
|
-
<span style="font-weight: bold; text-transform: capitalize;">
|
|
2870
|
-
${{severity}}
|
|
2871
|
-
</span>
|
|
2872
|
-
<span>${{count}} (${{percentage}}%)</span>
|
|
2873
|
-
</div>
|
|
2874
|
-
<div style="
|
|
2875
|
-
background: #e9ecef;
|
|
2876
|
-
height: 8px;
|
|
2877
|
-
border-radius: 4px;
|
|
2878
|
-
overflow: hidden;
|
|
2879
|
-
">
|
|
2880
|
-
<div style="
|
|
2881
|
-
background: ${{severityColors[severity]}};
|
|
2882
|
-
height: 100%;
|
|
2883
|
-
width: ${{percentage}}%;
|
|
2884
|
-
transition: width 0.5s ease;
|
|
2885
|
-
"></div>
|
|
2886
|
-
</div>
|
|
2887
|
-
</div>
|
|
2888
|
-
`;
|
|
2889
|
-
}}).join('')}}
|
|
2890
|
-
|
|
2891
|
-
<div style="
|
|
2892
|
-
margin-top: 30px;
|
|
2893
|
-
padding: 15px;
|
|
2894
|
-
background: #f8f9fa;
|
|
2895
|
-
border-radius: 8px;
|
|
2896
|
-
">
|
|
2897
|
-
<h4 style="margin-top: 0;">Recommendations</h4>
|
|
2898
|
-
<ul style="margin: 0; padding-left: 20px;">
|
|
2899
|
-
${{severityBreakdown.critical ? (
|
|
2900
|
-
'<li style="color: #e84393;">'
|
|
2901
|
-
+ '🚨 <strong>Critical errors require immediate attention</strong>'
|
|
2902
|
-
+ '</li>'
|
|
2903
|
-
) : ''}}
|
|
2904
|
-
${{severityBreakdown.high ? (
|
|
2905
|
-
'<li style="color: #fd79a8;">'
|
|
2906
|
-
+ '⚠️ High severity errors should be prioritized'
|
|
2907
|
-
+ '</li>'
|
|
2908
|
-
) : ''}}
|
|
2909
|
-
${{severityBreakdown.medium && severityBreakdown.medium > 5 ? (
|
|
2910
|
-
'<li style="color: #f39c12;">'
|
|
2911
|
-
+ '📊 Consider batch-fixing medium severity patterns'
|
|
2912
|
-
+ '</li>'
|
|
2913
|
-
) : ''}}
|
|
2914
|
-
${{severityBreakdown.low && severityBreakdown.low > 10 ? (
|
|
2915
|
-
'<li style="color: #3498db;">'
|
|
2916
|
-
+ '🔧 Low severity issues can be automated or batched'
|
|
2917
|
-
+ '</li>'
|
|
2918
|
-
) : ''}}
|
|
2919
|
-
</ul>
|
|
2920
|
-
</div>
|
|
2921
|
-
</div>
|
|
2922
|
-
`;
|
|
2923
|
-
}}
|
|
2924
|
-
|
|
2925
|
-
// Request notification permissions
|
|
2926
|
-
if ('Notification' in window && Notification.permission === 'default') {{
|
|
2927
|
-
Notification.requestPermission();
|
|
2928
|
-
}}
|
|
2929
|
-
|
|
2930
|
-
// Initialize dashboard
|
|
2931
|
-
connect();
|
|
2932
|
-
updateConnectionStatus();
|
|
2933
|
-
</script>
|
|
2934
|
-
</body>
|
|
2935
|
-
</html>"""
|
|
21
|
+
__all__ = ["create_monitoring_endpoints", "MonitoringWebSocketManager"]
|