crackerjack 0.18.2__py3-none-any.whl → 0.45.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- crackerjack/README.md +19 -0
- crackerjack/__init__.py +96 -2
- crackerjack/__main__.py +637 -138
- crackerjack/adapters/README.md +18 -0
- crackerjack/adapters/__init__.py +39 -0
- crackerjack/adapters/_output_paths.py +167 -0
- crackerjack/adapters/_qa_adapter_base.py +309 -0
- crackerjack/adapters/_tool_adapter_base.py +706 -0
- crackerjack/adapters/ai/README.md +65 -0
- crackerjack/adapters/ai/__init__.py +5 -0
- crackerjack/adapters/ai/claude.py +853 -0
- crackerjack/adapters/complexity/README.md +53 -0
- crackerjack/adapters/complexity/__init__.py +10 -0
- crackerjack/adapters/complexity/complexipy.py +641 -0
- crackerjack/adapters/dependency/__init__.py +22 -0
- crackerjack/adapters/dependency/pip_audit.py +418 -0
- crackerjack/adapters/format/README.md +72 -0
- crackerjack/adapters/format/__init__.py +11 -0
- crackerjack/adapters/format/mdformat.py +313 -0
- crackerjack/adapters/format/ruff.py +516 -0
- crackerjack/adapters/lint/README.md +47 -0
- crackerjack/adapters/lint/__init__.py +11 -0
- crackerjack/adapters/lint/codespell.py +273 -0
- crackerjack/adapters/lsp/README.md +49 -0
- crackerjack/adapters/lsp/__init__.py +27 -0
- crackerjack/adapters/lsp/_base.py +194 -0
- crackerjack/adapters/lsp/_client.py +358 -0
- crackerjack/adapters/lsp/_manager.py +193 -0
- crackerjack/adapters/lsp/skylos.py +283 -0
- crackerjack/adapters/lsp/zuban.py +557 -0
- crackerjack/adapters/refactor/README.md +59 -0
- crackerjack/adapters/refactor/__init__.py +12 -0
- crackerjack/adapters/refactor/creosote.py +318 -0
- crackerjack/adapters/refactor/refurb.py +406 -0
- crackerjack/adapters/refactor/skylos.py +494 -0
- crackerjack/adapters/sast/README.md +132 -0
- crackerjack/adapters/sast/__init__.py +32 -0
- crackerjack/adapters/sast/_base.py +201 -0
- crackerjack/adapters/sast/bandit.py +423 -0
- crackerjack/adapters/sast/pyscn.py +405 -0
- crackerjack/adapters/sast/semgrep.py +241 -0
- crackerjack/adapters/security/README.md +111 -0
- crackerjack/adapters/security/__init__.py +17 -0
- crackerjack/adapters/security/gitleaks.py +339 -0
- crackerjack/adapters/type/README.md +52 -0
- crackerjack/adapters/type/__init__.py +12 -0
- crackerjack/adapters/type/pyrefly.py +402 -0
- crackerjack/adapters/type/ty.py +402 -0
- crackerjack/adapters/type/zuban.py +522 -0
- crackerjack/adapters/utility/README.md +51 -0
- crackerjack/adapters/utility/__init__.py +10 -0
- crackerjack/adapters/utility/checks.py +884 -0
- crackerjack/agents/README.md +264 -0
- crackerjack/agents/__init__.py +66 -0
- crackerjack/agents/architect_agent.py +238 -0
- crackerjack/agents/base.py +167 -0
- crackerjack/agents/claude_code_bridge.py +641 -0
- crackerjack/agents/coordinator.py +600 -0
- crackerjack/agents/documentation_agent.py +520 -0
- crackerjack/agents/dry_agent.py +585 -0
- crackerjack/agents/enhanced_coordinator.py +279 -0
- crackerjack/agents/enhanced_proactive_agent.py +185 -0
- crackerjack/agents/error_middleware.py +53 -0
- crackerjack/agents/formatting_agent.py +230 -0
- crackerjack/agents/helpers/__init__.py +9 -0
- crackerjack/agents/helpers/performance/__init__.py +22 -0
- crackerjack/agents/helpers/performance/performance_ast_analyzer.py +357 -0
- crackerjack/agents/helpers/performance/performance_pattern_detector.py +909 -0
- crackerjack/agents/helpers/performance/performance_recommender.py +572 -0
- crackerjack/agents/helpers/refactoring/__init__.py +22 -0
- crackerjack/agents/helpers/refactoring/code_transformer.py +536 -0
- crackerjack/agents/helpers/refactoring/complexity_analyzer.py +344 -0
- crackerjack/agents/helpers/refactoring/dead_code_detector.py +437 -0
- crackerjack/agents/helpers/test_creation/__init__.py +19 -0
- crackerjack/agents/helpers/test_creation/test_ast_analyzer.py +216 -0
- crackerjack/agents/helpers/test_creation/test_coverage_analyzer.py +643 -0
- crackerjack/agents/helpers/test_creation/test_template_generator.py +1031 -0
- crackerjack/agents/import_optimization_agent.py +1181 -0
- crackerjack/agents/performance_agent.py +325 -0
- crackerjack/agents/performance_helpers.py +205 -0
- crackerjack/agents/proactive_agent.py +55 -0
- crackerjack/agents/refactoring_agent.py +511 -0
- crackerjack/agents/refactoring_helpers.py +247 -0
- crackerjack/agents/security_agent.py +793 -0
- crackerjack/agents/semantic_agent.py +479 -0
- crackerjack/agents/semantic_helpers.py +356 -0
- crackerjack/agents/test_creation_agent.py +570 -0
- crackerjack/agents/test_specialist_agent.py +526 -0
- crackerjack/agents/tracker.py +110 -0
- crackerjack/api.py +647 -0
- crackerjack/cli/README.md +394 -0
- crackerjack/cli/__init__.py +24 -0
- crackerjack/cli/cache_handlers.py +209 -0
- crackerjack/cli/cache_handlers_enhanced.py +680 -0
- crackerjack/cli/facade.py +162 -0
- crackerjack/cli/formatting.py +13 -0
- crackerjack/cli/handlers/__init__.py +85 -0
- crackerjack/cli/handlers/advanced.py +103 -0
- crackerjack/cli/handlers/ai_features.py +62 -0
- crackerjack/cli/handlers/analytics.py +479 -0
- crackerjack/cli/handlers/changelog.py +271 -0
- crackerjack/cli/handlers/config_handlers.py +16 -0
- crackerjack/cli/handlers/coverage.py +84 -0
- crackerjack/cli/handlers/documentation.py +280 -0
- crackerjack/cli/handlers/main_handlers.py +497 -0
- crackerjack/cli/handlers/monitoring.py +371 -0
- crackerjack/cli/handlers.py +700 -0
- crackerjack/cli/interactive.py +488 -0
- crackerjack/cli/options.py +1216 -0
- crackerjack/cli/semantic_handlers.py +292 -0
- crackerjack/cli/utils.py +19 -0
- crackerjack/cli/version.py +19 -0
- crackerjack/code_cleaner.py +1307 -0
- crackerjack/config/README.md +472 -0
- crackerjack/config/__init__.py +275 -0
- crackerjack/config/global_lock_config.py +207 -0
- crackerjack/config/hooks.py +390 -0
- crackerjack/config/loader.py +239 -0
- crackerjack/config/settings.py +141 -0
- crackerjack/config/tool_commands.py +331 -0
- crackerjack/core/README.md +393 -0
- crackerjack/core/__init__.py +0 -0
- crackerjack/core/async_workflow_orchestrator.py +738 -0
- crackerjack/core/autofix_coordinator.py +282 -0
- crackerjack/core/container.py +105 -0
- crackerjack/core/enhanced_container.py +583 -0
- crackerjack/core/file_lifecycle.py +472 -0
- crackerjack/core/performance.py +244 -0
- crackerjack/core/performance_monitor.py +357 -0
- crackerjack/core/phase_coordinator.py +1227 -0
- crackerjack/core/proactive_workflow.py +267 -0
- crackerjack/core/resource_manager.py +425 -0
- crackerjack/core/retry.py +275 -0
- crackerjack/core/service_watchdog.py +601 -0
- crackerjack/core/session_coordinator.py +239 -0
- crackerjack/core/timeout_manager.py +563 -0
- crackerjack/core/websocket_lifecycle.py +410 -0
- crackerjack/core/workflow/__init__.py +21 -0
- crackerjack/core/workflow/workflow_ai_coordinator.py +863 -0
- crackerjack/core/workflow/workflow_event_orchestrator.py +1107 -0
- crackerjack/core/workflow/workflow_issue_parser.py +714 -0
- crackerjack/core/workflow/workflow_phase_executor.py +1158 -0
- crackerjack/core/workflow/workflow_security_gates.py +400 -0
- crackerjack/core/workflow_orchestrator.py +2243 -0
- crackerjack/data/README.md +11 -0
- crackerjack/data/__init__.py +8 -0
- crackerjack/data/models.py +79 -0
- crackerjack/data/repository.py +210 -0
- crackerjack/decorators/README.md +180 -0
- crackerjack/decorators/__init__.py +35 -0
- crackerjack/decorators/error_handling.py +649 -0
- crackerjack/decorators/error_handling_decorators.py +334 -0
- crackerjack/decorators/helpers.py +58 -0
- crackerjack/decorators/patterns.py +281 -0
- crackerjack/decorators/utils.py +58 -0
- crackerjack/docs/INDEX.md +11 -0
- crackerjack/docs/README.md +11 -0
- crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
- crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
- crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
- crackerjack/docs/generated/api/SERVICES.md +1252 -0
- crackerjack/documentation/README.md +11 -0
- crackerjack/documentation/__init__.py +31 -0
- crackerjack/documentation/ai_templates.py +756 -0
- crackerjack/documentation/dual_output_generator.py +767 -0
- crackerjack/documentation/mkdocs_integration.py +518 -0
- crackerjack/documentation/reference_generator.py +1065 -0
- crackerjack/dynamic_config.py +678 -0
- crackerjack/errors.py +378 -0
- crackerjack/events/README.md +11 -0
- crackerjack/events/__init__.py +16 -0
- crackerjack/events/telemetry.py +175 -0
- crackerjack/events/workflow_bus.py +346 -0
- crackerjack/exceptions/README.md +301 -0
- crackerjack/exceptions/__init__.py +5 -0
- crackerjack/exceptions/config.py +4 -0
- crackerjack/exceptions/tool_execution_error.py +245 -0
- crackerjack/executors/README.md +591 -0
- crackerjack/executors/__init__.py +13 -0
- crackerjack/executors/async_hook_executor.py +938 -0
- crackerjack/executors/cached_hook_executor.py +316 -0
- crackerjack/executors/hook_executor.py +1295 -0
- crackerjack/executors/hook_lock_manager.py +708 -0
- crackerjack/executors/individual_hook_executor.py +739 -0
- crackerjack/executors/lsp_aware_hook_executor.py +349 -0
- crackerjack/executors/progress_hook_executor.py +282 -0
- crackerjack/executors/tool_proxy.py +433 -0
- crackerjack/hooks/README.md +485 -0
- crackerjack/hooks/lsp_hook.py +93 -0
- crackerjack/intelligence/README.md +557 -0
- crackerjack/intelligence/__init__.py +37 -0
- crackerjack/intelligence/adaptive_learning.py +693 -0
- crackerjack/intelligence/agent_orchestrator.py +485 -0
- crackerjack/intelligence/agent_registry.py +377 -0
- crackerjack/intelligence/agent_selector.py +439 -0
- crackerjack/intelligence/integration.py +250 -0
- crackerjack/interactive.py +719 -0
- crackerjack/managers/README.md +369 -0
- crackerjack/managers/__init__.py +11 -0
- crackerjack/managers/async_hook_manager.py +135 -0
- crackerjack/managers/hook_manager.py +585 -0
- crackerjack/managers/publish_manager.py +631 -0
- crackerjack/managers/test_command_builder.py +391 -0
- crackerjack/managers/test_executor.py +474 -0
- crackerjack/managers/test_manager.py +1357 -0
- crackerjack/managers/test_progress.py +187 -0
- crackerjack/mcp/README.md +374 -0
- crackerjack/mcp/__init__.py +0 -0
- crackerjack/mcp/cache.py +352 -0
- crackerjack/mcp/client_runner.py +121 -0
- crackerjack/mcp/context.py +802 -0
- crackerjack/mcp/dashboard.py +657 -0
- crackerjack/mcp/enhanced_progress_monitor.py +493 -0
- crackerjack/mcp/file_monitor.py +394 -0
- crackerjack/mcp/progress_components.py +607 -0
- crackerjack/mcp/progress_monitor.py +1016 -0
- crackerjack/mcp/rate_limiter.py +336 -0
- crackerjack/mcp/server.py +24 -0
- crackerjack/mcp/server_core.py +526 -0
- crackerjack/mcp/service_watchdog.py +505 -0
- crackerjack/mcp/state.py +407 -0
- crackerjack/mcp/task_manager.py +259 -0
- crackerjack/mcp/tools/README.md +27 -0
- crackerjack/mcp/tools/__init__.py +19 -0
- crackerjack/mcp/tools/core_tools.py +469 -0
- crackerjack/mcp/tools/error_analyzer.py +283 -0
- crackerjack/mcp/tools/execution_tools.py +384 -0
- crackerjack/mcp/tools/intelligence_tool_registry.py +46 -0
- crackerjack/mcp/tools/intelligence_tools.py +264 -0
- crackerjack/mcp/tools/monitoring_tools.py +628 -0
- crackerjack/mcp/tools/proactive_tools.py +367 -0
- crackerjack/mcp/tools/progress_tools.py +222 -0
- crackerjack/mcp/tools/semantic_tools.py +584 -0
- crackerjack/mcp/tools/utility_tools.py +358 -0
- crackerjack/mcp/tools/workflow_executor.py +699 -0
- crackerjack/mcp/websocket/README.md +31 -0
- crackerjack/mcp/websocket/__init__.py +14 -0
- crackerjack/mcp/websocket/app.py +54 -0
- crackerjack/mcp/websocket/endpoints.py +492 -0
- crackerjack/mcp/websocket/event_bridge.py +188 -0
- crackerjack/mcp/websocket/jobs.py +406 -0
- crackerjack/mcp/websocket/monitoring/__init__.py +25 -0
- crackerjack/mcp/websocket/monitoring/api/__init__.py +19 -0
- crackerjack/mcp/websocket/monitoring/api/dependencies.py +141 -0
- crackerjack/mcp/websocket/monitoring/api/heatmap.py +154 -0
- crackerjack/mcp/websocket/monitoring/api/intelligence.py +199 -0
- crackerjack/mcp/websocket/monitoring/api/metrics.py +203 -0
- crackerjack/mcp/websocket/monitoring/api/telemetry.py +101 -0
- crackerjack/mcp/websocket/monitoring/dashboard.py +18 -0
- crackerjack/mcp/websocket/monitoring/factory.py +109 -0
- crackerjack/mcp/websocket/monitoring/filters.py +10 -0
- crackerjack/mcp/websocket/monitoring/metrics.py +64 -0
- crackerjack/mcp/websocket/monitoring/models.py +90 -0
- crackerjack/mcp/websocket/monitoring/utils.py +171 -0
- crackerjack/mcp/websocket/monitoring/websocket_manager.py +78 -0
- crackerjack/mcp/websocket/monitoring/websockets/__init__.py +17 -0
- crackerjack/mcp/websocket/monitoring/websockets/dependencies.py +126 -0
- crackerjack/mcp/websocket/monitoring/websockets/heatmap.py +176 -0
- crackerjack/mcp/websocket/monitoring/websockets/intelligence.py +291 -0
- crackerjack/mcp/websocket/monitoring/websockets/metrics.py +291 -0
- crackerjack/mcp/websocket/monitoring_endpoints.py +21 -0
- crackerjack/mcp/websocket/server.py +174 -0
- crackerjack/mcp/websocket/websocket_handler.py +276 -0
- crackerjack/mcp/websocket_server.py +10 -0
- crackerjack/models/README.md +308 -0
- crackerjack/models/__init__.py +40 -0
- crackerjack/models/config.py +730 -0
- crackerjack/models/config_adapter.py +265 -0
- crackerjack/models/protocols.py +1535 -0
- crackerjack/models/pydantic_models.py +320 -0
- crackerjack/models/qa_config.py +145 -0
- crackerjack/models/qa_results.py +134 -0
- crackerjack/models/resource_protocols.py +299 -0
- crackerjack/models/results.py +35 -0
- crackerjack/models/semantic_models.py +258 -0
- crackerjack/models/task.py +173 -0
- crackerjack/models/test_models.py +60 -0
- crackerjack/monitoring/README.md +11 -0
- crackerjack/monitoring/__init__.py +0 -0
- crackerjack/monitoring/ai_agent_watchdog.py +405 -0
- crackerjack/monitoring/metrics_collector.py +427 -0
- crackerjack/monitoring/regression_prevention.py +580 -0
- crackerjack/monitoring/websocket_server.py +406 -0
- crackerjack/orchestration/README.md +340 -0
- crackerjack/orchestration/__init__.py +43 -0
- crackerjack/orchestration/advanced_orchestrator.py +894 -0
- crackerjack/orchestration/cache/README.md +312 -0
- crackerjack/orchestration/cache/__init__.py +37 -0
- crackerjack/orchestration/cache/memory_cache.py +338 -0
- crackerjack/orchestration/cache/tool_proxy_cache.py +340 -0
- crackerjack/orchestration/config.py +297 -0
- crackerjack/orchestration/coverage_improvement.py +180 -0
- crackerjack/orchestration/execution_strategies.py +361 -0
- crackerjack/orchestration/hook_orchestrator.py +1398 -0
- crackerjack/orchestration/strategies/README.md +401 -0
- crackerjack/orchestration/strategies/__init__.py +39 -0
- crackerjack/orchestration/strategies/adaptive_strategy.py +630 -0
- crackerjack/orchestration/strategies/parallel_strategy.py +237 -0
- crackerjack/orchestration/strategies/sequential_strategy.py +299 -0
- crackerjack/orchestration/test_progress_streamer.py +647 -0
- crackerjack/plugins/README.md +11 -0
- crackerjack/plugins/__init__.py +15 -0
- crackerjack/plugins/base.py +200 -0
- crackerjack/plugins/hooks.py +254 -0
- crackerjack/plugins/loader.py +335 -0
- crackerjack/plugins/managers.py +264 -0
- crackerjack/py313.py +191 -0
- crackerjack/security/README.md +11 -0
- crackerjack/security/__init__.py +0 -0
- crackerjack/security/audit.py +197 -0
- crackerjack/services/README.md +374 -0
- crackerjack/services/__init__.py +9 -0
- crackerjack/services/ai/README.md +295 -0
- crackerjack/services/ai/__init__.py +7 -0
- crackerjack/services/ai/advanced_optimizer.py +878 -0
- crackerjack/services/ai/contextual_ai_assistant.py +542 -0
- crackerjack/services/ai/embeddings.py +444 -0
- crackerjack/services/ai/intelligent_commit.py +328 -0
- crackerjack/services/ai/predictive_analytics.py +510 -0
- crackerjack/services/anomaly_detector.py +392 -0
- crackerjack/services/api_extractor.py +617 -0
- crackerjack/services/backup_service.py +467 -0
- crackerjack/services/bounded_status_operations.py +530 -0
- crackerjack/services/cache.py +369 -0
- crackerjack/services/changelog_automation.py +399 -0
- crackerjack/services/command_execution_service.py +305 -0
- crackerjack/services/config_integrity.py +132 -0
- crackerjack/services/config_merge.py +546 -0
- crackerjack/services/config_service.py +198 -0
- crackerjack/services/config_template.py +493 -0
- crackerjack/services/coverage_badge_service.py +173 -0
- crackerjack/services/coverage_ratchet.py +381 -0
- crackerjack/services/debug.py +733 -0
- crackerjack/services/dependency_analyzer.py +460 -0
- crackerjack/services/dependency_monitor.py +622 -0
- crackerjack/services/documentation_generator.py +493 -0
- crackerjack/services/documentation_service.py +704 -0
- crackerjack/services/enhanced_filesystem.py +497 -0
- crackerjack/services/enterprise_optimizer.py +865 -0
- crackerjack/services/error_pattern_analyzer.py +676 -0
- crackerjack/services/file_filter.py +221 -0
- crackerjack/services/file_hasher.py +149 -0
- crackerjack/services/file_io_service.py +361 -0
- crackerjack/services/file_modifier.py +615 -0
- crackerjack/services/filesystem.py +381 -0
- crackerjack/services/git.py +422 -0
- crackerjack/services/health_metrics.py +615 -0
- crackerjack/services/heatmap_generator.py +744 -0
- crackerjack/services/incremental_executor.py +380 -0
- crackerjack/services/initialization.py +823 -0
- crackerjack/services/input_validator.py +668 -0
- crackerjack/services/intelligent_commit.py +327 -0
- crackerjack/services/log_manager.py +289 -0
- crackerjack/services/logging.py +228 -0
- crackerjack/services/lsp_client.py +628 -0
- crackerjack/services/memory_optimizer.py +414 -0
- crackerjack/services/metrics.py +587 -0
- crackerjack/services/monitoring/README.md +30 -0
- crackerjack/services/monitoring/__init__.py +9 -0
- crackerjack/services/monitoring/dependency_monitor.py +678 -0
- crackerjack/services/monitoring/error_pattern_analyzer.py +676 -0
- crackerjack/services/monitoring/health_metrics.py +716 -0
- crackerjack/services/monitoring/metrics.py +587 -0
- crackerjack/services/monitoring/performance_benchmarks.py +410 -0
- crackerjack/services/monitoring/performance_cache.py +388 -0
- crackerjack/services/monitoring/performance_monitor.py +569 -0
- crackerjack/services/parallel_executor.py +527 -0
- crackerjack/services/pattern_cache.py +333 -0
- crackerjack/services/pattern_detector.py +478 -0
- crackerjack/services/patterns/__init__.py +142 -0
- crackerjack/services/patterns/agents.py +107 -0
- crackerjack/services/patterns/code/__init__.py +15 -0
- crackerjack/services/patterns/code/detection.py +118 -0
- crackerjack/services/patterns/code/imports.py +107 -0
- crackerjack/services/patterns/code/paths.py +159 -0
- crackerjack/services/patterns/code/performance.py +119 -0
- crackerjack/services/patterns/code/replacement.py +36 -0
- crackerjack/services/patterns/core.py +212 -0
- crackerjack/services/patterns/documentation/__init__.py +14 -0
- crackerjack/services/patterns/documentation/badges_markdown.py +96 -0
- crackerjack/services/patterns/documentation/comments_blocks.py +83 -0
- crackerjack/services/patterns/documentation/docstrings.py +89 -0
- crackerjack/services/patterns/formatting.py +226 -0
- crackerjack/services/patterns/operations.py +339 -0
- crackerjack/services/patterns/security/__init__.py +23 -0
- crackerjack/services/patterns/security/code_injection.py +122 -0
- crackerjack/services/patterns/security/credentials.py +190 -0
- crackerjack/services/patterns/security/path_traversal.py +221 -0
- crackerjack/services/patterns/security/unsafe_operations.py +216 -0
- crackerjack/services/patterns/templates.py +62 -0
- crackerjack/services/patterns/testing/__init__.py +18 -0
- crackerjack/services/patterns/testing/error_patterns.py +107 -0
- crackerjack/services/patterns/testing/pytest_output.py +126 -0
- crackerjack/services/patterns/tool_output/__init__.py +16 -0
- crackerjack/services/patterns/tool_output/bandit.py +72 -0
- crackerjack/services/patterns/tool_output/other.py +97 -0
- crackerjack/services/patterns/tool_output/pyright.py +67 -0
- crackerjack/services/patterns/tool_output/ruff.py +44 -0
- crackerjack/services/patterns/url_sanitization.py +114 -0
- crackerjack/services/patterns/utilities.py +42 -0
- crackerjack/services/patterns/utils.py +339 -0
- crackerjack/services/patterns/validation.py +46 -0
- crackerjack/services/patterns/versioning.py +62 -0
- crackerjack/services/predictive_analytics.py +523 -0
- crackerjack/services/profiler.py +280 -0
- crackerjack/services/quality/README.md +415 -0
- crackerjack/services/quality/__init__.py +11 -0
- crackerjack/services/quality/anomaly_detector.py +392 -0
- crackerjack/services/quality/pattern_cache.py +333 -0
- crackerjack/services/quality/pattern_detector.py +479 -0
- crackerjack/services/quality/qa_orchestrator.py +491 -0
- crackerjack/services/quality/quality_baseline.py +395 -0
- crackerjack/services/quality/quality_baseline_enhanced.py +649 -0
- crackerjack/services/quality/quality_intelligence.py +949 -0
- crackerjack/services/regex_patterns.py +58 -0
- crackerjack/services/regex_utils.py +483 -0
- crackerjack/services/secure_path_utils.py +524 -0
- crackerjack/services/secure_status_formatter.py +450 -0
- crackerjack/services/secure_subprocess.py +635 -0
- crackerjack/services/security.py +239 -0
- crackerjack/services/security_logger.py +495 -0
- crackerjack/services/server_manager.py +411 -0
- crackerjack/services/smart_scheduling.py +167 -0
- crackerjack/services/status_authentication.py +460 -0
- crackerjack/services/status_security_manager.py +315 -0
- crackerjack/services/terminal_utils.py +0 -0
- crackerjack/services/thread_safe_status_collector.py +441 -0
- crackerjack/services/tool_filter.py +368 -0
- crackerjack/services/tool_version_service.py +43 -0
- crackerjack/services/unified_config.py +115 -0
- crackerjack/services/validation_rate_limiter.py +220 -0
- crackerjack/services/vector_store.py +689 -0
- crackerjack/services/version_analyzer.py +461 -0
- crackerjack/services/version_checker.py +223 -0
- crackerjack/services/websocket_resource_limiter.py +438 -0
- crackerjack/services/zuban_lsp_service.py +391 -0
- crackerjack/slash_commands/README.md +11 -0
- crackerjack/slash_commands/__init__.py +59 -0
- crackerjack/slash_commands/init.md +112 -0
- crackerjack/slash_commands/run.md +197 -0
- crackerjack/slash_commands/status.md +127 -0
- crackerjack/tools/README.md +11 -0
- crackerjack/tools/__init__.py +30 -0
- crackerjack/tools/_git_utils.py +105 -0
- crackerjack/tools/check_added_large_files.py +139 -0
- crackerjack/tools/check_ast.py +105 -0
- crackerjack/tools/check_json.py +103 -0
- crackerjack/tools/check_jsonschema.py +297 -0
- crackerjack/tools/check_toml.py +103 -0
- crackerjack/tools/check_yaml.py +110 -0
- crackerjack/tools/codespell_wrapper.py +72 -0
- crackerjack/tools/end_of_file_fixer.py +202 -0
- crackerjack/tools/format_json.py +128 -0
- crackerjack/tools/mdformat_wrapper.py +114 -0
- crackerjack/tools/trailing_whitespace.py +198 -0
- crackerjack/tools/validate_input_validator_patterns.py +236 -0
- crackerjack/tools/validate_regex_patterns.py +188 -0
- crackerjack/ui/README.md +11 -0
- crackerjack/ui/__init__.py +1 -0
- crackerjack/ui/dashboard_renderer.py +28 -0
- crackerjack/ui/templates/README.md +11 -0
- crackerjack/utils/console_utils.py +13 -0
- crackerjack/utils/dependency_guard.py +230 -0
- crackerjack/utils/retry_utils.py +275 -0
- crackerjack/workflows/README.md +590 -0
- crackerjack/workflows/__init__.py +46 -0
- crackerjack/workflows/actions.py +811 -0
- crackerjack/workflows/auto_fix.py +444 -0
- crackerjack/workflows/container_builder.py +499 -0
- crackerjack/workflows/definitions.py +443 -0
- crackerjack/workflows/engine.py +177 -0
- crackerjack/workflows/event_bridge.py +242 -0
- crackerjack-0.45.2.dist-info/METADATA +1678 -0
- crackerjack-0.45.2.dist-info/RECORD +478 -0
- {crackerjack-0.18.2.dist-info → crackerjack-0.45.2.dist-info}/WHEEL +1 -1
- crackerjack-0.45.2.dist-info/entry_points.txt +2 -0
- crackerjack/.gitignore +0 -14
- crackerjack/.libcst.codemod.yaml +0 -18
- crackerjack/.pdm.toml +0 -1
- crackerjack/.pre-commit-config.yaml +0 -91
- crackerjack/.pytest_cache/.gitignore +0 -2
- crackerjack/.pytest_cache/CACHEDIR.TAG +0 -4
- crackerjack/.pytest_cache/README.md +0 -8
- crackerjack/.pytest_cache/v/cache/nodeids +0 -1
- crackerjack/.pytest_cache/v/cache/stepwise +0 -1
- crackerjack/.ruff_cache/.gitignore +0 -1
- crackerjack/.ruff_cache/0.1.11/3256171999636029978 +0 -0
- crackerjack/.ruff_cache/0.1.14/602324811142551221 +0 -0
- crackerjack/.ruff_cache/0.1.4/10355199064880463147 +0 -0
- crackerjack/.ruff_cache/0.1.6/15140459877605758699 +0 -0
- crackerjack/.ruff_cache/0.1.7/1790508110482614856 +0 -0
- crackerjack/.ruff_cache/0.1.9/17041001205004563469 +0 -0
- crackerjack/.ruff_cache/0.11.2/4070660268492669020 +0 -0
- crackerjack/.ruff_cache/0.11.3/9818742842212983150 +0 -0
- crackerjack/.ruff_cache/0.11.4/9818742842212983150 +0 -0
- crackerjack/.ruff_cache/0.11.6/3557596832929915217 +0 -0
- crackerjack/.ruff_cache/0.11.7/10386934055395314831 +0 -0
- crackerjack/.ruff_cache/0.11.7/3557596832929915217 +0 -0
- crackerjack/.ruff_cache/0.11.8/530407680854991027 +0 -0
- crackerjack/.ruff_cache/0.2.0/10047773857155985907 +0 -0
- crackerjack/.ruff_cache/0.2.1/8522267973936635051 +0 -0
- crackerjack/.ruff_cache/0.2.2/18053836298936336950 +0 -0
- crackerjack/.ruff_cache/0.3.0/12548816621480535786 +0 -0
- crackerjack/.ruff_cache/0.3.3/11081883392474770722 +0 -0
- crackerjack/.ruff_cache/0.3.4/676973378459347183 +0 -0
- crackerjack/.ruff_cache/0.3.5/16311176246009842383 +0 -0
- crackerjack/.ruff_cache/0.5.7/1493622539551733492 +0 -0
- crackerjack/.ruff_cache/0.5.7/6231957614044513175 +0 -0
- crackerjack/.ruff_cache/0.5.7/9932762556785938009 +0 -0
- crackerjack/.ruff_cache/0.6.0/11982804814124138945 +0 -0
- crackerjack/.ruff_cache/0.6.0/12055761203849489982 +0 -0
- crackerjack/.ruff_cache/0.6.2/1206147804896221174 +0 -0
- crackerjack/.ruff_cache/0.6.4/1206147804896221174 +0 -0
- crackerjack/.ruff_cache/0.6.5/1206147804896221174 +0 -0
- crackerjack/.ruff_cache/0.6.7/3657366982708166874 +0 -0
- crackerjack/.ruff_cache/0.6.9/285614542852677309 +0 -0
- crackerjack/.ruff_cache/0.7.1/1024065805990144819 +0 -0
- crackerjack/.ruff_cache/0.7.1/285614542852677309 +0 -0
- crackerjack/.ruff_cache/0.7.3/16061516852537040135 +0 -0
- crackerjack/.ruff_cache/0.8.4/16354268377385700367 +0 -0
- crackerjack/.ruff_cache/0.9.10/12813592349865671909 +0 -0
- crackerjack/.ruff_cache/0.9.10/923908772239632759 +0 -0
- crackerjack/.ruff_cache/0.9.3/13948373885254993391 +0 -0
- crackerjack/.ruff_cache/0.9.9/12813592349865671909 +0 -0
- crackerjack/.ruff_cache/0.9.9/8843823720003377982 +0 -0
- crackerjack/.ruff_cache/CACHEDIR.TAG +0 -1
- crackerjack/crackerjack.py +0 -855
- crackerjack/pyproject.toml +0 -214
- crackerjack-0.18.2.dist-info/METADATA +0 -420
- crackerjack-0.18.2.dist-info/RECORD +0 -59
- crackerjack-0.18.2.dist-info/entry_points.txt +0 -4
- {crackerjack-0.18.2.dist-info → crackerjack-0.45.2.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,1181 @@
|
|
|
1
|
+
import ast
|
|
2
|
+
import subprocess
|
|
3
|
+
import typing as t
|
|
4
|
+
from collections import defaultdict
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from ..services.regex_patterns import SAFE_PATTERNS
|
|
8
|
+
from .base import (
|
|
9
|
+
AgentContext,
|
|
10
|
+
FixResult,
|
|
11
|
+
Issue,
|
|
12
|
+
IssueType,
|
|
13
|
+
SubAgent,
|
|
14
|
+
agent_registry,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ImportAnalysis(t.NamedTuple):
    """Per-file result of an import analysis pass.

    Produced by ImportOptimizationAgent.analyze_file. For files that do not
    exist, are not ``.py``, or fail to parse, every list field is empty.
    """

    # Path of the analyzed source file.
    file_path: Path
    # NOTE(review): presumably modules imported in both `import x` and
    # `from x import ...` styles — populated outside this view; confirm.
    mixed_imports: list[str]
    # NOTE(review): presumably duplicated/shadowed imports — confirm against
    # the analyzer that fills this field.
    redundant_imports: list[str]
    # Import names reported unused by vulture (see _detect_unused_imports).
    unused_imports: list[str]
    # Suggested import clean-ups from the full analysis pass.
    optimization_opportunities: list[str]
    # Import style/organization violations found for the file.
    import_violations: list[str]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class ImportOptimizationAgent(SubAgent):
|
|
28
|
+
name = "import_optimization"
|
|
29
|
+
|
|
30
|
+
def __init__(self, context: AgentContext) -> None:
|
|
31
|
+
super().__init__(context)
|
|
32
|
+
|
|
33
|
+
def log(self, message: str, level: str = "INFO") -> None:
|
|
34
|
+
print(f"[{level}] ImportOptimizationAgent: {message}")
|
|
35
|
+
|
|
36
|
+
def get_supported_types(self) -> set[IssueType]:
|
|
37
|
+
return {IssueType.IMPORT_ERROR, IssueType.DEAD_CODE}
|
|
38
|
+
|
|
39
|
+
async def can_handle(self, issue: Issue) -> float:
|
|
40
|
+
if issue.type in self.get_supported_types():
|
|
41
|
+
return 0.85
|
|
42
|
+
|
|
43
|
+
description_lower = issue.message.lower()
|
|
44
|
+
import_keywords = [
|
|
45
|
+
"import",
|
|
46
|
+
"unused import",
|
|
47
|
+
"redundant import",
|
|
48
|
+
"import style",
|
|
49
|
+
"mixed import",
|
|
50
|
+
"import organization",
|
|
51
|
+
"from import",
|
|
52
|
+
"star import",
|
|
53
|
+
"unused variable",
|
|
54
|
+
"defined but never used",
|
|
55
|
+
]
|
|
56
|
+
if any(keyword in description_lower for keyword in import_keywords):
|
|
57
|
+
return 0.8
|
|
58
|
+
|
|
59
|
+
pattern_obj = SAFE_PATTERNS["match_error_code_patterns"]
|
|
60
|
+
if pattern_obj.test(issue.message):
|
|
61
|
+
return 0.85
|
|
62
|
+
|
|
63
|
+
return 0.0
|
|
64
|
+
|
|
65
|
+
async def analyze_and_fix(self, issue: Issue) -> FixResult:
|
|
66
|
+
return await self.fix_issue(issue)
|
|
67
|
+
|
|
68
|
+
async def analyze_file(self, file_path: Path) -> ImportAnalysis:
|
|
69
|
+
if not self._is_valid_python_file(file_path):
|
|
70
|
+
return self._create_empty_import_analysis(file_path)
|
|
71
|
+
|
|
72
|
+
return await self._parse_and_analyze_file(file_path)
|
|
73
|
+
|
|
74
|
+
def _is_valid_python_file(self, file_path: Path) -> bool:
|
|
75
|
+
return file_path.exists() and file_path.suffix == ".py"
|
|
76
|
+
|
|
77
|
+
def _create_empty_import_analysis(self, file_path: Path) -> ImportAnalysis:
|
|
78
|
+
return ImportAnalysis(file_path, [], [], [], [], [])
|
|
79
|
+
|
|
80
|
+
async def _parse_and_analyze_file(self, file_path: Path) -> ImportAnalysis:
|
|
81
|
+
try:
|
|
82
|
+
with file_path.open(encoding="utf-8") as f:
|
|
83
|
+
content = f.read()
|
|
84
|
+
tree = ast.parse(content)
|
|
85
|
+
except (SyntaxError, OSError) as e:
|
|
86
|
+
return self._handle_parse_error(file_path, e)
|
|
87
|
+
|
|
88
|
+
unused_imports = await self._detect_unused_imports(file_path)
|
|
89
|
+
|
|
90
|
+
return self._analyze_imports(file_path, tree, content, unused_imports)
|
|
91
|
+
|
|
92
|
+
def _handle_parse_error(self, file_path: Path, e: Exception) -> ImportAnalysis:
|
|
93
|
+
self.log(f"Could not parse {file_path}: {e}", level="WARNING")
|
|
94
|
+
return ImportAnalysis(file_path, [], [], [], [], [])
|
|
95
|
+
|
|
96
|
+
async def _detect_unused_imports(self, file_path: Path) -> list[str]:
|
|
97
|
+
try:
|
|
98
|
+
result = self._run_vulture_analysis(file_path)
|
|
99
|
+
return self._extract_unused_imports_from_result(result)
|
|
100
|
+
except (
|
|
101
|
+
subprocess.TimeoutExpired,
|
|
102
|
+
subprocess.SubprocessError,
|
|
103
|
+
FileNotFoundError,
|
|
104
|
+
):
|
|
105
|
+
return []
|
|
106
|
+
|
|
107
|
+
def _run_vulture_analysis(
|
|
108
|
+
self, file_path: Path
|
|
109
|
+
) -> subprocess.CompletedProcess[str]:
|
|
110
|
+
return subprocess.run(
|
|
111
|
+
["uv", "run", "vulture", "--min-confidence", "80", str(file_path)],
|
|
112
|
+
capture_output=True,
|
|
113
|
+
text=True,
|
|
114
|
+
timeout=30,
|
|
115
|
+
cwd=self.context.project_path,
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
def _extract_unused_imports_from_result(
    self, result: subprocess.CompletedProcess[str]
) -> list[str]:
    """Parse vulture stdout into the list of unused import names."""
    if not self._is_valid_vulture_result(result):
        return []
    candidates = (
        self._extract_import_name_from_line(raw)
        for raw in result.stdout.strip().split("\n")
    )
    return [name for name in candidates if name]
def _is_valid_vulture_result(
|
|
133
|
+
self, result: subprocess.CompletedProcess[str]
|
|
134
|
+
) -> bool:
|
|
135
|
+
return result.returncode == 0 and bool(result.stdout)
|
|
136
|
+
|
|
137
|
+
def _extract_import_name_from_line(self, line: str) -> str | None:
    """Pull the imported name out of one vulture output line, or None.

    Returns None for lines that do not mention an unused import, or that
    the shared SAFE_PATTERNS regex does not recognise.
    """
    # Cheap substring check first; the regex only runs on candidates.
    if not line or "unused import" not in line.lower():
        return None

    # SAFE_PATTERNS is a shared pre-validated pattern registry; apply()
    # presumably rewrites the line down to the bare name — TODO confirm.
    pattern_obj = SAFE_PATTERNS["extract_unused_import_name"]
    if pattern_obj.test(line):
        return pattern_obj.apply(line)
    return None
def _analyze_imports(
    self, file_path: Path, tree: ast.AST, content: str, unused_imports: list[str]
) -> ImportAnalysis:
    """Run the AST-side analysis and package everything as an ImportAnalysis."""
    results = self._perform_full_import_analysis(tree, content)
    return self._create_import_analysis(file_path, results, unused_imports)
def _create_import_analysis(
    self,
    file_path: Path,
    analysis_results: dict[str, list[str]],
    unused_imports: list[str],
) -> ImportAnalysis:
    """Assemble the final ImportAnalysis from the per-aspect result dict.

    *unused_imports* comes from the vulture subprocess pass rather than
    the AST analysis, so it is threaded in separately.
    """
    return ImportAnalysis(
        file_path=file_path,
        mixed_imports=analysis_results["mixed_imports"],
        redundant_imports=analysis_results["redundant_imports"],
        unused_imports=unused_imports,
        optimization_opportunities=analysis_results["optimization_opportunities"],
        import_violations=analysis_results["import_violations"],
    )
def _perform_full_import_analysis(
    self, tree: ast.AST, content: str
) -> dict[str, list[str]]:
    """Extract import records from *tree* and analyze them against *content*."""
    per_module, flat = self._extract_import_information(tree)
    return self._perform_import_analysis(per_module, flat, content)
def _perform_import_analysis(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
    all_imports: list[dict[str, t.Any]],
    content: str,
) -> dict[str, list[str]]:
    """Thin delegation layer over the pattern analyzer."""
    return self._analyze_import_patterns(module_imports, all_imports, content)
def _analyze_import_patterns(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
    all_imports: list[dict[str, t.Any]],
    content: str,
) -> dict[str, list[str]]:
    """Delegate to the per-aspect analyzer (kept for call-site compatibility)."""
    return self._analyze_import_aspects(module_imports, all_imports, content)
def _analyze_import_aspects(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
    all_imports: list[dict[str, t.Any]],
    content: str,
) -> dict[str, list[str]]:
    """Delegate to the per-aspect analyzer (kept for call-site compatibility)."""
    return self._analyze_each_import_aspect(module_imports, all_imports, content)
def _analyze_each_import_aspect(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
    all_imports: list[dict[str, t.Any]],
    content: str,
) -> dict[str, list[str]]:
    """Run every individual import check and bundle the results by name."""
    return {
        "mixed_imports": self._find_mixed_imports(module_imports),
        "redundant_imports": self._find_redundant_imports(all_imports),
        "optimization_opportunities": self._find_optimization_opportunities(
            module_imports
        ),
        "import_violations": self._find_import_violations(content, all_imports),
    }
def _extract_import_information(
    self, tree: ast.AST
) -> tuple[dict[str, list[dict[str, t.Any]]], list[dict[str, t.Any]]]:
    """Walk *tree* once and gather per-module and flat import records."""
    per_module: dict[str, list[dict[str, t.Any]]] = defaultdict(list)
    flat: list[dict[str, t.Any]] = []
    self._process_tree_imports(tree, flat, per_module)
    return per_module, flat
def _initialize_import_containers(
|
|
234
|
+
self,
|
|
235
|
+
) -> tuple[dict[str, list[dict[str, t.Any]]], list[dict[str, t.Any]]]:
|
|
236
|
+
module_imports: dict[str, list[dict[str, t.Any]]] = defaultdict(list)
|
|
237
|
+
all_imports: list[dict[str, t.Any]] = []
|
|
238
|
+
return module_imports, all_imports
|
|
239
|
+
|
|
240
|
+
def _process_tree_imports(
    self,
    tree: ast.AST,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Delegate the whole-tree walk (kept for call-site compatibility)."""
    self._process_all_nodes(tree, all_imports, module_imports)
def _process_all_nodes(
    self,
    tree: ast.AST,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Delegate the whole-tree walk (kept for call-site compatibility)."""
    self._process_import_statements_in_tree(tree, all_imports, module_imports)
def _process_import_statements_in_tree(
    self,
    tree: ast.AST,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Visit every node in *tree* and record any import statements found."""
    for candidate in ast.walk(tree):
        self._process_node_if_import(candidate, all_imports, module_imports)
def _process_node_if_import(
    self,
    node: ast.AST,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Dispatch one AST node to the matching import handler, if any."""
    if isinstance(node, ast.Import):
        self._process_standard_import(node, all_imports, module_imports)
        return
    # from-imports without a module (bare relative "from . import x")
    # are intentionally skipped here.
    if isinstance(node, ast.ImportFrom) and node.module:
        self._process_from_import(node, all_imports, module_imports)
def _process_standard_import(
    self,
    node: ast.Import,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Delegate per-alias recording of a plain ``import`` statement."""
    self._process_standard_import_aliases(node, all_imports, module_imports)
def _process_standard_import_aliases(
|
|
285
|
+
self,
|
|
286
|
+
node: ast.Import,
|
|
287
|
+
all_imports: list[dict[str, t.Any]],
|
|
288
|
+
module_imports: dict[str, list[dict[str, t.Any]]],
|
|
289
|
+
) -> None:
|
|
290
|
+
for alias in node.names:
|
|
291
|
+
import_info = {
|
|
292
|
+
"type": "standard",
|
|
293
|
+
"module": alias.name,
|
|
294
|
+
"name": alias.asname or alias.name,
|
|
295
|
+
"line": node.lineno,
|
|
296
|
+
}
|
|
297
|
+
all_imports.append(import_info)
|
|
298
|
+
base_module = alias.name.split(".")[0]
|
|
299
|
+
module_imports[base_module].append(import_info)
|
|
300
|
+
|
|
301
|
+
def _process_from_import(
    self,
    node: ast.ImportFrom,
    all_imports: list[dict[str, t.Any]],
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> None:
    """Delegate per-alias recording of a ``from ... import ...`` statement."""
    self._process_from_import_aliases(node, all_imports, module_imports)
def _process_from_import_aliases(
|
|
310
|
+
self,
|
|
311
|
+
node: ast.ImportFrom,
|
|
312
|
+
all_imports: list[dict[str, t.Any]],
|
|
313
|
+
module_imports: dict[str, list[dict[str, t.Any]]],
|
|
314
|
+
) -> None:
|
|
315
|
+
if node.module is None:
|
|
316
|
+
return # Skip relative imports without module name
|
|
317
|
+
|
|
318
|
+
for alias in node.names:
|
|
319
|
+
import_info = {
|
|
320
|
+
"type": "from",
|
|
321
|
+
"module": node.module,
|
|
322
|
+
"name": alias.name,
|
|
323
|
+
"asname": alias.asname,
|
|
324
|
+
"line": node.lineno,
|
|
325
|
+
}
|
|
326
|
+
all_imports.append(import_info)
|
|
327
|
+
base_module = node.module.split(".")[0]
|
|
328
|
+
module_imports[base_module].append(import_info)
|
|
329
|
+
|
|
330
|
+
def _find_mixed_imports(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> list[str]:
    """Modules imported with both ``import x`` and ``from x import ...``."""
    return list(self._check_mixed_imports_per_module(module_imports))
def _check_mixed_imports_per_module(
|
|
340
|
+
self,
|
|
341
|
+
module_imports: dict[str, list[dict[str, t.Any]]],
|
|
342
|
+
) -> list[str]:
|
|
343
|
+
mixed: list[str] = []
|
|
344
|
+
for module, imports in module_imports.items():
|
|
345
|
+
types = {imp["type"] for imp in imports}
|
|
346
|
+
if len(types) > 1:
|
|
347
|
+
mixed.append(module)
|
|
348
|
+
return mixed
|
|
349
|
+
|
|
350
|
+
def _find_redundant_imports(self, all_imports: list[dict[str, t.Any]]) -> list[str]:
    """Duplicate (module, name) pairs, reported at their later line numbers."""
    return self._check_redundant_imports(all_imports, set())
def _check_redundant_imports(
|
|
359
|
+
self, all_imports: list[dict[str, t.Any]], seen_modules: set[str]
|
|
360
|
+
) -> list[str]:
|
|
361
|
+
redundant: list[str] = []
|
|
362
|
+
|
|
363
|
+
for imp in all_imports:
|
|
364
|
+
module_key = f"{imp['module']}: {imp['name']}"
|
|
365
|
+
if module_key in seen_modules:
|
|
366
|
+
redundant.append(f"Line {imp['line']}: {imp['module']}.{imp['name']}")
|
|
367
|
+
seen_modules.add(module_key)
|
|
368
|
+
|
|
369
|
+
return redundant
|
|
370
|
+
|
|
371
|
+
def _find_optimization_opportunities(
    self,
    module_imports: dict[str, list[dict[str, t.Any]]],
) -> list[str]:
    """Consolidation suggestions are currently the only opportunity type."""
    return self._find_consolidation_opportunities(module_imports)
def _find_consolidation_opportunities(
|
|
378
|
+
self,
|
|
379
|
+
module_imports: dict[str, list[dict[str, t.Any]]],
|
|
380
|
+
) -> list[str]:
|
|
381
|
+
opportunities: list[str] = []
|
|
382
|
+
|
|
383
|
+
for module, imports in module_imports.items():
|
|
384
|
+
standard_imports = [imp for imp in imports if imp["type"] == "standard"]
|
|
385
|
+
from_imports = [imp for imp in imports if imp["type"] == "from"]
|
|
386
|
+
|
|
387
|
+
if len(standard_imports) >= 2:
|
|
388
|
+
opportunities.append(
|
|
389
|
+
f"Consolidate {len(standard_imports)} standard imports "
|
|
390
|
+
f"from '{module}' into from-import style",
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
if len(from_imports) >= 3:
|
|
394
|
+
opportunities.append(
|
|
395
|
+
f"Consider combining {len(from_imports)} from-imports "
|
|
396
|
+
f"from '{module}' into fewer lines",
|
|
397
|
+
)
|
|
398
|
+
|
|
399
|
+
return opportunities
|
|
400
|
+
|
|
401
|
+
def _find_import_violations(
    self, content: str, all_imports: list[dict[str, t.Any]]
) -> list[str]:
    """Combine PEP 8 ordering violations with star-import findings."""
    violations = self._check_import_ordering(all_imports)
    violations += self._check_star_imports(content)
    return violations
def _check_import_ordering(self, all_imports: list[dict[str, t.Any]]) -> list[str]:
    """Return PEP 8 ordering violations for the given import records.

    Fix: dropped a call to _categorize_imports whose return value was
    discarded — it only builds and returns a fresh dict, so the call was
    dead work on every analysis.
    """
    violations: list[str] = []
    violations.extend(self._find_pep8_order_violations(all_imports))
    return violations
def _find_pep8_order_violations(
    self, all_imports: list[dict[str, t.Any]]
) -> list[str]:
    """Flag imports whose PEP 8 group precedes an already-seen group."""
    violations: list[str] = []
    highest_seen = 0

    for record in all_imports:
        module = record.get("module", "")
        group = self._get_import_category(module)

        if group < highest_seen:
            violations.append(
                f"Import '{module}' should come before previous imports (PEP 8 ordering)"
            )
        else:
            # Only a non-violating import can raise the running maximum.
            highest_seen = group

    return violations
def _check_star_imports(self, content: str) -> list[str]:
    """Report each line using a star import (1-based line numbers)."""
    violations: list[str] = []
    lines = content.splitlines()

    for line_num, line in enumerate(lines, 1):
        # SAFE_PATTERNS is the shared pre-validated regex registry;
        # "match_star_import" presumably matches "from x import *" —
        # TODO confirm against the pattern definition.
        if SAFE_PATTERNS["match_star_import"].test(line.strip()):
            violations.append(f"Line {line_num}: Avoid star imports")

    return violations
def _categorize_imports(
    self, all_imports: list[dict[str, t.Any]]
) -> dict[int, list[dict[str, t.Any]]]:
    """Bucket import records by their PEP 8 group number."""
    buckets: dict[int, list[dict[str, t.Any]]] = defaultdict(list)

    for record in all_imports:
        group = self._get_import_category(record.get("module", ""))
        buckets[group].append(record)

    return buckets
def _get_import_category(self, module: str) -> int:
    """PEP 8 group: 1 stdlib, 2 third-party, 3 local (or unknown/empty)."""
    return 3 if not module else self._determine_module_category(module)
def _determine_module_category(self, module: str) -> int:
    """Classify a non-empty module name by its top-level package."""
    base_module = module.split(".")[0]
    if self._is_stdlib_module(base_module):
        return 1
    return 3 if self._is_local_import(module, base_module) else 2
def _is_stdlib_module(self, base_module: str) -> bool:
    """True when *base_module* is part of the Python standard library.

    Fix: consult ``sys.stdlib_module_names`` (Python 3.10+) first — the
    curated fallback set misses many stdlib modules (io, shutil, string,
    ...), which made them sort as third-party in PEP 8 ordering checks.
    The curated set is kept as a fallback for older interpreters.
    """
    import sys

    if base_module in getattr(sys, "stdlib_module_names", ()):
        return True
    return base_module in self._get_stdlib_modules()
def _get_stdlib_modules(self) -> set[str]:
|
|
481
|
+
return {
|
|
482
|
+
"os",
|
|
483
|
+
"sys",
|
|
484
|
+
"json",
|
|
485
|
+
"ast",
|
|
486
|
+
"re",
|
|
487
|
+
"pathlib",
|
|
488
|
+
"subprocess",
|
|
489
|
+
"typing",
|
|
490
|
+
"collections",
|
|
491
|
+
"functools",
|
|
492
|
+
"itertools",
|
|
493
|
+
"tempfile",
|
|
494
|
+
"contextlib",
|
|
495
|
+
"dataclasses",
|
|
496
|
+
"enum",
|
|
497
|
+
"abc",
|
|
498
|
+
"asyncio",
|
|
499
|
+
"concurrent",
|
|
500
|
+
"urllib",
|
|
501
|
+
"http",
|
|
502
|
+
"socket",
|
|
503
|
+
"ssl",
|
|
504
|
+
"time",
|
|
505
|
+
"datetime",
|
|
506
|
+
"calendar",
|
|
507
|
+
"math",
|
|
508
|
+
"random",
|
|
509
|
+
"hashlib",
|
|
510
|
+
"hmac",
|
|
511
|
+
"base64",
|
|
512
|
+
"uuid",
|
|
513
|
+
"logging",
|
|
514
|
+
"warnings",
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
def _is_local_import(self, module: str, base_module: str) -> bool:
|
|
518
|
+
return module.startswith(".") or base_module == "crackerjack"
|
|
519
|
+
|
|
520
|
+
async def fix_issue(self, issue: Issue) -> FixResult:
    """Entry point: validate *issue*, then optimize the file's imports.

    A validation failure short-circuits with the failure FixResult.
    """
    validation_result = self._validate_issue(issue)
    if validation_result:
        return validation_result

    return await self._process_import_optimization_issue(issue)
async def _process_import_optimization_issue(self, issue: Issue) -> FixResult:
    """Analyze the issue's file and apply import optimizations if needed."""
    # Defensive re-check: fix_issue's _validate_issue should already have
    # rejected a missing path. NOTE(review): the two paths use different
    # failure messages — consider unifying.
    if not issue.file_path:
        return FixResult(
            success=False,
            confidence=0.0,
            fixes_applied=[],
            remaining_issues=["No file path provided in issue"],
        )
    file_path = Path(issue.file_path)

    analysis = await self.analyze_file(file_path)

    if not self._are_optimizations_needed(analysis):
        return self._create_no_optimization_needed_result()

    return await self._apply_optimizations_and_prepare_results(file_path, analysis)
def _create_no_optimization_needed_result(self) -> FixResult:
    """Successful no-op result: the file's imports were already clean."""
    return FixResult(
        success=True,
        confidence=1.0,  # nothing was changed, so nothing can have broken
        fixes_applied=["No import optimizations needed"],
        remaining_issues=[],
        recommendations=["Import patterns are already optimal"],
        files_modified=[],
    )
def _validate_issue(self, issue: Issue) -> FixResult | None:
    """Return a failure FixResult when *issue* lacks a file path, else None."""
    if issue.file_path is None:
        return FixResult(
            success=False,
            confidence=0.0,
            remaining_issues=["No file path provided for import optimization"],
        )
    return None
def _are_optimizations_needed(self, analysis: ImportAnalysis) -> bool:
|
|
564
|
+
return any(
|
|
565
|
+
[
|
|
566
|
+
analysis.mixed_imports,
|
|
567
|
+
analysis.redundant_imports,
|
|
568
|
+
analysis.unused_imports,
|
|
569
|
+
analysis.optimization_opportunities,
|
|
570
|
+
analysis.import_violations,
|
|
571
|
+
],
|
|
572
|
+
)
|
|
573
|
+
|
|
574
|
+
async def _apply_optimizations_and_prepare_results(
    self, file_path: Path, analysis: ImportAnalysis
) -> FixResult:
    """Rewrite *file_path* per *analysis* and report what was done.

    Any exception during read/optimize/write is converted into a failed
    FixResult instead of propagating to the caller.
    """
    try:
        optimized_content = await self._read_and_optimize_file(file_path, analysis)
        await self._write_optimized_content(file_path, optimized_content)

        changes, remaining_issues = self._prepare_fix_results(analysis)
        recommendations = self._prepare_recommendations(
            file_path.name, remaining_issues
        )

        return FixResult(
            success=True,
            confidence=0.85,  # heuristic: rewrite applied but not re-verified
            fixes_applied=changes,
            remaining_issues=remaining_issues,
            recommendations=recommendations,
            files_modified=[str(file_path)],
        )

    except Exception as e:
        return self._handle_optimization_error(e)
async def _read_and_optimize_file(
    self, file_path: Path, analysis: ImportAnalysis
) -> str:
    """Read *file_path* (UTF-8) and return its import-optimized source."""
    with file_path.open(encoding="utf-8") as f:
        original_content = f.read()
    return await self._optimize_imports(original_content, analysis)
async def _write_optimized_content(
|
|
606
|
+
self, file_path: Path, optimized_content: str
|
|
607
|
+
) -> None:
|
|
608
|
+
with file_path.open("w", encoding="utf-8") as f:
|
|
609
|
+
f.write(optimized_content)
|
|
610
|
+
|
|
611
|
+
def _handle_optimization_error(self, e: Exception) -> FixResult:
    """Wrap any optimization failure in an unsuccessful FixResult."""
    return FixResult(
        success=False,
        confidence=0.0,
        fixes_applied=[],
        remaining_issues=[f"Failed to optimize imports: {e}"],
        recommendations=["Manual import review needed"],
        files_modified=[],
    )
def _prepare_fix_results(
    self, analysis: "ImportAnalysis"
) -> tuple[list[str], list[str]]:
    """Summarize applied changes and still-open issues from *analysis*."""
    changes: list[str] = []
    changes += self._get_mixed_import_changes(analysis.mixed_imports)
    changes += self._get_redundant_import_changes(analysis.redundant_imports)
    changes += self._get_unused_import_changes(analysis.unused_imports)
    changes += self._get_optimization_opportunity_changes(
        analysis.optimization_opportunities
    )

    remaining_issues = list(
        self._get_remaining_violations(analysis.import_violations)
    )
    return changes, remaining_issues
def _get_mixed_import_changes(self, mixed_imports: list[str]) -> list[str]:
|
|
643
|
+
changes: list[str] = []
|
|
644
|
+
if mixed_imports:
|
|
645
|
+
changes.append(
|
|
646
|
+
f"Standardized mixed imports for modules: {', '.join(mixed_imports)}",
|
|
647
|
+
)
|
|
648
|
+
return changes
|
|
649
|
+
|
|
650
|
+
def _get_redundant_import_changes(self, redundant_imports: list[str]) -> list[str]:
|
|
651
|
+
changes: list[str] = []
|
|
652
|
+
if redundant_imports:
|
|
653
|
+
changes.append(
|
|
654
|
+
f"Removed {len(redundant_imports)} redundant imports",
|
|
655
|
+
)
|
|
656
|
+
return changes
|
|
657
|
+
|
|
658
|
+
def _get_unused_import_changes(self, unused_imports: list[str]) -> list[str]:
|
|
659
|
+
changes: list[str] = []
|
|
660
|
+
if unused_imports:
|
|
661
|
+
changes.append(
|
|
662
|
+
f"Removed {len(unused_imports)} unused imports: {', '.join(unused_imports[:3])}"
|
|
663
|
+
+ ("..." if len(unused_imports) > 3 else ""),
|
|
664
|
+
)
|
|
665
|
+
return changes
|
|
666
|
+
|
|
667
|
+
def _get_optimization_opportunity_changes(
|
|
668
|
+
self, optimization_opportunities: list[str]
|
|
669
|
+
) -> list[str]:
|
|
670
|
+
changes: list[str] = []
|
|
671
|
+
if optimization_opportunities:
|
|
672
|
+
changes.append(
|
|
673
|
+
f"Applied {len(optimization_opportunities)} import consolidations",
|
|
674
|
+
)
|
|
675
|
+
return changes
|
|
676
|
+
|
|
677
|
+
def _get_remaining_violations(self, import_violations: list[str]) -> list[str]:
|
|
678
|
+
remaining_issues: list[str] = []
|
|
679
|
+
if import_violations:
|
|
680
|
+
remaining_issues.extend(import_violations[:3])
|
|
681
|
+
return remaining_issues
|
|
682
|
+
|
|
683
|
+
def _prepare_recommendations(
|
|
684
|
+
self, file_name: str, remaining_issues: list[str]
|
|
685
|
+
) -> list[str]:
|
|
686
|
+
recommendations = [f"Optimized import statements in {file_name}"]
|
|
687
|
+
if remaining_issues:
|
|
688
|
+
recommendations.append(
|
|
689
|
+
"Consider manual review for remaining PEP 8 violations"
|
|
690
|
+
)
|
|
691
|
+
return recommendations
|
|
692
|
+
|
|
693
|
+
async def _optimize_imports(self, content: str, analysis: "ImportAnalysis") -> str:
    """Apply all import optimizations to *content* and return new source.

    Fix: preserve the file's trailing newline — ``splitlines()`` followed
    by ``"\\n".join`` dropped the final "\\n", so every optimized file lost
    its POSIX trailing newline.
    """
    lines = self._apply_import_optimizations(content.splitlines(), analysis)

    optimized = "\n".join(lines)
    if content.endswith("\n") and not optimized.endswith("\n"):
        optimized += "\n"
    return optimized
def _apply_import_optimizations(
    self, lines: list[str], analysis: "ImportAnalysis"
) -> list[str]:
    """Thin wrapper over the full optimization pipeline."""
    return self._apply_all_optimization_steps(lines, analysis)
def _apply_all_optimization_steps(
    self, lines: list[str], analysis: "ImportAnalysis"
) -> list[str]:
    """Run the rewrite passes in order: unused, mixed, redundant, PEP 8."""
    lines = self._remove_unused_imports(lines, analysis.unused_imports)
    lines = self._consolidate_mixed_imports(lines, analysis.mixed_imports)
    lines = self._remove_redundant_imports(lines, analysis.redundant_imports)
    return self._organize_imports_pep8(lines)
def _remove_unused_imports(
    self, lines: list[str], unused_imports: list[str]
) -> list[str]:
    """Drop (or trim) import lines that only bring in unused names."""
    if not unused_imports:
        return lines
    patterns = self._create_unused_import_patterns(unused_imports)
    return self._filter_unused_import_lines(lines, patterns, unused_imports)
def _create_unused_import_patterns(
|
|
729
|
+
self, unused_imports: list[str]
|
|
730
|
+
) -> list[t.Pattern[str]]:
|
|
731
|
+
import re
|
|
732
|
+
|
|
733
|
+
unused_patterns: list[t.Pattern[str]] = []
|
|
734
|
+
for unused in unused_imports:
|
|
735
|
+
escaped_unused = re.escape(unused)
|
|
736
|
+
|
|
737
|
+
unused_patterns.extend(
|
|
738
|
+
(
|
|
739
|
+
re.compile(f"^\\s*import\\s+{escaped_unused}\\s*$"),
|
|
740
|
+
re.compile(
|
|
741
|
+
f"^\\s*from\\s+\\w+\\s+import\\s+.*\\b{escaped_unused}\\b"
|
|
742
|
+
),
|
|
743
|
+
)
|
|
744
|
+
)
|
|
745
|
+
return unused_patterns
|
|
746
|
+
|
|
747
|
+
def _filter_unused_import_lines(
    self,
    lines: list[str],
    unused_patterns: list[t.Pattern[str]],
    unused_imports: list[str],
) -> list[str]:
    """Drop single-name unused-import lines; trim multi-name ones.

    Fix: the old version kept only lines with non-blank content, which
    silently deleted every blank line in the file. Untouched lines
    (including blanks) are now kept verbatim; an import line is dropped
    only when it matched a pattern outright or trimming emptied it.
    """
    filtered_lines: list[str] = []
    for line in lines:
        should_remove = False
        rewritten = line
        for pattern in unused_patterns:
            if pattern.search(rewritten):
                if self._is_multi_import_line(rewritten):
                    # Several names on the line: strip just the unused ones.
                    rewritten = self._remove_from_import_list(
                        rewritten, unused_imports
                    )
                else:
                    should_remove = True
                    break

        if should_remove:
            continue
        if rewritten is line or rewritten.strip():
            filtered_lines.append(rewritten)

    return filtered_lines
def _is_multi_import_line(self, line: str) -> bool:
|
|
770
|
+
return "import" in line and ", " in line
|
|
771
|
+
|
|
772
|
+
def _remove_from_import_list(self, line: str, unused_imports: list[str]) -> str:
    """Strip unused names from a multi-name import line, then tidy commas.

    Fix: ``import re`` was executed inside the loop on every iteration;
    hoisted to the top of the function.
    """
    import re

    for unused in unused_imports:
        escaped_unused = re.escape(unused)
        # Drop ", name," occurrences; a leading-name case is left to the
        # SAFE_PATTERNS cleanups below.
        line = re.sub(rf", ?\s*{escaped_unused}\s*, ?", ", ", line)

    line = SAFE_PATTERNS["clean_import_commas"].apply(line)
    line = SAFE_PATTERNS["clean_trailing_import_comma"].apply(line)
    line = SAFE_PATTERNS["clean_import_prefix"].apply(line)
    return line
def _consolidate_mixed_imports(
    self, lines: list[str], mixed_modules: list[str]
) -> list[str]:
    """Rewrite modules that mix ``import x`` and ``from x import ...``.

    Three phases: collect every import of the affected modules, delete
    the original lines, then insert one consolidated from-import per
    module at its first-seen position.
    """
    if not mixed_modules:
        return lines

    import_data = self._collect_mixed_module_imports(lines, mixed_modules)
    lines = self._remove_old_mixed_imports(lines, import_data["lines_to_remove"])
    lines = self._insert_consolidated_imports(lines, import_data)

    return lines
def _collect_mixed_module_imports(
    self, lines: list[str], mixed_modules: list[str]
) -> dict[str, t.Any]:
    """Scan *lines* and gather all imports touching the mixed-style modules."""
    import_collector = self._create_import_collector()

    for i, line in enumerate(lines):
        stripped_line = line.strip()
        # Every mixed module is matched independently against each line.
        for module in mixed_modules:
            self._process_mixed_module_line(
                stripped_line, module, i, import_collector
            )

    return self._finalize_import_collection(import_collector)
def _create_import_collector(self) -> dict[str, t.Any]:
|
|
811
|
+
return {
|
|
812
|
+
"module_imports": defaultdict(set),
|
|
813
|
+
"lines_to_remove": set(),
|
|
814
|
+
"insert_positions": {},
|
|
815
|
+
}
|
|
816
|
+
|
|
817
|
+
def _finalize_import_collection(
|
|
818
|
+
self, collector: dict[str, t.Any]
|
|
819
|
+
) -> dict[str, t.Any]:
|
|
820
|
+
return {
|
|
821
|
+
"module_imports": collector["module_imports"],
|
|
822
|
+
"lines_to_remove": collector["lines_to_remove"],
|
|
823
|
+
"insert_positions": collector["insert_positions"],
|
|
824
|
+
}
|
|
825
|
+
|
|
826
|
+
def _process_mixed_module_line(
    self,
    line: str,
    module: str,
    line_index: int,
    import_collector: dict[str, t.Any],
) -> None:
    """Route one stripped line to the standard- or from-import collector."""
    if self._is_standard_import_line(line, module):
        self._handle_standard_import(line, module, line_index, import_collector)
        return
    if self._is_from_import_line(line, module):
        self._handle_from_import(line, module, line_index, import_collector)
def _is_standard_import_line(self, line: str, module: str) -> bool:
|
|
839
|
+
import re
|
|
840
|
+
|
|
841
|
+
return bool(re.match(rf"^\s*import\s+{re.escape(module)}(?: \.\w+)*\s*$", line))
|
|
842
|
+
|
|
843
|
+
def _is_from_import_line(self, line: str, module: str) -> bool:
|
|
844
|
+
import re
|
|
845
|
+
|
|
846
|
+
return bool(re.match(rf"^\s*from\s+{re.escape(module)}\s+import\s+", line))
|
|
847
|
+
|
|
848
|
+
def _handle_standard_import(
    self,
    line: str,
    module: str,
    line_index: int,
    import_collector: dict[str, t.Any],
) -> None:
    """Record a plain ``import`` of *module* for later consolidation."""
    extracted = self._extract_import_name_from_standard(line, module)
    if not extracted:
        return
    chosen = self._determine_import_name(extracted, module)
    self._add_import_to_collector(module, chosen, line_index, import_collector)
def _extract_import_name_from_standard(self, line: str, module: str) -> str | None:
|
|
863
|
+
import re
|
|
864
|
+
|
|
865
|
+
match = re.search(rf"import\s+({re.escape(module)}(?: \.\w+)*)", line)
|
|
866
|
+
return match.group(1) if match else None
|
|
867
|
+
|
|
868
|
+
def _determine_import_name(self, import_name: str, module: str) -> str:
|
|
869
|
+
if "." in import_name:
|
|
870
|
+
return import_name.split(".")[-1]
|
|
871
|
+
return module
|
|
872
|
+
|
|
873
|
+
def _add_import_to_collector(
|
|
874
|
+
self,
|
|
875
|
+
module: str,
|
|
876
|
+
import_name: str,
|
|
877
|
+
line_index: int,
|
|
878
|
+
import_collector: dict[str, t.Any],
|
|
879
|
+
) -> None:
|
|
880
|
+
import_collector["module_imports"][module].add(import_name)
|
|
881
|
+
import_collector["lines_to_remove"].add(line_index)
|
|
882
|
+
if module not in import_collector["insert_positions"]:
|
|
883
|
+
import_collector["insert_positions"][module] = line_index
|
|
884
|
+
|
|
885
|
+
def _handle_from_import(
    self,
    line: str,
    module: str,
    line_index: int,
    import_collector: dict[str, t.Any],
) -> None:
    """Fold a from-import's names into the collector for *module*."""
    names = self._extract_import_names_from_from_import(line, module)
    import_collector["module_imports"][module].update(names)
    import_collector["lines_to_remove"].add(line_index)
    import_collector["insert_positions"].setdefault(module, line_index)
def _extract_import_names_from_from_import(
|
|
899
|
+
self, line: str, module: str
|
|
900
|
+
) -> list[str]:
|
|
901
|
+
import re
|
|
902
|
+
|
|
903
|
+
import_part = re.sub(rf"^\s*from\s+{re.escape(module)}\s+import\s+", "", line)
|
|
904
|
+
return [name.strip() for name in import_part.split(", ")]
|
|
905
|
+
|
|
906
|
+
def _remove_old_mixed_imports(
|
|
907
|
+
self, lines: list[str], lines_to_remove: set[int]
|
|
908
|
+
) -> list[str]:
|
|
909
|
+
for i in sorted(lines_to_remove, reverse=True):
|
|
910
|
+
del lines[i]
|
|
911
|
+
return lines
|
|
912
|
+
|
|
913
|
+
def _insert_consolidated_imports(
    self, lines: list[str], import_data: dict[str, t.Any]
) -> list[str]:
    """Insert one consolidated from-import per module at its recorded slot.

    NOTE(review): the *offset* bookkeeping maps original line indices onto
    the list after _remove_old_mixed_imports has shrunk it; the running
    accumulation looks fragile for several interleaved modules — verify
    against a multi-module input before relying on exact placement.
    """
    module_imports = import_data["module_imports"]
    insert_positions = import_data["insert_positions"]
    lines_to_remove = import_data["lines_to_remove"]

    offset = 0
    for module, imports in module_imports.items():
        if module in insert_positions:
            # Sorted for a deterministic, readable consolidated line.
            imports_list = sorted(imports)
            consolidated = f"from {module} import {', '.join(imports_list)}"
            insert_pos = insert_positions[module] - offset
            lines.insert(insert_pos, consolidated)
            # Removals at or before this slot shift later inserts left;
            # the -1 accounts for the line just inserted.
            offset += (
                len([i for i in lines_to_remove if i <= insert_positions[module]])
                - 1
            )
    return lines
def _remove_redundant_imports(
    self, lines: list[str], redundant_imports: list[str]
) -> list[str]:
    """Drop duplicate import statements, keeping the first occurrence.

    `redundant_imports` only gates whether the pass runs at all; the
    actual de-duplication compares whitespace-normalized import lines.
    Non-import lines pass through untouched.
    """
    if not redundant_imports:
        return lines

    kept: list[str] = []
    seen: set[str] = set()
    for line in lines:
        normalized = SAFE_PATTERNS["normalize_whitespace"].apply(line.strip())
        if not normalized.startswith(("import ", "from ")):
            kept.append(line)
        elif normalized not in seen:
            seen.add(normalized)
            kept.append(line)
    return kept
|
|
954
|
+
|
|
955
|
+
def _organize_imports_pep8(self, lines: list[str]) -> list[str]:
    """Reorder the file's import block per PEP 8 grouping and sorting.

    Returns the input unchanged when no imports are found.
    """
    import_data, other_lines, import_bounds = self._parse_import_lines(lines)
    if not import_data:
        return lines

    ordered = self._sort_imports_by_pep8_standards(import_data)
    return self._rebuild_with_organized_imports(ordered, other_lines, import_bounds)
|
|
966
|
+
|
|
967
|
+
def _sort_imports_by_pep8_standards(
|
|
968
|
+
self, import_data: list[tuple[int, str, str]]
|
|
969
|
+
) -> list[tuple[int, str, str]]:
|
|
970
|
+
return sorted(import_data, key=lambda x: (x[0], x[2].lower()))
|
|
971
|
+
|
|
972
|
+
def _parse_import_lines(
    self, lines: list[str]
) -> tuple[list[tuple[int, str, str]], list[tuple[int, str]], tuple[int, int]]:
    """Partition source lines into categorized imports and everything else.

    Returns (import_lines, other_lines, (import_start, import_end)), where
    the bounds are -1 when no import statement was seen.
    """
    state = self._initialize_parser_state()

    for index, line in enumerate(lines):
        stripped = line.strip()
        handler = (
            self._process_import_line
            if self._is_import_line(stripped)
            else self._process_non_import_line
        )
        handler(index, line, stripped, state)

    bounds = (state["import_start"], state["import_end"])
    return state["import_lines"], state["other_lines"], bounds
|
|
989
|
+
|
|
990
|
+
def _initialize_parser_state(self) -> dict[str, t.Any]:
|
|
991
|
+
return {
|
|
992
|
+
"import_lines": [],
|
|
993
|
+
"other_lines": [],
|
|
994
|
+
"import_start": -1,
|
|
995
|
+
"import_end": -1,
|
|
996
|
+
}
|
|
997
|
+
|
|
998
|
+
def _process_import_line(
    self, i: int, line: str, stripped: str, parser_state: dict[str, t.Any]
) -> None:
    """Record one import line and extend the import section's bounds."""
    # First import seen fixes the start of the section.
    if parser_state["import_start"] < 0:
        parser_state["import_start"] = i
    parser_state["import_end"] = i

    category = self._get_import_category(self._extract_module_name(stripped))
    parser_state["import_lines"].append((category, line, stripped))
|
|
1008
|
+
|
|
1009
|
+
def _process_non_import_line(
    self, i: int, line: str, stripped: str, parser_state: dict[str, t.Any]
) -> None:
    """Hand a non-import line to the categorizer with the current bounds."""
    start = parser_state["import_start"]
    end = parser_state["import_end"]
    self._categorize_non_import_line(
        i, line, stripped, start, end, parser_state["other_lines"]
    )
|
|
1020
|
+
|
|
1021
|
+
def _is_import_line(self, stripped: str) -> bool:
|
|
1022
|
+
return stripped.startswith(("import ", "from ")) and not stripped.startswith(
|
|
1023
|
+
"#"
|
|
1024
|
+
)
|
|
1025
|
+
|
|
1026
|
+
def _extract_module_name(self, stripped: str) -> str:
|
|
1027
|
+
if stripped.startswith("import "):
|
|
1028
|
+
return stripped.split()[1].split(".")[0]
|
|
1029
|
+
|
|
1030
|
+
return stripped.split()[1]
|
|
1031
|
+
|
|
1032
|
+
def _categorize_non_import_line(
|
|
1033
|
+
self,
|
|
1034
|
+
i: int,
|
|
1035
|
+
line: str,
|
|
1036
|
+
stripped: str,
|
|
1037
|
+
import_start: int,
|
|
1038
|
+
import_end: int,
|
|
1039
|
+
other_lines: list[tuple[int, str]],
|
|
1040
|
+
) -> None:
|
|
1041
|
+
if import_start != -1 and import_end != -1 and i > import_end:
|
|
1042
|
+
other_lines.append((i, line))
|
|
1043
|
+
elif import_start == -1:
|
|
1044
|
+
other_lines.append((i, line))
|
|
1045
|
+
elif stripped == "" and import_start <= i <= import_end:
|
|
1046
|
+
return
|
|
1047
|
+
else:
|
|
1048
|
+
other_lines.append((i, line))
|
|
1049
|
+
|
|
1050
|
+
def _rebuild_with_organized_imports(
    self,
    import_data: list[tuple[int, str, str]],
    other_lines: list[tuple[int, str]],
    import_bounds: tuple[int, int],
) -> list[str]:
    """Reassemble the file: prologue, sorted imports, then the remainder."""
    import_start, import_end = import_bounds
    rebuilt: list[str] = []

    self._add_lines_before_imports(rebuilt, other_lines, import_start)
    self._add_organized_imports(rebuilt, import_data)
    self._add_lines_after_imports(rebuilt, other_lines, import_end)

    return rebuilt
|
|
1066
|
+
|
|
1067
|
+
def _add_lines_before_imports(
|
|
1068
|
+
self,
|
|
1069
|
+
result_lines: list[str],
|
|
1070
|
+
other_lines: list[tuple[int, str]],
|
|
1071
|
+
import_start: int,
|
|
1072
|
+
) -> None:
|
|
1073
|
+
for i, line in other_lines:
|
|
1074
|
+
if i < import_start:
|
|
1075
|
+
result_lines.append(line)
|
|
1076
|
+
|
|
1077
|
+
def _add_organized_imports(
|
|
1078
|
+
self, result_lines: list[str], import_data: list[tuple[int, str, str]]
|
|
1079
|
+
) -> None:
|
|
1080
|
+
current_category = 0
|
|
1081
|
+
for category, line, _ in import_data:
|
|
1082
|
+
if category > current_category and current_category > 0:
|
|
1083
|
+
result_lines.append("")
|
|
1084
|
+
result_lines.append(line)
|
|
1085
|
+
current_category = category
|
|
1086
|
+
|
|
1087
|
+
def _add_lines_after_imports(
|
|
1088
|
+
self,
|
|
1089
|
+
result_lines: list[str],
|
|
1090
|
+
other_lines: list[tuple[int, str]],
|
|
1091
|
+
import_end: int,
|
|
1092
|
+
) -> None:
|
|
1093
|
+
if any(i > import_end for i, _ in other_lines):
|
|
1094
|
+
result_lines.append("")
|
|
1095
|
+
for i, line in other_lines:
|
|
1096
|
+
if i > import_end:
|
|
1097
|
+
result_lines.append(line)
|
|
1098
|
+
|
|
1099
|
+
async def get_diagnostics(self) -> dict[str, t.Any]:
    """Summarize import health across a sample of the project's Python files.

    Any failure is converted into an error payload rather than raised.
    """
    try:
        python_files = self._get_python_files()
        # Only the first ten files are analyzed, to keep diagnostics fast.
        metrics = await self._analyze_file_sample(python_files[:10])
        return self._build_success_diagnostics(len(python_files), metrics)
    except Exception as exc:
        return self._build_error_diagnostics(str(exc))
|
|
1106
|
+
|
|
1107
|
+
def _get_python_files(self) -> list[Path]:
    """Collect every ``*.py`` file under the project root, recursively.

    Fix: the original called ``list[t.Any](...)`` — invoking a subscripted
    generic alias as a constructor. It happens to work at runtime but is
    misleading and contradicts the declared ``list[Path]`` return type;
    a plain ``list(...)`` is the correct spelling.
    """
    return list(self.context.project_path.rglob("*.py"))
|
|
1109
|
+
|
|
1110
|
+
async def _analyze_file_sample(self, python_files: list[Path]) -> dict[str, int]:
    """Aggregate per-file import metrics over the given files.

    Files whose analysis fails contribute nothing (the per-file helper
    returns None for them).
    """
    totals = dict.fromkeys(
        (
            "mixed_import_files",
            "total_mixed_modules",
            "unused_import_files",
            "total_unused_imports",
            "pep8_violations",
        ),
        0,
    )

    for file_path in python_files:
        per_file = await self._analyze_single_file_metrics(file_path)
        if per_file:
            self._update_metrics(totals, per_file)

    return totals
|
|
1125
|
+
|
|
1126
|
+
async def _analyze_single_file_metrics(
    self, file_path: Path
) -> dict[str, int] | None:
    """Analyze one file; return None (after logging) when analysis fails."""
    try:
        analysis = await self.analyze_file(file_path)
        return self._extract_file_metrics(analysis)
    except Exception as e:
        # Best-effort: a single unreadable file must not abort the sample.
        self.log(f"Could not analyze {file_path}: {e}")
        return None
|
|
1135
|
+
|
|
1136
|
+
def _extract_file_metrics(self, analysis: ImportAnalysis) -> dict[str, int]:
    """Convert one file's ImportAnalysis into counter increments."""
    mixed = analysis.mixed_imports
    unused = analysis.unused_imports
    return {
        # File-level flags are 0/1 so they can be summed across files.
        "mixed_import_files": int(bool(mixed)),
        "total_mixed_modules": len(mixed),
        "unused_import_files": int(bool(unused)),
        "total_unused_imports": len(unused),
        "pep8_violations": len(analysis.import_violations),
    }
|
|
1145
|
+
|
|
1146
|
+
def _update_metrics(
|
|
1147
|
+
self, metrics: dict[str, int], file_metrics: dict[str, int]
|
|
1148
|
+
) -> None:
|
|
1149
|
+
for key, value in file_metrics.items():
|
|
1150
|
+
metrics[key] += value
|
|
1151
|
+
|
|
1152
|
+
def _build_success_diagnostics(
|
|
1153
|
+
self, files_analyzed: int, metrics: dict[str, int]
|
|
1154
|
+
) -> dict[str, t.Any]:
|
|
1155
|
+
return {
|
|
1156
|
+
"files_analyzed": files_analyzed,
|
|
1157
|
+
**metrics,
|
|
1158
|
+
"agent": "ImportOptimizationAgent",
|
|
1159
|
+
"capabilities": [
|
|
1160
|
+
"Mixed import style consolidation",
|
|
1161
|
+
"Unused import detection with vulture",
|
|
1162
|
+
"PEP 8 import organization",
|
|
1163
|
+
"Redundant import removal",
|
|
1164
|
+
"Intelligent context-aware analysis",
|
|
1165
|
+
],
|
|
1166
|
+
}
|
|
1167
|
+
|
|
1168
|
+
def _build_error_diagnostics(self, error: str) -> dict[str, t.Any]:
|
|
1169
|
+
return {
|
|
1170
|
+
"files_analyzed": 0,
|
|
1171
|
+
"mixed_import_files": 0,
|
|
1172
|
+
"total_mixed_modules": 0,
|
|
1173
|
+
"unused_import_files": 0,
|
|
1174
|
+
"total_unused_imports": 0,
|
|
1175
|
+
"pep8_violations": 0,
|
|
1176
|
+
"agent": "ImportOptimizationAgent",
|
|
1177
|
+
"error": error,
|
|
1178
|
+
}
|
|
1179
|
+
|
|
1180
|
+
|
|
1181
|
+
# Register this agent with the shared registry at import time so it is
# discoverable alongside the other agents.
agent_registry.register(ImportOptimizationAgent)
|