crackerjack 0.33.0__py3-none-any.whl → 0.33.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of crackerjack might be problematic.
- crackerjack/__main__.py +1350 -34
- crackerjack/adapters/__init__.py +17 -0
- crackerjack/adapters/lsp_client.py +358 -0
- crackerjack/adapters/rust_tool_adapter.py +194 -0
- crackerjack/adapters/rust_tool_manager.py +193 -0
- crackerjack/adapters/skylos_adapter.py +231 -0
- crackerjack/adapters/zuban_adapter.py +560 -0
- crackerjack/agents/base.py +7 -3
- crackerjack/agents/coordinator.py +271 -33
- crackerjack/agents/documentation_agent.py +9 -15
- crackerjack/agents/dry_agent.py +3 -15
- crackerjack/agents/formatting_agent.py +1 -1
- crackerjack/agents/import_optimization_agent.py +36 -180
- crackerjack/agents/performance_agent.py +17 -98
- crackerjack/agents/performance_helpers.py +7 -31
- crackerjack/agents/proactive_agent.py +1 -3
- crackerjack/agents/refactoring_agent.py +16 -85
- crackerjack/agents/refactoring_helpers.py +7 -42
- crackerjack/agents/security_agent.py +9 -48
- crackerjack/agents/test_creation_agent.py +356 -513
- crackerjack/agents/test_specialist_agent.py +0 -4
- crackerjack/api.py +6 -25
- crackerjack/cli/cache_handlers.py +204 -0
- crackerjack/cli/cache_handlers_enhanced.py +683 -0
- crackerjack/cli/facade.py +100 -0
- crackerjack/cli/handlers.py +224 -9
- crackerjack/cli/interactive.py +6 -4
- crackerjack/cli/options.py +642 -55
- crackerjack/cli/utils.py +2 -1
- crackerjack/code_cleaner.py +58 -117
- crackerjack/config/global_lock_config.py +8 -48
- crackerjack/config/hooks.py +53 -62
- crackerjack/core/async_workflow_orchestrator.py +24 -34
- crackerjack/core/autofix_coordinator.py +3 -17
- crackerjack/core/enhanced_container.py +4 -13
- crackerjack/core/file_lifecycle.py +12 -89
- crackerjack/core/performance.py +2 -2
- crackerjack/core/performance_monitor.py +15 -55
- crackerjack/core/phase_coordinator.py +104 -204
- crackerjack/core/resource_manager.py +14 -90
- crackerjack/core/service_watchdog.py +62 -95
- crackerjack/core/session_coordinator.py +149 -0
- crackerjack/core/timeout_manager.py +14 -72
- crackerjack/core/websocket_lifecycle.py +13 -78
- crackerjack/core/workflow_orchestrator.py +171 -174
- crackerjack/docs/INDEX.md +11 -0
- crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
- crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
- crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
- crackerjack/docs/generated/api/SERVICES.md +1252 -0
- crackerjack/documentation/__init__.py +31 -0
- crackerjack/documentation/ai_templates.py +756 -0
- crackerjack/documentation/dual_output_generator.py +765 -0
- crackerjack/documentation/mkdocs_integration.py +518 -0
- crackerjack/documentation/reference_generator.py +977 -0
- crackerjack/dynamic_config.py +55 -50
- crackerjack/executors/async_hook_executor.py +10 -15
- crackerjack/executors/cached_hook_executor.py +117 -43
- crackerjack/executors/hook_executor.py +8 -34
- crackerjack/executors/hook_lock_manager.py +26 -183
- crackerjack/executors/individual_hook_executor.py +13 -11
- crackerjack/executors/lsp_aware_hook_executor.py +270 -0
- crackerjack/executors/tool_proxy.py +417 -0
- crackerjack/hooks/lsp_hook.py +79 -0
- crackerjack/intelligence/adaptive_learning.py +25 -10
- crackerjack/intelligence/agent_orchestrator.py +2 -5
- crackerjack/intelligence/agent_registry.py +34 -24
- crackerjack/intelligence/agent_selector.py +5 -7
- crackerjack/interactive.py +17 -6
- crackerjack/managers/async_hook_manager.py +0 -1
- crackerjack/managers/hook_manager.py +79 -1
- crackerjack/managers/publish_manager.py +44 -8
- crackerjack/managers/test_command_builder.py +1 -15
- crackerjack/managers/test_executor.py +1 -3
- crackerjack/managers/test_manager.py +98 -7
- crackerjack/managers/test_manager_backup.py +10 -9
- crackerjack/mcp/cache.py +2 -2
- crackerjack/mcp/client_runner.py +1 -1
- crackerjack/mcp/context.py +191 -68
- crackerjack/mcp/dashboard.py +7 -5
- crackerjack/mcp/enhanced_progress_monitor.py +31 -28
- crackerjack/mcp/file_monitor.py +30 -23
- crackerjack/mcp/progress_components.py +31 -21
- crackerjack/mcp/progress_monitor.py +50 -53
- crackerjack/mcp/rate_limiter.py +6 -6
- crackerjack/mcp/server_core.py +17 -16
- crackerjack/mcp/service_watchdog.py +2 -1
- crackerjack/mcp/state.py +4 -7
- crackerjack/mcp/task_manager.py +11 -9
- crackerjack/mcp/tools/core_tools.py +173 -32
- crackerjack/mcp/tools/error_analyzer.py +3 -2
- crackerjack/mcp/tools/execution_tools.py +8 -10
- crackerjack/mcp/tools/execution_tools_backup.py +42 -30
- crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
- crackerjack/mcp/tools/intelligence_tools.py +5 -2
- crackerjack/mcp/tools/monitoring_tools.py +33 -70
- crackerjack/mcp/tools/proactive_tools.py +24 -11
- crackerjack/mcp/tools/progress_tools.py +5 -8
- crackerjack/mcp/tools/utility_tools.py +20 -14
- crackerjack/mcp/tools/workflow_executor.py +62 -40
- crackerjack/mcp/websocket/app.py +8 -0
- crackerjack/mcp/websocket/endpoints.py +352 -357
- crackerjack/mcp/websocket/jobs.py +40 -57
- crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
- crackerjack/mcp/websocket/server.py +7 -25
- crackerjack/mcp/websocket/websocket_handler.py +6 -17
- crackerjack/mixins/__init__.py +0 -2
- crackerjack/mixins/error_handling.py +1 -70
- crackerjack/models/config.py +12 -1
- crackerjack/models/config_adapter.py +49 -1
- crackerjack/models/protocols.py +122 -122
- crackerjack/models/resource_protocols.py +55 -210
- crackerjack/monitoring/ai_agent_watchdog.py +13 -13
- crackerjack/monitoring/metrics_collector.py +426 -0
- crackerjack/monitoring/regression_prevention.py +8 -8
- crackerjack/monitoring/websocket_server.py +643 -0
- crackerjack/orchestration/advanced_orchestrator.py +11 -6
- crackerjack/orchestration/coverage_improvement.py +3 -3
- crackerjack/orchestration/execution_strategies.py +26 -6
- crackerjack/orchestration/test_progress_streamer.py +8 -5
- crackerjack/plugins/base.py +2 -2
- crackerjack/plugins/hooks.py +7 -0
- crackerjack/plugins/managers.py +11 -8
- crackerjack/security/__init__.py +0 -1
- crackerjack/security/audit.py +6 -35
- crackerjack/services/anomaly_detector.py +392 -0
- crackerjack/services/api_extractor.py +615 -0
- crackerjack/services/backup_service.py +2 -2
- crackerjack/services/bounded_status_operations.py +15 -152
- crackerjack/services/cache.py +127 -1
- crackerjack/services/changelog_automation.py +395 -0
- crackerjack/services/config.py +15 -9
- crackerjack/services/config_merge.py +19 -80
- crackerjack/services/config_template.py +506 -0
- crackerjack/services/contextual_ai_assistant.py +48 -22
- crackerjack/services/coverage_badge_service.py +171 -0
- crackerjack/services/coverage_ratchet.py +27 -25
- crackerjack/services/debug.py +3 -3
- crackerjack/services/dependency_analyzer.py +460 -0
- crackerjack/services/dependency_monitor.py +14 -11
- crackerjack/services/documentation_generator.py +491 -0
- crackerjack/services/documentation_service.py +675 -0
- crackerjack/services/enhanced_filesystem.py +6 -5
- crackerjack/services/enterprise_optimizer.py +865 -0
- crackerjack/services/error_pattern_analyzer.py +676 -0
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/git.py +8 -25
- crackerjack/services/health_metrics.py +10 -8
- crackerjack/services/heatmap_generator.py +735 -0
- crackerjack/services/initialization.py +11 -30
- crackerjack/services/input_validator.py +5 -97
- crackerjack/services/intelligent_commit.py +327 -0
- crackerjack/services/log_manager.py +15 -12
- crackerjack/services/logging.py +4 -3
- crackerjack/services/lsp_client.py +628 -0
- crackerjack/services/memory_optimizer.py +19 -87
- crackerjack/services/metrics.py +42 -33
- crackerjack/services/parallel_executor.py +9 -67
- crackerjack/services/pattern_cache.py +1 -1
- crackerjack/services/pattern_detector.py +6 -6
- crackerjack/services/performance_benchmarks.py +18 -59
- crackerjack/services/performance_cache.py +20 -81
- crackerjack/services/performance_monitor.py +27 -95
- crackerjack/services/predictive_analytics.py +510 -0
- crackerjack/services/quality_baseline.py +234 -0
- crackerjack/services/quality_baseline_enhanced.py +646 -0
- crackerjack/services/quality_intelligence.py +785 -0
- crackerjack/services/regex_patterns.py +618 -524
- crackerjack/services/regex_utils.py +43 -123
- crackerjack/services/secure_path_utils.py +5 -164
- crackerjack/services/secure_status_formatter.py +30 -141
- crackerjack/services/secure_subprocess.py +11 -92
- crackerjack/services/security.py +9 -41
- crackerjack/services/security_logger.py +12 -24
- crackerjack/services/server_manager.py +124 -16
- crackerjack/services/status_authentication.py +16 -159
- crackerjack/services/status_security_manager.py +4 -131
- crackerjack/services/thread_safe_status_collector.py +19 -125
- crackerjack/services/unified_config.py +21 -13
- crackerjack/services/validation_rate_limiter.py +5 -54
- crackerjack/services/version_analyzer.py +459 -0
- crackerjack/services/version_checker.py +1 -1
- crackerjack/services/websocket_resource_limiter.py +10 -144
- crackerjack/services/zuban_lsp_service.py +390 -0
- crackerjack/slash_commands/__init__.py +2 -7
- crackerjack/slash_commands/run.md +2 -2
- crackerjack/tools/validate_input_validator_patterns.py +14 -40
- crackerjack/tools/validate_regex_patterns.py +19 -48
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/METADATA +196 -25
- crackerjack-0.33.2.dist-info/RECORD +229 -0
- crackerjack/CLAUDE.md +0 -207
- crackerjack/RULES.md +0 -380
- crackerjack/py313.py +0 -234
- crackerjack-0.33.0.dist-info/RECORD +0 -187
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/WHEEL +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.33.0.dist-info → crackerjack-0.33.2.dist-info}/licenses/LICENSE +0 -0
crackerjack/agents/import_optimization_agent.py
@@ -31,14 +31,12 @@ class ImportOptimizationAgent(SubAgent):
         super().__init__(context)

     def log(self, message: str, level: str = "INFO") -> None:
-        """Simple logging method for the agent."""
         print(f"[{level}] ImportOptimizationAgent: {message}")

     def get_supported_types(self) -> set[IssueType]:
         return {IssueType.IMPORT_ERROR, IssueType.DEAD_CODE}

     async def can_handle(self, issue: Issue) -> float:
-        """Determine confidence level for handling import-related issues."""
         if issue.type in self.get_supported_types():
             return 0.85

@@ -58,8 +56,6 @@ class ImportOptimizationAgent(SubAgent):
         if any(keyword in description_lower for keyword in import_keywords):
             return 0.8

-        # Check for ruff/pyflakes import error codes
-        # Use safe pattern matching for error code detection
         pattern_obj = SAFE_PATTERNS["match_error_code_patterns"]
         if pattern_obj.test(issue.message):
             return 0.85
@@ -70,24 +66,18 @@ class ImportOptimizationAgent(SubAgent):
         return await self.fix_issue(issue)

     async def analyze_file(self, file_path: Path) -> ImportAnalysis:
-        """Comprehensive import analysis including vulture dead code detection."""
-        # Validate file
         if not self._is_valid_python_file(file_path):
             return self._create_empty_import_analysis(file_path)

-        # Parse file content
         return await self._parse_and_analyze_file(file_path)

     def _is_valid_python_file(self, file_path: Path) -> bool:
-        """Check if the file is a valid Python file."""
         return file_path.exists() and file_path.suffix == ".py"

     def _create_empty_import_analysis(self, file_path: Path) -> ImportAnalysis:
-        """Create an empty import analysis for invalid files."""
         return ImportAnalysis(file_path, [], [], [], [], [])

     async def _parse_and_analyze_file(self, file_path: Path) -> ImportAnalysis:
-        """Parse and analyze a Python file."""
         try:
             with file_path.open(encoding="utf-8") as f:
                 content = f.read()
@@ -95,19 +85,15 @@ class ImportOptimizationAgent(SubAgent):
         except (SyntaxError, OSError) as e:
             return self._handle_parse_error(file_path, e)

-        # Get unused imports from vulture
         unused_imports = await self._detect_unused_imports(file_path)

-        # Analyze import structure
         return self._analyze_imports(file_path, tree, content, unused_imports)

     def _handle_parse_error(self, file_path: Path, e: Exception) -> ImportAnalysis:
-        """Handle errors when parsing a file."""
         self.log(f"Could not parse {file_path}: {e}", level="WARNING")
         return ImportAnalysis(file_path, [], [], [], [], [])

     async def _detect_unused_imports(self, file_path: Path) -> list[str]:
-        """Use vulture to detect unused imports with intelligent filtering."""
         try:
             result = self._run_vulture_analysis(file_path)
             return self._extract_unused_imports_from_result(result)
@@ -116,13 +102,11 @@ class ImportOptimizationAgent(SubAgent):
             subprocess.SubprocessError,
             FileNotFoundError,
         ):
-            # Fallback to basic AST analysis if vulture fails
             return []

     def _run_vulture_analysis(
         self, file_path: Path
     ) -> subprocess.CompletedProcess[str]:
-        """Run vulture analysis on a single file."""
         return subprocess.run(
             ["uv", "run", "vulture", "--min-confidence", "80", str(file_path)],
             capture_output=True,
@@ -134,8 +118,7 @@ class ImportOptimizationAgent(SubAgent):
     def _extract_unused_imports_from_result(
         self, result: subprocess.CompletedProcess[str]
     ) -> list[str]:
-
-        unused_imports = []
+        unused_imports: list[str] = []
         if not self._is_valid_vulture_result(result):
             return unused_imports

@@ -149,16 +132,12 @@ class ImportOptimizationAgent(SubAgent):
     def _is_valid_vulture_result(
         self, result: subprocess.CompletedProcess[str]
     ) -> bool:
-        """Check if vulture result is valid and contains output."""
         return result.returncode == 0 and bool(result.stdout)

     def _extract_import_name_from_line(self, line: str) -> str | None:
-        """Extract import name from a single vulture output line."""
         if not line or "unused import" not in line.lower():
             return None

-        # Extract import name from vulture output using safe patterns
-        # Format: "file.py:line: unused import 'name' (confidence: XX%)"
         pattern_obj = SAFE_PATTERNS["extract_unused_import_name"]
         if pattern_obj.test(line):
             return pattern_obj.apply(line)
@@ -167,11 +146,8 @@ class ImportOptimizationAgent(SubAgent):
     def _analyze_imports(
         self, file_path: Path, tree: ast.AST, content: str, unused_imports: list[str]
     ) -> ImportAnalysis:
-        """Analyze imports in a Python file for various optimization opportunities."""
-        # Extract and analyze import information
         analysis_results = self._perform_full_import_analysis(tree, content)

-        # Create the import analysis object
         return self._create_import_analysis(file_path, analysis_results, unused_imports)

     def _create_import_analysis(
@@ -180,7 +156,6 @@ class ImportOptimizationAgent(SubAgent):
         analysis_results: dict[str, list[str]],
         unused_imports: list[str],
     ) -> ImportAnalysis:
-        """Create an ImportAnalysis object from the analysis results."""
         return ImportAnalysis(
             file_path=file_path,
             mixed_imports=analysis_results["mixed_imports"],
@@ -193,11 +168,8 @@ class ImportOptimizationAgent(SubAgent):
     def _perform_full_import_analysis(
         self, tree: ast.AST, content: str
     ) -> dict[str, list[str]]:
-        """Perform full import analysis on the AST tree."""
-        # Extract import information
         module_imports, all_imports = self._extract_import_information(tree)

-        # Analyze different aspects of imports
         return self._perform_import_analysis(module_imports, all_imports, content)

     def _perform_import_analysis(
@@ -206,8 +178,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         content: str,
     ) -> dict[str, list[str]]:
-        """Perform comprehensive analysis of import patterns."""
-        # Analyze different aspects of imports
         analysis_results = self._analyze_import_patterns(
             module_imports, all_imports, content
         )
@@ -220,8 +190,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         content: str,
     ) -> dict[str, list[str]]:
-        """Analyze various import patterns."""
-        # Analyze different aspects of imports
         return self._analyze_import_aspects(module_imports, all_imports, content)

     def _analyze_import_aspects(
@@ -230,8 +198,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         content: str,
     ) -> dict[str, list[str]]:
-        """Analyze different aspects of imports."""
-        # Analyze each aspect of imports separately
         return self._analyze_each_import_aspect(module_imports, all_imports, content)

     def _analyze_each_import_aspect(
@@ -240,7 +206,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         content: str,
     ) -> dict[str, list[str]]:
-        """Analyze each import aspect individually."""
         mixed_imports = self._find_mixed_imports(module_imports)
         redundant_imports = self._find_redundant_imports(all_imports)
         optimization_opportunities = self._find_optimization_opportunities(
@@ -258,7 +223,6 @@ class ImportOptimizationAgent(SubAgent):
     def _extract_import_information(
         self, tree: ast.AST
    ) -> tuple[dict[str, list[dict[str, t.Any]]], list[dict[str, t.Any]]]:
-        """Extract import information from the AST tree."""
         module_imports: dict[str, list[dict[str, t.Any]]] = defaultdict(list)
         all_imports: list[dict[str, t.Any]] = []

@@ -269,7 +233,6 @@ class ImportOptimizationAgent(SubAgent):
     def _initialize_import_containers(
         self,
     ) -> tuple[dict[str, list[dict[str, t.Any]]], list[dict[str, t.Any]]]:
-        """Initialize containers for import information."""
         module_imports: dict[str, list[dict[str, t.Any]]] = defaultdict(list)
         all_imports: list[dict[str, t.Any]] = []
         return module_imports, all_imports
@@ -280,8 +243,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process all import statements in the AST tree."""
-        # Process all nodes in the tree
         self._process_all_nodes(tree, all_imports, module_imports)

     def _process_all_nodes(
@@ -290,8 +251,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process all nodes in the AST tree."""
-        # Process all import statements in the tree
         self._process_import_statements_in_tree(tree, all_imports, module_imports)

     def _process_import_statements_in_tree(
@@ -300,7 +259,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process all import statements in the AST tree."""
         for node in ast.walk(tree):
             self._process_node_if_import(node, all_imports, module_imports)

@@ -310,7 +268,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process a node if it's an import statement."""
         if isinstance(node, ast.Import):
             self._process_standard_import(node, all_imports, module_imports)
         elif isinstance(node, ast.ImportFrom) and node.module:
@@ -322,8 +279,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process standard import statements."""
-        # Process all aliases in the import
         self._process_standard_import_aliases(node, all_imports, module_imports)

     def _process_standard_import_aliases(
@@ -332,7 +287,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process all aliases in a standard import statement."""
         for alias in node.names:
             import_info = {
                 "type": "standard",
@@ -350,8 +304,6 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-        """Process from import statements."""
-        # Process all aliases in the from import
         self._process_from_import_aliases(node, all_imports, module_imports)

     def _process_from_import_aliases(
@@ -360,7 +312,9 @@ class ImportOptimizationAgent(SubAgent):
         all_imports: list[dict[str, t.Any]],
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> None:
-
+        if node.module is None:
+            return # Skip relative imports without module name
+
         for alias in node.names:
             import_info = {
                 "type": "from",
@@ -378,7 +332,7 @@ class ImportOptimizationAgent(SubAgent):
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> list[str]:
         mixed: list[str] = []
-
+
         mixed.extend(self._check_mixed_imports_per_module(module_imports))
         return mixed

@@ -386,7 +340,6 @@ class ImportOptimizationAgent(SubAgent):
         self,
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> list[str]:
-        """Check each module for mixed import types."""
         mixed: list[str] = []
         for module, imports in module_imports.items():
             types = {imp["type"] for imp in imports}
@@ -398,7 +351,6 @@ class ImportOptimizationAgent(SubAgent):
         seen_modules: set[str] = set()
         redundant: list[str] = []

-        # Check each import for redundancy
         redundant.extend(self._check_redundant_imports(all_imports, seen_modules))

         return redundant
@@ -406,7 +358,6 @@ class ImportOptimizationAgent(SubAgent):
     def _check_redundant_imports(
         self, all_imports: list[dict[str, t.Any]], seen_modules: set[str]
     ) -> list[str]:
-        """Check each import for redundancy."""
         redundant: list[str] = []

         for imp in all_imports:
@@ -421,29 +372,24 @@ class ImportOptimizationAgent(SubAgent):
         self,
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> list[str]:
-        """Find import consolidation and optimization opportunities."""
-        # Find different types of optimization opportunities
         return self._find_consolidation_opportunities(module_imports)

     def _find_consolidation_opportunities(
         self,
         module_imports: dict[str, list[dict[str, t.Any]]],
     ) -> list[str]:
-        """Find opportunities to consolidate imports."""
         opportunities: list[str] = []

         for module, imports in module_imports.items():
             standard_imports = [imp for imp in imports if imp["type"] == "standard"]
             from_imports = [imp for imp in imports if imp["type"] == "from"]

-            # Recommend consolidating multiple standard imports to from-imports
             if len(standard_imports) >= 2:
                 opportunities.append(
                     f"Consolidate {len(standard_imports)} standard imports "
                     f"from '{module}' into from-import style",
                 )

-            # Recommend combining from-imports from same module
             if len(from_imports) >= 3:
                 opportunities.append(
                     f"Consider combining {len(from_imports)} from-imports "
@@ -455,23 +401,17 @@ class ImportOptimizationAgent(SubAgent):
     def _find_import_violations(
         self, content: str, all_imports: list[dict[str, t.Any]]
     ) -> list[str]:
-        """Find PEP 8 import organization violations."""
-        # Categorize imports and check ordering
         violations = self._check_import_ordering(all_imports)

-        # Check for star imports
         violations.extend(self._check_star_imports(content))

         return violations

     def _check_import_ordering(self, all_imports: list[dict[str, t.Any]]) -> list[str]:
-        """Check if imports are in proper PEP 8 order."""
         violations: list[str] = []

-        # Check for import organization (stdlib, third-party, local)
         self._categorize_imports(all_imports)

-        # Find imports that are not in PEP 8 order
         violations.extend(self._find_pep8_order_violations(all_imports))

         return violations
@@ -479,7 +419,6 @@ class ImportOptimizationAgent(SubAgent):
     def _find_pep8_order_violations(
         self, all_imports: list[dict[str, t.Any]]
     ) -> list[str]:
-        """Find imports that are not in PEP 8 order."""
         violations: list[str] = []
         prev_category = 0

@@ -496,13 +435,10 @@ class ImportOptimizationAgent(SubAgent):
         return violations

     def _check_star_imports(self, content: str) -> list[str]:
-        """Check for star imports which should be avoided."""
         violations: list[str] = []
         lines = content.splitlines()

-        # Check for star imports
         for line_num, line in enumerate(lines, 1):
-            # Use safe pattern matching for star import detection
             if SAFE_PATTERNS["match_star_import"].test(line.strip()):
                 violations.append(f"Line {line_num}: Avoid star imports")

@@ -511,7 +447,6 @@ class ImportOptimizationAgent(SubAgent):
     def _categorize_imports(
         self, all_imports: list[dict[str, t.Any]]
     ) -> dict[int, list[dict[str, t.Any]]]:
-        """Categorize imports by PEP 8 standards: 1=stdlib, 2=third-party, 3=local."""
         categories: dict[int, list[dict[str, t.Any]]] = defaultdict(list)

         for imp in all_imports:
@@ -522,36 +457,27 @@ class ImportOptimizationAgent(SubAgent):
         return categories

     def _get_import_category(self, module: str) -> int:
-        """Determine import category: 1=stdlib, 2=third-party, 3=local."""
         if not module:
             return 3

-        # Determine category based on module type
         return self._determine_module_category(module)

     def _determine_module_category(self, module: str) -> int:
-        """Determine the category of a module."""
         base_module = module.split(".")[0]

-        # Check if it's a standard library module
         if self._is_stdlib_module(base_module):
             return 1

-        # Check if it's a local import
         if self._is_local_import(module, base_module):
             return 3

-        # Otherwise assume third-party
         return 2

     def _is_stdlib_module(self, base_module: str) -> bool:
-        """Check if a module is a standard library module."""
-        # Get the set of standard library modules
         stdlib_modules = self._get_stdlib_modules()
         return base_module in stdlib_modules

     def _get_stdlib_modules(self) -> set[str]:
-        """Get the set of standard library modules."""
         return {
             "os",
             "sys",
@@ -589,30 +515,30 @@ class ImportOptimizationAgent(SubAgent):
         }

     def _is_local_import(self, module: str, base_module: str) -> bool:
-        """Check if a module is a local import."""
         return module.startswith(".") or base_module == "crackerjack"

     async def fix_issue(self, issue: Issue) -> FixResult:
-        # Validate input
         validation_result = self._validate_issue(issue)
         if validation_result:
             return validation_result

-        # Process the issue
         return await self._process_import_optimization_issue(issue)

     async def _process_import_optimization_issue(self, issue: Issue) -> FixResult:
-
-
+        if not issue.file_path:
+            return FixResult(
+                success=False,
+                confidence=0.0,
+                fixes_applied=[],
+                remaining_issues=["No file path provided in issue"],
+            )
+        file_path = Path(issue.file_path)

-        # Analyze the file
         analysis = await self.analyze_file(file_path)

-        # Check if optimizations are needed
         if not self._are_optimizations_needed(analysis):
             return self._create_no_optimization_needed_result()

-        # Apply optimizations and prepare results
         return await self._apply_optimizations_and_prepare_results(file_path, analysis)

     def _create_no_optimization_needed_result(self) -> FixResult:
@@ -652,7 +578,6 @@ class ImportOptimizationAgent(SubAgent):
         optimized_content = await self._read_and_optimize_file(file_path, analysis)
         await self._write_optimized_content(file_path, optimized_content)

-        # Prepare results
         changes, remaining_issues = self._prepare_fix_results(analysis)
         recommendations = self._prepare_recommendations(
             file_path.name, remaining_issues
@@ -699,7 +624,6 @@ class ImportOptimizationAgent(SubAgent):
         changes: list[str] = []
         remaining_issues: list[str] = []

-        # Add changes for different types of optimizations
         changes.extend(self._get_mixed_import_changes(analysis.mixed_imports))
         changes.extend(self._get_redundant_import_changes(analysis.redundant_imports))
         changes.extend(self._get_unused_import_changes(analysis.unused_imports))
@@ -709,7 +633,6 @@ class ImportOptimizationAgent(SubAgent):
             )
         )

-        # Report violations that couldn't be auto-fixed
         remaining_issues.extend(
             self._get_remaining_violations(analysis.import_violations)
         )
@@ -754,7 +677,7 @@ class ImportOptimizationAgent(SubAgent):
     def _get_remaining_violations(self, import_violations: list[str]) -> list[str]:
         remaining_issues: list[str] = []
         if import_violations:
-            remaining_issues.extend(import_violations[:3])
+            remaining_issues.extend(import_violations[:3])
         return remaining_issues

     def _prepare_recommendations(
@@ -768,7 +691,6 @@ class ImportOptimizationAgent(SubAgent):
         return recommendations

     async def _optimize_imports(self, content: str, analysis: ImportAnalysis) -> str:
-        """Apply comprehensive import optimizations."""
         lines = content.splitlines()

         lines = self._apply_import_optimizations(lines, analysis)
@@ -778,24 +700,18 @@ class ImportOptimizationAgent(SubAgent):
     def _apply_import_optimizations(
         self, lines: list[str], analysis: ImportAnalysis
     ) -> list[str]:
-        """Apply all import optimization steps in sequence."""
-        # Apply all optimization steps
         lines = self._apply_all_optimization_steps(lines, analysis)
         return lines

     def _apply_all_optimization_steps(
         self, lines: list[str], analysis: ImportAnalysis
     ) -> list[str]:
-        # Remove unused imports first
         lines = self._remove_unused_imports(lines, analysis.unused_imports)

-        # Consolidate mixed imports to from-import style
         lines = self._consolidate_mixed_imports(lines, analysis.mixed_imports)

-        # Remove redundant imports
         lines = self._remove_redundant_imports(lines, analysis.redundant_imports)

-        # Apply PEP 8 import organization
         lines = self._organize_imports_pep8(lines)

         return lines
@@ -803,7 +719,6 @@ class ImportOptimizationAgent(SubAgent):
     def _remove_unused_imports(
         self, lines: list[str], unused_imports: list[str]
     ) -> list[str]:
-        """Remove unused imports identified by vulture."""
         if not unused_imports:
             return lines

@@ -813,14 +728,12 @@ class ImportOptimizationAgent(SubAgent):
     def _create_unused_import_patterns(
         self, unused_imports: list[str]
     ) -> list[t.Pattern[str]]:
-
-        import re # Import needed for pattern compilation
+        import re

-        unused_patterns = []
+        unused_patterns: list[t.Pattern[str]] = []
         for unused in unused_imports:
-            # Use dynamic pattern creation with escaping
             escaped_unused = re.escape(unused)
-
+
             unused_patterns.extend(
                 (
                     re.compile(f"^\\s*import\\s+{escaped_unused}\\s*$"),
@@ -837,40 +750,32 @@ class ImportOptimizationAgent(SubAgent):
         unused_patterns: list[t.Pattern[str]],
         unused_imports: list[str],
     ) -> list[str]:
-        """Filter out lines containing unused imports."""
         filtered_lines = []
         for line in lines:
             should_remove = False
             for pattern in unused_patterns:
                 if pattern.search(line):
                     if self._is_multi_import_line(line):
-                        # Only remove the specific unused import, not the whole line
                         line = self._remove_from_import_list(line, unused_imports)
                     else:
                         should_remove = True
                     break

-            if not should_remove and line.strip():
+            if not should_remove and line.strip():
                 filtered_lines.append(line)

         return filtered_lines

     def _is_multi_import_line(self, line: str) -> bool:
-        ""
-        return "import" in line and "," in line
+        return "import" in line and ", " in line

     def _remove_from_import_list(self, line: str, unused_imports: list[str]) -> str:
-        """Remove specific imports from a multi-import line."""
         for unused in unused_imports:
-
-            import re # REGEX OK: temporary for escaping in dynamic removal
+            import re

             escaped_unused = re.escape(unused)
-            line = re.sub(
-                rf",?\s*{escaped_unused}\s*,?", ", ", line
-            ) # REGEX OK: dynamic removal with escaping
+            line = re.sub(rf", ?\s*{escaped_unused}\s*, ?", ", ", line)

-            # Clean up using safe patterns
             line = SAFE_PATTERNS["clean_import_commas"].apply(line)
             line = SAFE_PATTERNS["clean_trailing_import_comma"].apply(line)
             line = SAFE_PATTERNS["clean_import_prefix"].apply(line)
@@ -879,7 +784,6 @@ class ImportOptimizationAgent(SubAgent):
     def _consolidate_mixed_imports(
         self, lines: list[str], mixed_modules: list[str]
     ) -> list[str]:
-        """Consolidate mixed import styles to prefer from-import format."""
         if not mixed_modules:
             return lines

@@ -892,7 +796,6 @@ class ImportOptimizationAgent(SubAgent):
     def _collect_mixed_module_imports(
         self, lines: list[str], mixed_modules: list[str]
     ) -> dict[str, t.Any]:
-        """Collect import information for mixed modules."""
         import_collector = self._create_import_collector()

         for i, line in enumerate(lines):
@@ -905,7 +808,6 @@ class ImportOptimizationAgent(SubAgent):
         return self._finalize_import_collection(import_collector)

     def _create_import_collector(self) -> dict[str, t.Any]:
-        """Create containers for collecting import information."""
         return {
             "module_imports": defaultdict(set),
             "lines_to_remove": set(),
@@ -915,7 +817,6 @@ class ImportOptimizationAgent(SubAgent):
     def _finalize_import_collection(
         self, collector: dict[str, t.Any]
     ) -> dict[str, t.Any]:
-        """Finalize the collected import information."""
         return {
             "module_imports": collector["module_imports"],
             "lines_to_remove": collector["lines_to_remove"],
@@ -929,27 +830,20 @@ class ImportOptimizationAgent(SubAgent):
         line_index: int,
         import_collector: dict[str, t.Any],
     ) -> None:
-        """Process a single line for mixed module imports."""
         if self._is_standard_import_line(line, module):
             self._handle_standard_import(line, module, line_index, import_collector)
         elif self._is_from_import_line(line, module):
             self._handle_from_import(line, module, line_index, import_collector)

     def _is_standard_import_line(self, line: str, module: str) -> bool:
-
-        import re # REGEX OK: localized for pattern matching
+        import re

-        return bool(
-            re.match(rf"^\s*import\s+{re.escape(module)}(?:\.\w+)*\s*$", line)
-        ) # REGEX OK: dynamic module matching with escaping
+        return bool(re.match(rf"^\s*import\s+{re.escape(module)}(?: \.\w+)*\s*$", line))

     def _is_from_import_line(self, line: str, module: str) -> bool:
-
-        import re # REGEX OK: localized for pattern matching
+        import re

-        return bool(
-            re.match(rf"^\s*from\s+{re.escape(module)}\s+import\s+", line)
-        ) # REGEX OK: dynamic from import matching with escaping
+        return bool(re.match(rf"^\s*from\s+{re.escape(module)}\s+import\s+", line))

     def _handle_standard_import(
         self,
@@ -958,7 +852,6 @@ class ImportOptimizationAgent(SubAgent):
         line_index: int,
         import_collector: dict[str, t.Any],
     ) -> None:
-        """Handle standard import statement."""
         import_name = self._extract_import_name_from_standard(line, module)
         if import_name:
             import_to_add = self._determine_import_name(import_name, module)
@@ -967,16 +860,13 @@ class ImportOptimizationAgent(SubAgent):
             )

     def _extract_import_name_from_standard(self, line: str, module: str) -> str | None:
-
-        import re # REGEX OK: localized for pattern matching
+        import re

-        match = re.search(rf"import\s+({re.escape(module)}(
+        match = re.search(rf"import\s+({re.escape(module)}(?: \.\w+)*)", line)
         return match.group(1) if match else None

     def _determine_import_name(self, import_name: str, module: str) -> str:
-        """Determine what name to import based on the import statement."""
         if "." in import_name:
-            # For submodules, import the submodule name
             return import_name.split(".")[-1]
         return module

@@ -987,7 +877,6 @@ class ImportOptimizationAgent(SubAgent):
         line_index: int,
         import_collector: dict[str, t.Any],
     ) -> None:
-        """Add import information to the collector."""
         import_collector["module_imports"][module].add(import_name)
         import_collector["lines_to_remove"].add(line_index)
         if module not in import_collector["insert_positions"]:
@@ -1000,7 +889,6 @@ class ImportOptimizationAgent(SubAgent):
         line_index: int,
         import_collector: dict[str, t.Any],
     ) -> None:
-        """Handle from-import statement."""
         import_names = self._extract_import_names_from_from_import(line, module)
         import_collector["module_imports"][module].update(import_names)
         import_collector["lines_to_remove"].add(line_index)
@@ -1010,16 +898,14 @@ class ImportOptimizationAgent(SubAgent):
     def _extract_import_names_from_from_import(
         self, line: str, module: str
     ) -> list[str]:
-
-        import re # REGEX OK: localized for pattern matching
+        import re

         import_part = re.sub(rf"^\s*from\s+{re.escape(module)}\s+import\s+", "", line)
-        return [name.strip() for name in import_part.split(",")]
+        return [name.strip() for name in import_part.split(", ")]

     def _remove_old_mixed_imports(
         self, lines: list[str], lines_to_remove: set[int]
     ) -> list[str]:
-        """Remove old import lines in reverse order to preserve indices."""
         for i in sorted(lines_to_remove, reverse=True):
             del lines[i]
         return lines
@@ -1027,7 +913,6 @@ class ImportOptimizationAgent(SubAgent):
     def _insert_consolidated_imports(
         self, lines: list[str], import_data: dict[str, t.Any]
     ) -> list[str]:
-        """Insert consolidated from-imports."""
         module_imports = import_data["module_imports"]
         insert_positions = import_data["insert_positions"]
         lines_to_remove = import_data["lines_to_remove"]
@@ -1048,7 +933,6 @@ class ImportOptimizationAgent(SubAgent):
     def _remove_redundant_imports(
         self, lines: list[str], redundant_imports: list[str]
     ) -> list[str]:
-        """Remove redundant/duplicate import statements."""
         if not redundant_imports:
             return lines

@@ -1056,21 +940,19 @@ class ImportOptimizationAgent(SubAgent):
         filtered_lines = []

         for line in lines:
-            # Normalize the import line for comparison using safe patterns
             normalized = SAFE_PATTERNS["normalize_whitespace"].apply(line.strip())

             if normalized.startswith(("import ", "from ")):
                 if normalized not in seen_imports:
                     seen_imports.add(normalized)
                     filtered_lines.append(line)
-
+
             else:
                 filtered_lines.append(line)

         return filtered_lines

     def _organize_imports_pep8(self, lines: list[str]) -> list[str]:
-        """Organize imports according to PEP 8 standards."""
         parsed_data = self._parse_import_lines(lines)
         import_data, other_lines, import_bounds = parsed_data

@@ -1085,13 +967,11 @@ class ImportOptimizationAgent(SubAgent):
     def _sort_imports_by_pep8_standards(
         self, import_data: list[tuple[int, str, str]]
     ) -> list[tuple[int, str, str]]:
-        """Sort imports by PEP 8 standards: category first, then alphabetically."""
         return sorted(import_data, key=lambda x: (x[0], x[2].lower()))

     def _parse_import_lines(
         self, lines: list[str]
     ) -> tuple[list[tuple[int, str, str]], list[tuple[int, str]], tuple[int, int]]:
-        """Parse lines to separate imports from other code."""
         parser_state = self._initialize_parser_state()

         for i, line in enumerate(lines):
@@ -1108,9 +988,8 @@ class ImportOptimizationAgent(SubAgent):
         )

     def _initialize_parser_state(self) -> dict[str, t.Any]:
-        """Initialize parser state for import line processing."""
         return {
-            "import_lines": [],
+            "import_lines": [],
             "other_lines": [],
             "import_start": -1,
             "import_end": -1,
@@ -1119,7 +998,6 @@ class ImportOptimizationAgent(SubAgent):
     def _process_import_line(
         self, i: int, line: str, stripped: str, parser_state: dict[str, t.Any]
     ) -> None:
-        """Process a line that contains an import statement."""
         if parser_state["import_start"] == -1:
             parser_state["import_start"] = i
         parser_state["import_end"] = i
@@ -1131,7 +1009,6 @@ class ImportOptimizationAgent(SubAgent):
     def _process_non_import_line(
         self, i: int, line: str, stripped: str, parser_state: dict[str, t.Any]
     ) -> None:
-        """Process a line that is not an import statement."""
         self._categorize_non_import_line(
             i,
             line,
@@ -1142,16 +1019,14 @@ class ImportOptimizationAgent(SubAgent):
         )

     def _is_import_line(self, stripped: str) -> bool:
-        """Check if line is an import statement."""
         return stripped.startswith(("import ", "from ")) and not stripped.startswith(
             "#"
         )

     def _extract_module_name(self, stripped: str) -> str:
-        """Extract module name from import statement."""
         if stripped.startswith("import "):
             return stripped.split()[1].split(".")[0]
-
+
         return stripped.split()[1]

     def _categorize_non_import_line(
@@ -1163,15 +1038,11 @@ class ImportOptimizationAgent(SubAgent):
         import_end: int,
         other_lines: list[tuple[int, str]],
     ) -> None:
-        """Categorize non-import lines for later reconstruction."""
         if import_start != -1 and import_end != -1 and i > import_end:
-            # We've passed the import section
             other_lines.append((i, line))
         elif import_start == -1:
-            # We haven't reached imports yet
             other_lines.append((i, line))
         elif stripped == "" and import_start <= i <= import_end:
-            # Empty line within import section - we'll reorganize these
             return
         else:
             other_lines.append((i, line))
@@ -1182,17 +1053,13 @@ class ImportOptimizationAgent(SubAgent):
         other_lines: list[tuple[int, str]],
         import_bounds: tuple[int, int],
     ) -> list[str]:
-
-        result_lines = []
+        result_lines: list[str] = []
         import_start, import_end = import_bounds

-        # Add lines before imports
         self._add_lines_before_imports(result_lines, other_lines, import_start)

-        # Add organized imports with proper spacing
         self._add_organized_imports(result_lines, import_data)

-        # Add lines after imports
         self._add_lines_after_imports(result_lines, other_lines, import_end)

         return result_lines
@@ -1203,7 +1070,6 @@ class ImportOptimizationAgent(SubAgent):
         other_lines: list[tuple[int, str]],
         import_start: int,
     ) -> None:
-        """Add lines that appear before import section."""
         for i, line in other_lines:
             if i < import_start:
                 result_lines.append(line)
@@ -1211,11 +1077,10 @@ class ImportOptimizationAgent(SubAgent):
     def _add_organized_imports(
         self, result_lines: list[str], import_data: list[tuple[int, str, str]]
     ) -> None:
-        """Add imports with proper category spacing."""
         current_category = 0
         for category, line, _ in import_data:
             if category > current_category and current_category > 0:
-                result_lines.append("")
+                result_lines.append("")
             result_lines.append(line)
             current_category = category

@@ -1225,15 +1090,13 @@ class ImportOptimizationAgent(SubAgent):
         other_lines: list[tuple[int, str]],
         import_end: int,
     ) -> None:
-        """Add lines that appear after import section."""
         if any(i > import_end for i, _ in other_lines):
-            result_lines.append("")
+            result_lines.append("")
         for i, line in other_lines:
             if i > import_end:
                 result_lines.append(line)

     async def get_diagnostics(self) -> dict[str, t.Any]:
-        """Provide comprehensive diagnostics about import analysis across the project."""
         try:
             python_files = self._get_python_files()
             metrics = await self._analyze_file_sample(python_files[:10])
@@ -1242,11 +1105,9 @@ class ImportOptimizationAgent(SubAgent):
             return self._build_error_diagnostics(str(e))

     def _get_python_files(self) -> list[Path]:
-
-        return list(self.context.project_path.rglob("*.py"))
+        return list[t.Any](self.context.project_path.rglob("*.py"))

     async def _analyze_file_sample(self, python_files: list[Path]) -> dict[str, int]:
-        """Analyze a sample of files for comprehensive import metrics."""
         metrics = {
             "mixed_import_files": 0,
             "total_mixed_modules": 0,
@@ -1265,7 +1126,6 @@ class ImportOptimizationAgent(SubAgent):
     async def _analyze_single_file_metrics(
         self, file_path: Path
     ) -> dict[str, int] | None:
-        """Analyze a single file and return its metrics, or None if analysis fails."""
         try:
             analysis = await self.analyze_file(file_path)
             return self._extract_file_metrics(analysis)
@@ -1274,7 +1134,6 @@ class ImportOptimizationAgent(SubAgent):
             return None

     def _extract_file_metrics(self, analysis: ImportAnalysis) -> dict[str, int]:
-        """Extract metrics from a single file analysis."""
         metrics = {
             "mixed_import_files": 1 if analysis.mixed_imports else 0,
             "total_mixed_modules": len(analysis.mixed_imports),
@@ -1287,14 +1146,12 @@ class ImportOptimizationAgent(SubAgent):
     def _update_metrics(
         self, metrics: dict[str, int], file_metrics: dict[str, int]
     ) -> None:
-        """Update overall metrics with single file metrics."""
         for key, value in file_metrics.items():
             metrics[key] += value

     def _build_success_diagnostics(
         self, files_analyzed: int, metrics: dict[str, int]
     ) -> dict[str, t.Any]:
-        """Build successful diagnostics response."""
         return {
             "files_analyzed": files_analyzed,
             **metrics,
@@ -1309,7 +1166,6 @@ class ImportOptimizationAgent(SubAgent):
         }

     def _build_error_diagnostics(self, error: str) -> dict[str, t.Any]:
-        """Build error diagnostics response."""
         return {
             "files_analyzed": 0,
             "mixed_import_files": 0,