crackerjack 0.32.0__py3-none-any.whl → 0.33.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of crackerjack might be problematic.
- crackerjack/__main__.py +1350 -34
- crackerjack/adapters/__init__.py +17 -0
- crackerjack/adapters/lsp_client.py +358 -0
- crackerjack/adapters/rust_tool_adapter.py +194 -0
- crackerjack/adapters/rust_tool_manager.py +193 -0
- crackerjack/adapters/skylos_adapter.py +231 -0
- crackerjack/adapters/zuban_adapter.py +560 -0
- crackerjack/agents/base.py +7 -3
- crackerjack/agents/coordinator.py +271 -33
- crackerjack/agents/documentation_agent.py +9 -15
- crackerjack/agents/dry_agent.py +3 -15
- crackerjack/agents/formatting_agent.py +1 -1
- crackerjack/agents/import_optimization_agent.py +36 -180
- crackerjack/agents/performance_agent.py +17 -98
- crackerjack/agents/performance_helpers.py +7 -31
- crackerjack/agents/proactive_agent.py +1 -3
- crackerjack/agents/refactoring_agent.py +16 -85
- crackerjack/agents/refactoring_helpers.py +7 -42
- crackerjack/agents/security_agent.py +9 -48
- crackerjack/agents/test_creation_agent.py +356 -513
- crackerjack/agents/test_specialist_agent.py +0 -4
- crackerjack/api.py +6 -25
- crackerjack/cli/cache_handlers.py +204 -0
- crackerjack/cli/cache_handlers_enhanced.py +683 -0
- crackerjack/cli/facade.py +100 -0
- crackerjack/cli/handlers.py +224 -9
- crackerjack/cli/interactive.py +6 -4
- crackerjack/cli/options.py +642 -55
- crackerjack/cli/utils.py +2 -1
- crackerjack/code_cleaner.py +58 -117
- crackerjack/config/global_lock_config.py +8 -48
- crackerjack/config/hooks.py +53 -62
- crackerjack/core/async_workflow_orchestrator.py +24 -34
- crackerjack/core/autofix_coordinator.py +3 -17
- crackerjack/core/enhanced_container.py +64 -6
- crackerjack/core/file_lifecycle.py +12 -89
- crackerjack/core/performance.py +2 -2
- crackerjack/core/performance_monitor.py +15 -55
- crackerjack/core/phase_coordinator.py +257 -218
- crackerjack/core/resource_manager.py +14 -90
- crackerjack/core/service_watchdog.py +62 -95
- crackerjack/core/session_coordinator.py +149 -0
- crackerjack/core/timeout_manager.py +14 -72
- crackerjack/core/websocket_lifecycle.py +13 -78
- crackerjack/core/workflow_orchestrator.py +558 -240
- crackerjack/docs/INDEX.md +11 -0
- crackerjack/docs/generated/api/API_REFERENCE.md +10895 -0
- crackerjack/docs/generated/api/CLI_REFERENCE.md +109 -0
- crackerjack/docs/generated/api/CROSS_REFERENCES.md +1755 -0
- crackerjack/docs/generated/api/PROTOCOLS.md +3 -0
- crackerjack/docs/generated/api/SERVICES.md +1252 -0
- crackerjack/documentation/__init__.py +31 -0
- crackerjack/documentation/ai_templates.py +756 -0
- crackerjack/documentation/dual_output_generator.py +765 -0
- crackerjack/documentation/mkdocs_integration.py +518 -0
- crackerjack/documentation/reference_generator.py +977 -0
- crackerjack/dynamic_config.py +55 -50
- crackerjack/executors/async_hook_executor.py +10 -15
- crackerjack/executors/cached_hook_executor.py +117 -43
- crackerjack/executors/hook_executor.py +8 -34
- crackerjack/executors/hook_lock_manager.py +26 -183
- crackerjack/executors/individual_hook_executor.py +13 -11
- crackerjack/executors/lsp_aware_hook_executor.py +270 -0
- crackerjack/executors/tool_proxy.py +417 -0
- crackerjack/hooks/lsp_hook.py +79 -0
- crackerjack/intelligence/adaptive_learning.py +25 -10
- crackerjack/intelligence/agent_orchestrator.py +2 -5
- crackerjack/intelligence/agent_registry.py +34 -24
- crackerjack/intelligence/agent_selector.py +5 -7
- crackerjack/interactive.py +17 -6
- crackerjack/managers/async_hook_manager.py +0 -1
- crackerjack/managers/hook_manager.py +79 -1
- crackerjack/managers/publish_manager.py +66 -13
- crackerjack/managers/test_command_builder.py +5 -17
- crackerjack/managers/test_executor.py +1 -3
- crackerjack/managers/test_manager.py +109 -7
- crackerjack/managers/test_manager_backup.py +10 -9
- crackerjack/mcp/cache.py +2 -2
- crackerjack/mcp/client_runner.py +1 -1
- crackerjack/mcp/context.py +191 -68
- crackerjack/mcp/dashboard.py +7 -5
- crackerjack/mcp/enhanced_progress_monitor.py +31 -28
- crackerjack/mcp/file_monitor.py +30 -23
- crackerjack/mcp/progress_components.py +31 -21
- crackerjack/mcp/progress_monitor.py +50 -53
- crackerjack/mcp/rate_limiter.py +6 -6
- crackerjack/mcp/server_core.py +161 -32
- crackerjack/mcp/service_watchdog.py +2 -1
- crackerjack/mcp/state.py +4 -7
- crackerjack/mcp/task_manager.py +11 -9
- crackerjack/mcp/tools/core_tools.py +174 -33
- crackerjack/mcp/tools/error_analyzer.py +3 -2
- crackerjack/mcp/tools/execution_tools.py +15 -12
- crackerjack/mcp/tools/execution_tools_backup.py +42 -30
- crackerjack/mcp/tools/intelligence_tool_registry.py +7 -5
- crackerjack/mcp/tools/intelligence_tools.py +5 -2
- crackerjack/mcp/tools/monitoring_tools.py +33 -70
- crackerjack/mcp/tools/proactive_tools.py +24 -11
- crackerjack/mcp/tools/progress_tools.py +5 -8
- crackerjack/mcp/tools/utility_tools.py +20 -14
- crackerjack/mcp/tools/workflow_executor.py +62 -40
- crackerjack/mcp/websocket/app.py +8 -0
- crackerjack/mcp/websocket/endpoints.py +352 -357
- crackerjack/mcp/websocket/jobs.py +40 -57
- crackerjack/mcp/websocket/monitoring_endpoints.py +2935 -0
- crackerjack/mcp/websocket/server.py +7 -25
- crackerjack/mcp/websocket/websocket_handler.py +6 -17
- crackerjack/mixins/__init__.py +3 -0
- crackerjack/mixins/error_handling.py +145 -0
- crackerjack/models/config.py +21 -1
- crackerjack/models/config_adapter.py +49 -1
- crackerjack/models/protocols.py +176 -107
- crackerjack/models/resource_protocols.py +55 -210
- crackerjack/models/task.py +3 -0
- crackerjack/monitoring/ai_agent_watchdog.py +13 -13
- crackerjack/monitoring/metrics_collector.py +426 -0
- crackerjack/monitoring/regression_prevention.py +8 -8
- crackerjack/monitoring/websocket_server.py +643 -0
- crackerjack/orchestration/advanced_orchestrator.py +11 -6
- crackerjack/orchestration/coverage_improvement.py +3 -3
- crackerjack/orchestration/execution_strategies.py +26 -6
- crackerjack/orchestration/test_progress_streamer.py +8 -5
- crackerjack/plugins/base.py +2 -2
- crackerjack/plugins/hooks.py +7 -0
- crackerjack/plugins/managers.py +11 -8
- crackerjack/security/__init__.py +0 -1
- crackerjack/security/audit.py +90 -105
- crackerjack/services/anomaly_detector.py +392 -0
- crackerjack/services/api_extractor.py +615 -0
- crackerjack/services/backup_service.py +2 -2
- crackerjack/services/bounded_status_operations.py +15 -152
- crackerjack/services/cache.py +127 -1
- crackerjack/services/changelog_automation.py +395 -0
- crackerjack/services/config.py +18 -11
- crackerjack/services/config_merge.py +30 -85
- crackerjack/services/config_template.py +506 -0
- crackerjack/services/contextual_ai_assistant.py +48 -22
- crackerjack/services/coverage_badge_service.py +171 -0
- crackerjack/services/coverage_ratchet.py +41 -17
- crackerjack/services/debug.py +3 -3
- crackerjack/services/dependency_analyzer.py +460 -0
- crackerjack/services/dependency_monitor.py +14 -11
- crackerjack/services/documentation_generator.py +491 -0
- crackerjack/services/documentation_service.py +675 -0
- crackerjack/services/enhanced_filesystem.py +6 -5
- crackerjack/services/enterprise_optimizer.py +865 -0
- crackerjack/services/error_pattern_analyzer.py +676 -0
- crackerjack/services/file_hasher.py +1 -1
- crackerjack/services/git.py +41 -45
- crackerjack/services/health_metrics.py +10 -8
- crackerjack/services/heatmap_generator.py +735 -0
- crackerjack/services/initialization.py +30 -33
- crackerjack/services/input_validator.py +5 -97
- crackerjack/services/intelligent_commit.py +327 -0
- crackerjack/services/log_manager.py +15 -12
- crackerjack/services/logging.py +4 -3
- crackerjack/services/lsp_client.py +628 -0
- crackerjack/services/memory_optimizer.py +409 -0
- crackerjack/services/metrics.py +42 -33
- crackerjack/services/parallel_executor.py +416 -0
- crackerjack/services/pattern_cache.py +1 -1
- crackerjack/services/pattern_detector.py +6 -6
- crackerjack/services/performance_benchmarks.py +250 -576
- crackerjack/services/performance_cache.py +382 -0
- crackerjack/services/performance_monitor.py +565 -0
- crackerjack/services/predictive_analytics.py +510 -0
- crackerjack/services/quality_baseline.py +234 -0
- crackerjack/services/quality_baseline_enhanced.py +646 -0
- crackerjack/services/quality_intelligence.py +785 -0
- crackerjack/services/regex_patterns.py +605 -524
- crackerjack/services/regex_utils.py +43 -123
- crackerjack/services/secure_path_utils.py +5 -164
- crackerjack/services/secure_status_formatter.py +30 -141
- crackerjack/services/secure_subprocess.py +11 -92
- crackerjack/services/security.py +61 -30
- crackerjack/services/security_logger.py +18 -22
- crackerjack/services/server_manager.py +124 -16
- crackerjack/services/status_authentication.py +16 -159
- crackerjack/services/status_security_manager.py +4 -131
- crackerjack/services/terminal_utils.py +0 -0
- crackerjack/services/thread_safe_status_collector.py +19 -125
- crackerjack/services/unified_config.py +21 -13
- crackerjack/services/validation_rate_limiter.py +5 -54
- crackerjack/services/version_analyzer.py +459 -0
- crackerjack/services/version_checker.py +1 -1
- crackerjack/services/websocket_resource_limiter.py +10 -144
- crackerjack/services/zuban_lsp_service.py +390 -0
- crackerjack/slash_commands/__init__.py +2 -7
- crackerjack/slash_commands/run.md +2 -2
- crackerjack/tools/validate_input_validator_patterns.py +14 -40
- crackerjack/tools/validate_regex_patterns.py +19 -48
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/METADATA +197 -26
- crackerjack-0.33.1.dist-info/RECORD +229 -0
- crackerjack/CLAUDE.md +0 -207
- crackerjack/RULES.md +0 -380
- crackerjack/py313.py +0 -234
- crackerjack-0.32.0.dist-info/RECORD +0 -180
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/WHEEL +0 -0
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/entry_points.txt +0 -0
- {crackerjack-0.32.0.dist-info → crackerjack-0.33.1.dist-info}/licenses/LICENSE +0 -0
crackerjack/agents/performance_helpers.py

@@ -1,5 +1,3 @@
-"""Performance analysis helper classes and utilities."""
-
 import ast
 import typing as t
 from dataclasses import dataclass
@@ -7,16 +5,12 @@ from dataclasses import dataclass
 
 
 @dataclass
 class OptimizationResult:
-    """Result of an optimization operation."""
-
     lines: list[str]
     modified: bool
     optimization_description: str | None = None
 
 
 class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
-    """Analyzer for detecting nested loops with complexity analysis."""
-
     def __init__(self) -> None:
         self.loop_stack: list[tuple[str, ast.AST, int]] = []
         self.nested_loops: list[dict[str, t.Any]] = []
@@ -29,7 +23,6 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
         self._process_loop_node(node, "nested_while_loop")
 
     def _process_loop_node(self, node: ast.For | ast.While, loop_type: str) -> None:
-        """Process a loop node and track nesting information."""
         current_depth = len(self.loop_stack) + 1
         self.loop_stack.append((loop_type.split("_")[1], node, current_depth))
 
@@ -44,7 +37,6 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
     def _create_loop_info(
         self, node: ast.For | ast.While, loop_type: str, current_depth: int
     ) -> dict[str, t.Any]:
-        """Create loop information dictionary."""
         loop_info: dict[str, t.Any] = {
             "line_number": node.lineno,
             "type": loop_type,
@@ -63,8 +55,7 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
     def _check_complexity_hotspot(
         self, loop_info: dict[str, t.Any], current_depth: int
     ) -> None:
-
-        if current_depth >= 3:  # O(n³) or higher
+        if current_depth >= 3:
             self.complexity_hotspots.append(
                 loop_info
                 | {
@@ -74,11 +65,9 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
             )
 
     def _calculate_complexity_factor(self, depth: int) -> int:
-
-        return depth**2  # Exponential growth factor
+        return depth**2
 
     def _get_optimization_priority(self, depth: int) -> str:
-        """Determine optimization priority based on nesting depth."""
         if depth >= 4:
             return "critical"
         elif depth == 3:
@@ -88,7 +77,6 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
             return "low"
 
     def _extract_iterable_info(self, node: ast.For) -> dict[str, t.Any]:
-        """Extract information about the iterable for optimization hints."""
         iterable_info = {"type": "unknown", "name": None}
 
         if isinstance(node.iter, ast.Name):
@@ -100,15 +88,13 @@ class EnhancedNestedLoopAnalyzer(ast.NodeVisitor):
             }
             if node.iter.func.id == "range":
                 iterable_info["optimization_hint"] = (
-                    "Consider list comprehension or vectorization"
+                    "Consider list[t.Any] comprehension or vectorization"
                 )
 
         return iterable_info
 
 
 class EnhancedListOpAnalyzer(ast.NodeVisitor):
-    """Analyzer for detecting inefficient list operations in loops."""
-
     def __init__(self) -> None:
         self.in_loop = False
         self.loop_depth = 0
@@ -131,22 +117,18 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
         self.generic_visit(node)
 
     def _enter_loop_context(self, node: ast.For | ast.While) -> None:
-        """Enter loop context and save previous state."""
         self._old_state = (self.in_loop, self.loop_depth, self.current_loop_node)
         self.in_loop = True
         self.loop_depth += 1
         self.current_loop_node = node
 
     def _exit_loop_context(self) -> None:
-        """Exit loop context and restore previous state."""
         self.in_loop, self.loop_depth, self.current_loop_node = self._old_state
 
     def _should_analyze_aug_assign(self, node: ast.AugAssign) -> bool:
-        """Check if this augmented assignment should be analyzed."""
         return self.in_loop and isinstance(node.op, ast.Add)
 
     def _analyze_aug_assign_node(self, node: ast.AugAssign) -> None:
-        """Analyze an augmented assignment node for inefficiencies."""
         impact_factor = self._calculate_performance_impact()
 
         if isinstance(node.value, ast.List):
@@ -155,8 +137,6 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
             self._handle_variable_concat(node, impact_factor)
 
     def _handle_list_concat(self, node: ast.AugAssign, impact_factor: int) -> None:
-        """Handle list concatenation with literal list."""
-        # Type narrowing to help pyright understand that node.value is an ast.List
         assert isinstance(node.value, ast.List)
         list_size = len(node.value.elts)
 
@@ -164,7 +144,7 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
             {
                 "line_number": node.lineno,
                 "type": "list_concat_in_loop",
-                "pattern": f"list += [{list_size} items]",
+                "pattern": f"list[t.Any] += [{list_size} items]",
                 "loop_depth": self.loop_depth,
                 "impact_factor": impact_factor,
                 "optimization": "append" if list_size == 1 else "extend",
@@ -175,13 +155,12 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
         )
 
     def _handle_variable_concat(self, node: ast.AugAssign, impact_factor: int) -> None:
-        """Handle list concatenation with variable."""
         var_name = getattr(node.value, "id", "unknown")
         self.list_ops.append(
             {
                 "line_number": node.lineno,
                 "type": "list_concat_variable",
-                "pattern": f"list += {var_name}",
+                "pattern": f"list[t.Any] += {var_name}",
                 "loop_depth": self.loop_depth,
                 "impact_factor": impact_factor,
                 "optimization": "extend",
@@ -190,8 +169,7 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
         )
 
     def _calculate_performance_impact(self) -> int:
-
-        base_impact = 2  # Baseline improvement factor
+        base_impact = 2
 
         if self.loop_depth > 1:
             base_impact *= self.loop_depth**2
@@ -199,17 +177,15 @@ class EnhancedListOpAnalyzer(ast.NodeVisitor):
         if self._is_hot_loop():
             base_impact *= 5
 
-        return min(base_impact, 50)
+        return min(base_impact, 50)
 
     def _is_hot_loop(self) -> bool:
-        """Check if current loop is a hot loop with large range."""
         if not (self.current_loop_node and isinstance(self.current_loop_node, ast.For)):
             return False
 
         return self._has_large_range_iterator()
 
     def _has_large_range_iterator(self) -> bool:
-        """Check if the for loop uses a large range."""
         if not isinstance(self.current_loop_node, ast.For):
             return False
 
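For context, `EnhancedNestedLoopAnalyzer` above is a plain `ast.NodeVisitor`, so it can be driven with the standard library alone. The snippet below is an illustrative usage sketch only: the sample source string is invented, and it assumes the analyzer exposes the `nested_loops` and `complexity_hotspots` attributes referenced in the hunks above.

import ast

from crackerjack.agents.performance_helpers import EnhancedNestedLoopAnalyzer

# Invented example input: a triply nested loop, deep enough (depth >= 3)
# to be recorded by _check_complexity_hotspot.
source = """
for i in range(10):
    for j in range(10):
        for k in range(10):
            total = i + j + k
"""

analyzer = EnhancedNestedLoopAnalyzer()
analyzer.visit(ast.parse(source))

print(analyzer.nested_loops)         # nested loops recorded during the visit
print(analyzer.complexity_hotspots)  # loops nested three or more deep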
crackerjack/agents/proactive_agent.py

@@ -37,7 +37,7 @@ class ProactiveAgent(SubAgent):
         return await self.analyze_and_fix(issue)
 
     def _get_planning_cache_key(self, issue: Issue) -> str:
-        return f"{issue.type.value}:{issue.file_path}:{issue.line_number}"
+        return f"{issue.type.value}: {issue.file_path}: {issue.line_number}"
 
     def _cache_successful_pattern(
         self, issue: Issue, plan: dict[str, t.Any], result: FixResult
@@ -53,5 +53,3 @@ class ProactiveAgent(SubAgent):
 
     def get_cached_patterns(self) -> dict[str, t.Any]:
         return self._pattern_cache.copy()
-
-    # Removed unused methods: clear_pattern_cache, get_planning_confidence
crackerjack/agents/refactoring_agent.py

@@ -25,15 +25,12 @@ class RefactoringAgent(SubAgent):
 
     async def can_handle(self, issue: Issue) -> float:
         if issue.type == IssueType.COMPLEXITY:
-            # Enhanced confidence for complexity reduction
             return 0.9 if self._has_complexity_markers(issue) else 0.85
         if issue.type == IssueType.DEAD_CODE:
-            # Enhanced confidence for dead code detection
             return 0.8 if self._has_dead_code_markers(issue) else 0.75
         return 0.0
 
     def _has_complexity_markers(self, issue: Issue) -> bool:
-        """Check if issue shows signs of high complexity that we can handle."""
         if not issue.message:
             return False
 
@@ -52,7 +49,6 @@ class RefactoringAgent(SubAgent):
         )
 
     def _has_dead_code_markers(self, issue: Issue) -> bool:
-        """Check if issue shows signs of dead code that we can handle."""
         if not issue.message:
             return False
 
@@ -407,13 +403,11 @@ class RefactoringAgent(SubAgent):
         self,
         node: ast.FunctionDef | ast.AsyncFunctionDef,
     ) -> int:
-        """Enhanced cognitive complexity calculator with more accurate scoring."""
         calculator = self._create_complexity_calculator()
         calculator.visit(node)
         return calculator.complexity
 
     def _create_complexity_calculator(self) -> "ComplexityCalculator":
-        """Create and configure the complexity calculator."""
         from . import refactoring_helpers
 
         return refactoring_helpers.ComplexityCalculator()
@@ -423,20 +417,17 @@ class RefactoringAgent(SubAgent):
         content: str,
         complex_functions: list[dict[str, t.Any]],
     ) -> str:
-        # First try specific function refactoring
         refactored_content = self._refactor_complex_functions(
             content, complex_functions
         )
         if refactored_content != content:
             return refactored_content
 
-        # Apply enhanced complexity reduction strategies
         return self._apply_enhanced_strategies(content)
 
     def _refactor_complex_functions(
         self, content: str, complex_functions: list[dict[str, t.Any]]
     ) -> str:
-        """Refactor complex functions by applying specific patterns."""
         lines = content.split("\n")
 
         for func_info in complex_functions:
@@ -462,12 +453,10 @@ class RefactoringAgent(SubAgent):
         return content
 
     def _apply_enhanced_strategies(self, content: str) -> str:
-        """Apply enhanced complexity reduction strategies."""
         enhanced_content = self._apply_enhanced_complexity_patterns(content)
         return enhanced_content
 
     def _apply_enhanced_complexity_patterns(self, content: str) -> str:
-        """Apply enhanced complexity reduction patterns using SAFE_PATTERNS."""
         operations = [
             self._extract_nested_conditions,
             self._simplify_boolean_expressions,
@@ -482,23 +471,20 @@ class RefactoringAgent(SubAgent):
         return modified_content
 
     def _extract_nested_conditions(self, content: str) -> str:
-        """Extract deeply nested conditions into helper methods."""
         lines = content.split("\n")
         modified_lines = []
 
         for i, line in enumerate(lines):
             stripped = line.strip()
 
-            # Look for complex conditions that could be extracted
             if (
                 stripped.startswith("if ")
                 and (" and " in stripped or " or " in stripped)
                 and len(stripped) > 80
             ):
-                # This is a candidate for extraction
                 indent = " " * (len(line) - len(line.lstrip()))
                 helper_name = f"_is_complex_condition_{i}"
-                modified_lines.append(f"{indent}if self.{helper_name}():")
+                modified_lines.append(f"{indent}if self.{helper_name}(): ")
                 continue
 
             modified_lines.append(line)
@@ -506,18 +492,15 @@ class RefactoringAgent(SubAgent):
         return "\n".join(modified_lines)
 
     def _simplify_boolean_expressions(self, content: str) -> str:
-        """Simplify complex boolean expressions using SAFE_PATTERNS."""
-        # Look for long boolean chains and suggest extraction
         lines = content.split("\n")
         modified_lines = []
 
         for line in lines:
             if " and " in line and " or " in line and len(line.strip()) > 100:
-                # Mark for potential extraction
                 if line.strip().startswith("if "):
                     indent = " " * (len(line) - len(line.lstrip()))
                     method_name = "_validate_complex_condition"
-                    modified_lines.append(f"{indent}if self.{method_name}():")
+                    modified_lines.append(f"{indent}if self.{method_name}(): ")
                     continue
 
             modified_lines.append(line)
@@ -525,46 +508,35 @@ class RefactoringAgent(SubAgent):
         return "\n".join(modified_lines)
 
     def _extract_validation_patterns(self, content: str) -> str:
-        """Extract common validation patterns to separate methods."""
-        # Look for repeated validation patterns
         if "validation_extract" in SAFE_PATTERNS:
             content = SAFE_PATTERNS["validation_extract"].apply(content)
         else:
-            # Use safe pattern matching instead of raw regex
             pattern_obj = SAFE_PATTERNS["match_validation_patterns"]
             if pattern_obj.test(content):
                 matches = len(
                     [line for line in content.split("\n") if pattern_obj.test(line)]
                 )
-                if matches > 2:
-                    # Could extract to helper method
+                if matches > 2:
                     pass
 
         return content
 
     def _simplify_data_structures(self, content: str) -> str:
-        """Simplify complex data structure operations."""
-        # Look for complex dictionary/list comprehensions
         lines = content.split("\n")
         modified_lines = []
 
         for line in lines:
             stripped = line.strip()
 
-            # Check for complex list comprehensions
             if (
                 "[" in stripped
                 and "for" in stripped
                 and "if" in stripped
                 and len(stripped) > 80
             ):
-                # Consider extracting to separate method
-                # Could add logic to extract comprehension
                 pass
 
-
-            elif stripped.count(":") > 5 and stripped.count(",") > 5:
-                # Could extract to builder method
+            elif stripped.count(": ") > 5 and stripped.count(", ") > 5:
                 pass
 
             modified_lines.append(line)
@@ -583,7 +555,7 @@ class RefactoringAgent(SubAgent):
             "detection_reasoning": "",
         }
 
-        if error_context:"""
+        if error_context: """
 
         replacement_pattern = """ recommendations = {
             "urgent_agents": [],
@@ -609,11 +581,10 @@ class RefactoringAgent(SubAgent):
     def _extract_logical_sections(
         self, func_content: str, func_info: dict[str, t.Any]
     ) -> list[dict[str, str]]:
-
-        sections = []
+        sections: list[dict[str, str]] = []
         lines = func_content.split("\n")
-        current_section = []
-        section_type = None
+        current_section: list[str] = []
+        section_type: str | None = None
 
         for line in lines:
             stripped = line.strip()
@@ -632,7 +603,6 @@ class RefactoringAgent(SubAgent):
             else:
                 current_section.append(line)
 
-        # Handle final section
         if current_section:
             sections.append(
                 self._create_section(current_section, section_type, len(sections))
@@ -643,7 +613,6 @@ class RefactoringAgent(SubAgent):
     def _should_start_new_section(
         self, stripped: str, current_section_type: str | None
    ) -> bool:
-        """Determine if a line should start a new logical section."""
         if stripped.startswith("if ") and len(stripped) > 50:
             return True
         return (
@@ -653,7 +622,6 @@ class RefactoringAgent(SubAgent):
     def _initialize_new_section(
         self, line: str, stripped: str
     ) -> tuple[list[str], str]:
-        """Initialize a new section based on the line type."""
         if stripped.startswith("if ") and len(stripped) > 50:
             return [line], "conditional"
         elif stripped.startswith(("for ", "while ")):
@@ -662,8 +630,7 @@ class RefactoringAgent(SubAgent):
 
     def _create_section(
         self, current_section: list[str], section_type: str | None, section_count: int
-    ) -> dict[str, str
-        """Create a section dictionary from the current section data."""
+    ) -> dict[str, str]:
         effective_type = section_type or "general"
         name_prefix = "handle" if effective_type == "conditional" else "process"
 
@@ -674,7 +641,6 @@ class RefactoringAgent(SubAgent):
         }
 
     def _analyze_dead_code(self, tree: ast.AST, content: str) -> dict[str, t.Any]:
-        """Enhanced analysis for dead/unused elements."""
         analysis: dict[str, list[t.Any]] = {
             "unused_imports": [],
             "unused_variables": [],
@@ -694,14 +660,12 @@ class RefactoringAgent(SubAgent):
         return analysis
 
     def _collect_usage_data(self, tree: ast.AST) -> dict[str, t.Any]:
-        """Enhanced collection of usage data from AST."""
         collector = self._create_usage_data_collector()
         analyzer = self._create_enhanced_usage_analyzer(collector)
         analyzer.visit(tree)
         return collector.get_results(analyzer)
 
     def _create_usage_data_collector(self) -> "UsageDataCollector":
-        """Create data collector for usage analysis."""
         from . import refactoring_helpers
 
         return refactoring_helpers.UsageDataCollector()
@@ -709,7 +673,6 @@ class RefactoringAgent(SubAgent):
     def _create_enhanced_usage_analyzer(
         self, collector: "UsageDataCollector"
     ) -> "EnhancedUsageAnalyzer":
-        """Create the enhanced usage analyzer."""
         from . import refactoring_helpers
 
         return refactoring_helpers.EnhancedUsageAnalyzer(collector)
@@ -719,7 +682,6 @@ class RefactoringAgent(SubAgent):
         analysis: dict[str, t.Any],
         analyzer_result: dict[str, t.Any],
     ) -> None:
-        """Process unused imports and add to analysis."""
         import_lines: list[tuple[int, str, str]] = analyzer_result["import_lines"]
         for line_no, name, import_type in import_lines:
             if name not in analyzer_result["used_names"]:
@@ -737,7 +699,6 @@ class RefactoringAgent(SubAgent):
         analysis: dict[str, t.Any],
         analyzer_result: dict[str, t.Any],
     ) -> None:
-        """Process unused functions and add to analysis."""
         all_unused_functions: list[dict[str, t.Any]] = analyzer_result[
             "unused_functions"
         ]
@@ -753,7 +714,6 @@ class RefactoringAgent(SubAgent):
     def _process_unused_classes(
         self, analysis: dict[str, t.Any], analyzer_result: dict[str, t.Any]
     ) -> None:
-        """Process unused classes and add to analysis."""
         if "unused_classes" not in analyzer_result:
             return
 
@@ -770,11 +730,9 @@ class RefactoringAgent(SubAgent):
     def _detect_unreachable_code(
         self, analysis: dict[str, t.Any], tree: ast.AST, content: str
     ) -> None:
-        """Detect unreachable code patterns."""
-
         class UnreachableCodeDetector(ast.NodeVisitor):
-            def __init__(self):
-                self.unreachable_blocks = []
+            def __init__(self) -> None:
+                self.unreachable_blocks: list[dict[str, t.Any]] = []
 
             def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
                 self._check_unreachable_in_function(node)
@@ -787,10 +745,8 @@ class RefactoringAgent(SubAgent):
             def _check_unreachable_in_function(
                 self, node: ast.FunctionDef | ast.AsyncFunctionDef
             ) -> None:
-                """Check for unreachable code after return/raise statements."""
                 for i, stmt in enumerate(node.body):
                     if isinstance(stmt, ast.Return | ast.Raise):
-                        # Check if there are statements after this
                         if i + 1 < len(node.body):
                             next_stmt = node.body[i + 1]
                             self.unreachable_blocks.append(
@@ -813,28 +769,23 @@ class RefactoringAgent(SubAgent):
     def _detect_redundant_code(
         self, analysis: dict[str, t.Any], tree: ast.AST, content: str
     ) -> None:
-        """Detect redundant code patterns."""
         lines = content.split("\n")
 
-        # Look for duplicate code blocks
         line_hashes = {}
         for i, line in enumerate(lines):
             if line.strip() and not line.strip().startswith("#"):
                 line_hash = hash(line.strip())
                 if line_hash in line_hashes:
-                    # Potential duplicate
                     analysis["removable_items"].append(
                         f"potential duplicate code at line {i + 1}"
                     )
                 line_hashes[line_hash] = i
 
-        # Look for empty except blocks
         class RedundantPatternDetector(ast.NodeVisitor):
-            def __init__(self):
-                self.redundant_items = []
+            def __init__(self) -> None:
+                self.redundant_items: list[dict[str, t.Any]] = []
 
             def visit_ExceptHandler(self, node: ast.ExceptHandler) -> None:
-                # Check for empty except blocks or just 'pass'
                 if len(node.body) == 1 and isinstance(node.body[0], ast.Pass):
                     self.redundant_items.append(
                         {"type": "empty_except", "line": node.lineno}
@@ -842,7 +793,6 @@ class RefactoringAgent(SubAgent):
                 self.generic_visit(node)
 
             def visit_If(self, node: ast.If) -> None:
-                # Check for if True: or if False:
                 if isinstance(node.test, ast.Constant):
                     if node.test.value is True:
                         self.redundant_items.append(
@@ -865,7 +815,6 @@ class RefactoringAgent(SubAgent):
     def _should_remove_import_line(
         self, line: str, unused_import: dict[str, str]
     ) -> bool:
-        """Check if an import line should be removed."""
         if unused_import["type"] == "import":
             return f"import {unused_import['name']}" in line
         elif unused_import["type"] == "from_import":
@@ -879,20 +828,18 @@ class RefactoringAgent(SubAgent):
     def _find_lines_to_remove(
         self, lines: list[str], analysis: dict[str, t.Any]
     ) -> set[int]:
-        """Find line indices that should be removed."""
         lines_to_remove: set[int] = set()
 
         for unused_import in analysis["unused_imports"]:
             line_idx = unused_import["line"] - 1
             if 0 <= line_idx < len(lines):
-                line =
+                line = lines[line_idx]
                 if self._should_remove_import_line(line, unused_import):
                     lines_to_remove.add(line_idx)
 
         return lines_to_remove
 
     def _remove_dead_code_items(self, content: str, analysis: dict[str, t.Any]) -> str:
-        """Enhanced removal of dead code items from content."""
         lines = content.split("\n")
         lines_to_remove = self._collect_all_removable_lines(lines, analysis)
 
@@ -905,7 +852,6 @@ class RefactoringAgent(SubAgent):
     def _collect_all_removable_lines(
         self, lines: list[str], analysis: dict[str, t.Any]
     ) -> set[int]:
-        """Collect all line indices that should be removed."""
         removal_functions = [
             lambda: self._find_lines_to_remove(lines, analysis),
             lambda: self._find_unreachable_lines(lines, analysis),
@@ -921,12 +867,10 @@ class RefactoringAgent(SubAgent):
     def _find_unreachable_lines(
         self, lines: list[str], analysis: dict[str, t.Any]
     ) -> set[int]:
-        """Find line indices for unreachable code."""
         lines_to_remove: set[int] = set()
 
         for item in analysis.get("unreachable_code", []):
             if "line" in item:
-                # Remove the unreachable line (convert to 0-based index)
                 line_idx = item["line"] - 1
                 if 0 <= line_idx < len(lines):
                     lines_to_remove.add(line_idx)
@@ -936,10 +880,8 @@ class RefactoringAgent(SubAgent):
     def _find_redundant_lines(
         self, lines: list[str], analysis: dict[str, t.Any]
     ) -> set[int]:
-        """Find line indices for redundant code patterns."""
         lines_to_remove: set[int] = set()
 
-        # Look for empty except blocks
         for i in range(len(lines)):
             if self._is_empty_except_block(lines, i):
                 empty_pass_idx = self._find_empty_pass_line(lines, i)
@@ -949,12 +891,10 @@ class RefactoringAgent(SubAgent):
         return lines_to_remove
 
     def _is_empty_except_block(self, lines: list[str], line_idx: int) -> bool:
-        """Check if line is an empty except block."""
         stripped = lines[line_idx].strip()
-        return stripped == "except:" or stripped.startswith("except ")
+        return stripped == "except: " or stripped.startswith("except ")
 
     def _find_empty_pass_line(self, lines: list[str], except_idx: int) -> int | None:
-        """Find the pass line in an empty except block."""
         for j in range(except_idx + 1, min(except_idx + 5, len(lines))):
             next_line = lines[j].strip()
             if not next_line:
@@ -967,7 +907,6 @@ class RefactoringAgent(SubAgent):
     def _extract_function_content(
         self, lines: list[str], func_info: dict[str, t.Any]
     ) -> str:
-        """Extract the complete content of a function."""
         start_line = func_info["line_start"] - 1
         end_line = func_info.get("line_end", len(lines)) - 1
 
@@ -982,7 +921,6 @@ class RefactoringAgent(SubAgent):
         func_info: dict[str, t.Any],
         extracted_helpers: list[dict[str, str]],
     ) -> str:
-        """Apply function extraction by replacing original with calls to helpers."""
         lines = content.split("\n")
 
         if not self._is_extraction_valid(lines, func_info, extracted_helpers):
@@ -996,7 +934,6 @@ class RefactoringAgent(SubAgent):
         func_info: dict[str, t.Any],
         extracted_helpers: list[dict[str, str]],
     ) -> bool:
-        """Check if extraction parameters are valid."""
         start_line = func_info["line_start"] - 1
         end_line = func_info.get("line_end", len(lines)) - 1
 
@@ -1008,7 +945,6 @@ class RefactoringAgent(SubAgent):
         func_info: dict[str, t.Any],
         extracted_helpers: list[dict[str, str]],
     ) -> str:
-        """Perform the actual function extraction."""
         new_lines = self._replace_function_with_calls(
             lines, func_info, extracted_helpers
         )
@@ -1020,13 +956,12 @@ class RefactoringAgent(SubAgent):
         func_info: dict[str, t.Any],
         extracted_helpers: list[dict[str, str]],
     ) -> list[str]:
-        """Replace the original function with calls to helper methods."""
         start_line = func_info["line_start"] - 1
         end_line = func_info.get("line_end", len(lines)) - 1
         func_indent = len(lines[start_line]) - len(lines[start_line].lstrip())
         indent = " " * (func_indent + 4)
 
-        new_func_lines = [lines[start_line]]
+        new_func_lines = [lines[start_line]]
         for helper in extracted_helpers:
             new_func_lines.append(f"{indent}self.{helper['name']}()")
 
@@ -1038,7 +973,6 @@ class RefactoringAgent(SubAgent):
         func_info: dict[str, t.Any],
        extracted_helpers: list[dict[str, str]],
     ) -> str:
-        """Add helper method definitions at the end of the class."""
         start_line = func_info["line_start"] - 1
         class_end = self._find_class_end(new_lines, start_line)
 
@@ -1052,14 +986,12 @@ class RefactoringAgent(SubAgent):
         return "\n".join(new_lines)
 
     def _find_class_end(self, lines: list[str], func_start: int) -> int:
-        """Find the end of the class containing the function."""
         class_indent = self._find_class_indent(lines, func_start)
         if class_indent is None:
             return len(lines)
         return self._find_class_end_line(lines, func_start, class_indent)
 
     def _find_class_indent(self, lines: list[str], func_start: int) -> int | None:
-        """Find the indentation level of the class containing the function."""
         for i in range(func_start, -1, -1):
             if lines[i].strip().startswith("class "):
                 return len(lines[i]) - len(lines[i].lstrip())
@@ -1068,7 +1000,6 @@ class RefactoringAgent(SubAgent):
     def _find_class_end_line(
         self, lines: list[str], func_start: int, class_indent: int
     ) -> int:
-        """Find the line where the class ends based on indentation."""
         for i in range(func_start + 1, len(lines)):
             line = lines[i]
             if line.strip() and len(line) - len(line.lstrip()) <= class_indent: