crackerjack-0.29.0-py3-none-any.whl → crackerjack-0.31.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of crackerjack has been flagged as possibly problematic.

Files changed (158)
  1. crackerjack/CLAUDE.md +1005 -0
  2. crackerjack/RULES.md +380 -0
  3. crackerjack/__init__.py +42 -13
  4. crackerjack/__main__.py +225 -253
  5. crackerjack/agents/__init__.py +41 -0
  6. crackerjack/agents/architect_agent.py +281 -0
  7. crackerjack/agents/base.py +169 -0
  8. crackerjack/agents/coordinator.py +512 -0
  9. crackerjack/agents/documentation_agent.py +498 -0
  10. crackerjack/agents/dry_agent.py +388 -0
  11. crackerjack/agents/formatting_agent.py +245 -0
  12. crackerjack/agents/import_optimization_agent.py +281 -0
  13. crackerjack/agents/performance_agent.py +669 -0
  14. crackerjack/agents/proactive_agent.py +104 -0
  15. crackerjack/agents/refactoring_agent.py +788 -0
  16. crackerjack/agents/security_agent.py +529 -0
  17. crackerjack/agents/test_creation_agent.py +652 -0
  18. crackerjack/agents/test_specialist_agent.py +486 -0
  19. crackerjack/agents/tracker.py +212 -0
  20. crackerjack/api.py +560 -0
  21. crackerjack/cli/__init__.py +24 -0
  22. crackerjack/cli/facade.py +104 -0
  23. crackerjack/cli/handlers.py +267 -0
  24. crackerjack/cli/interactive.py +471 -0
  25. crackerjack/cli/options.py +401 -0
  26. crackerjack/cli/utils.py +18 -0
  27. crackerjack/code_cleaner.py +670 -0
  28. crackerjack/config/__init__.py +19 -0
  29. crackerjack/config/hooks.py +218 -0
  30. crackerjack/core/__init__.py +0 -0
  31. crackerjack/core/async_workflow_orchestrator.py +406 -0
  32. crackerjack/core/autofix_coordinator.py +200 -0
  33. crackerjack/core/container.py +104 -0
  34. crackerjack/core/enhanced_container.py +542 -0
  35. crackerjack/core/performance.py +243 -0
  36. crackerjack/core/phase_coordinator.py +561 -0
  37. crackerjack/core/proactive_workflow.py +316 -0
  38. crackerjack/core/session_coordinator.py +289 -0
  39. crackerjack/core/workflow_orchestrator.py +640 -0
  40. crackerjack/dynamic_config.py +577 -0
  41. crackerjack/errors.py +263 -41
  42. crackerjack/executors/__init__.py +11 -0
  43. crackerjack/executors/async_hook_executor.py +431 -0
  44. crackerjack/executors/cached_hook_executor.py +242 -0
  45. crackerjack/executors/hook_executor.py +345 -0
  46. crackerjack/executors/individual_hook_executor.py +669 -0
  47. crackerjack/intelligence/__init__.py +44 -0
  48. crackerjack/intelligence/adaptive_learning.py +751 -0
  49. crackerjack/intelligence/agent_orchestrator.py +551 -0
  50. crackerjack/intelligence/agent_registry.py +414 -0
  51. crackerjack/intelligence/agent_selector.py +502 -0
  52. crackerjack/intelligence/integration.py +290 -0
  53. crackerjack/interactive.py +576 -315
  54. crackerjack/managers/__init__.py +11 -0
  55. crackerjack/managers/async_hook_manager.py +135 -0
  56. crackerjack/managers/hook_manager.py +137 -0
  57. crackerjack/managers/publish_manager.py +411 -0
  58. crackerjack/managers/test_command_builder.py +151 -0
  59. crackerjack/managers/test_executor.py +435 -0
  60. crackerjack/managers/test_manager.py +258 -0
  61. crackerjack/managers/test_manager_backup.py +1124 -0
  62. crackerjack/managers/test_progress.py +144 -0
  63. crackerjack/mcp/__init__.py +0 -0
  64. crackerjack/mcp/cache.py +336 -0
  65. crackerjack/mcp/client_runner.py +104 -0
  66. crackerjack/mcp/context.py +615 -0
  67. crackerjack/mcp/dashboard.py +636 -0
  68. crackerjack/mcp/enhanced_progress_monitor.py +479 -0
  69. crackerjack/mcp/file_monitor.py +336 -0
  70. crackerjack/mcp/progress_components.py +569 -0
  71. crackerjack/mcp/progress_monitor.py +949 -0
  72. crackerjack/mcp/rate_limiter.py +332 -0
  73. crackerjack/mcp/server.py +22 -0
  74. crackerjack/mcp/server_core.py +244 -0
  75. crackerjack/mcp/service_watchdog.py +501 -0
  76. crackerjack/mcp/state.py +395 -0
  77. crackerjack/mcp/task_manager.py +257 -0
  78. crackerjack/mcp/tools/__init__.py +17 -0
  79. crackerjack/mcp/tools/core_tools.py +249 -0
  80. crackerjack/mcp/tools/error_analyzer.py +308 -0
  81. crackerjack/mcp/tools/execution_tools.py +370 -0
  82. crackerjack/mcp/tools/execution_tools_backup.py +1097 -0
  83. crackerjack/mcp/tools/intelligence_tool_registry.py +80 -0
  84. crackerjack/mcp/tools/intelligence_tools.py +314 -0
  85. crackerjack/mcp/tools/monitoring_tools.py +502 -0
  86. crackerjack/mcp/tools/proactive_tools.py +384 -0
  87. crackerjack/mcp/tools/progress_tools.py +141 -0
  88. crackerjack/mcp/tools/utility_tools.py +341 -0
  89. crackerjack/mcp/tools/workflow_executor.py +360 -0
  90. crackerjack/mcp/websocket/__init__.py +14 -0
  91. crackerjack/mcp/websocket/app.py +39 -0
  92. crackerjack/mcp/websocket/endpoints.py +559 -0
  93. crackerjack/mcp/websocket/jobs.py +253 -0
  94. crackerjack/mcp/websocket/server.py +116 -0
  95. crackerjack/mcp/websocket/websocket_handler.py +78 -0
  96. crackerjack/mcp/websocket_server.py +10 -0
  97. crackerjack/models/__init__.py +31 -0
  98. crackerjack/models/config.py +93 -0
  99. crackerjack/models/config_adapter.py +230 -0
  100. crackerjack/models/protocols.py +118 -0
  101. crackerjack/models/task.py +154 -0
  102. crackerjack/monitoring/ai_agent_watchdog.py +450 -0
  103. crackerjack/monitoring/regression_prevention.py +638 -0
  104. crackerjack/orchestration/__init__.py +0 -0
  105. crackerjack/orchestration/advanced_orchestrator.py +970 -0
  106. crackerjack/orchestration/execution_strategies.py +341 -0
  107. crackerjack/orchestration/test_progress_streamer.py +636 -0
  108. crackerjack/plugins/__init__.py +15 -0
  109. crackerjack/plugins/base.py +200 -0
  110. crackerjack/plugins/hooks.py +246 -0
  111. crackerjack/plugins/loader.py +335 -0
  112. crackerjack/plugins/managers.py +259 -0
  113. crackerjack/py313.py +8 -3
  114. crackerjack/services/__init__.py +22 -0
  115. crackerjack/services/cache.py +314 -0
  116. crackerjack/services/config.py +347 -0
  117. crackerjack/services/config_integrity.py +99 -0
  118. crackerjack/services/contextual_ai_assistant.py +516 -0
  119. crackerjack/services/coverage_ratchet.py +347 -0
  120. crackerjack/services/debug.py +736 -0
  121. crackerjack/services/dependency_monitor.py +617 -0
  122. crackerjack/services/enhanced_filesystem.py +439 -0
  123. crackerjack/services/file_hasher.py +151 -0
  124. crackerjack/services/filesystem.py +395 -0
  125. crackerjack/services/git.py +165 -0
  126. crackerjack/services/health_metrics.py +611 -0
  127. crackerjack/services/initialization.py +847 -0
  128. crackerjack/services/log_manager.py +286 -0
  129. crackerjack/services/logging.py +174 -0
  130. crackerjack/services/metrics.py +578 -0
  131. crackerjack/services/pattern_cache.py +362 -0
  132. crackerjack/services/pattern_detector.py +515 -0
  133. crackerjack/services/performance_benchmarks.py +653 -0
  134. crackerjack/services/security.py +163 -0
  135. crackerjack/services/server_manager.py +234 -0
  136. crackerjack/services/smart_scheduling.py +144 -0
  137. crackerjack/services/tool_version_service.py +61 -0
  138. crackerjack/services/unified_config.py +437 -0
  139. crackerjack/services/version_checker.py +248 -0
  140. crackerjack/slash_commands/__init__.py +14 -0
  141. crackerjack/slash_commands/init.md +122 -0
  142. crackerjack/slash_commands/run.md +163 -0
  143. crackerjack/slash_commands/status.md +127 -0
  144. crackerjack-0.31.4.dist-info/METADATA +742 -0
  145. crackerjack-0.31.4.dist-info/RECORD +148 -0
  146. crackerjack-0.31.4.dist-info/entry_points.txt +2 -0
  147. crackerjack/.gitignore +0 -34
  148. crackerjack/.libcst.codemod.yaml +0 -18
  149. crackerjack/.pdm.toml +0 -1
  150. crackerjack/.pre-commit-config-ai.yaml +0 -149
  151. crackerjack/.pre-commit-config-fast.yaml +0 -69
  152. crackerjack/.pre-commit-config.yaml +0 -114
  153. crackerjack/crackerjack.py +0 -4140
  154. crackerjack/pyproject.toml +0 -285
  155. crackerjack-0.29.0.dist-info/METADATA +0 -1289
  156. crackerjack-0.29.0.dist-info/RECORD +0 -17
  157. {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/WHEEL +0 -0
  158. {crackerjack-0.29.0.dist-info → crackerjack-0.31.4.dist-info}/licenses/LICENSE +0 -0
crackerjack/services/pattern_detector.py (new file)
@@ -0,0 +1,515 @@
+ import ast
+ import logging
+ from dataclasses import dataclass
+ from pathlib import Path
+
+ from ..agents.base import Issue, IssueType, Priority
+ from .pattern_cache import CachedPattern, PatternCache
+
+
+ @dataclass
+ class AntiPattern:
+     """Detected anti-pattern that could lead to issues."""
+
+     pattern_type: str
+     severity: Priority
+     file_path: str
+     line_number: int | None
+     description: str
+     suggestion: str
+     prevention_strategy: str
+
+
+ class PatternDetector:
+     """Detects anti-patterns and suggests proactive refactoring.
+
+     Analyzes code to identify patterns that commonly lead to quality
+     violations, allowing proactive fixes before issues occur.
+     """
+
+     def __init__(self, project_path: Path, pattern_cache: PatternCache) -> None:
+         self.project_path = project_path
+         self.pattern_cache = pattern_cache
+         self.logger = logging.getLogger(__name__)
+
+         # Known anti-patterns and their detection rules
+         self._anti_patterns = {
+             "complexity_hotspot": {
+                 "detector": self._detect_complexity_hotspots,
+                 "description": "Functions approaching complexity limits",
+                 "prevention": "Extract methods, use helper functions",
+             },
+             "code_duplication": {
+                 "detector": self._detect_code_duplication,
+                 "description": "Repeated code patterns across files",
+                 "prevention": "Extract common functionality to utilities",
+             },
+             "performance_issues": {
+                 "detector": self._detect_performance_issues,
+                 "description": "Inefficient code patterns",
+                 "prevention": "Use optimized algorithms and data structures",
+             },
+             "security_risks": {
+                 "detector": self._detect_security_risks,
+                 "description": "Potentially unsafe code patterns",
+                 "prevention": "Apply secure coding practices",
+             },
+             "import_complexity": {
+                 "detector": self._detect_import_complexity,
+                 "description": "Complex or problematic import patterns",
+                 "prevention": "Organize imports, avoid circular dependencies",
+             },
+         }
+
+     async def analyze_codebase(self) -> list[AntiPattern]:
+         """Analyze the entire codebase for anti-patterns."""
+         self.logger.info("Starting proactive anti-pattern analysis")
+
+         anti_patterns = []
+         python_files = list(self.project_path.glob("**/*.py"))
+
+         for file_path in python_files:
+             # Skip files in common ignore patterns
+             if self._should_skip_file(file_path):
+                 continue
+
+             file_anti_patterns = await self._analyze_file(file_path)
+             anti_patterns.extend(file_anti_patterns)
+
+         self.logger.info(f"Detected {len(anti_patterns)} potential anti-patterns")
+         return anti_patterns
+
+     async def _analyze_file(self, file_path: Path) -> list[AntiPattern]:
+         """Analyze a single file for anti-patterns."""
+         anti_patterns = []
+
+         try:
+             content = file_path.read_text(encoding="utf-8")
+
+             # Parse AST for analysis
+             try:
+                 tree = ast.parse(content, filename=str(file_path))
+             except SyntaxError as e:
+                 # File has syntax errors, skip analysis
+                 self.logger.warning(f"Syntax error in {file_path}: {e}")
+                 return []
+
+             # Run all anti-pattern detectors
+             for pattern_name, pattern_info in self._anti_patterns.items():
+                 detector = pattern_info["detector"]
+                 try:
+                     detected = await detector(file_path, content, tree)  # type: ignore[operator]
+                     anti_patterns.extend(detected)
+                 except Exception as e:
+                     self.logger.warning(
+                         f"Error in {pattern_name} detector for {file_path}: {e}"
+                     )
+
+         except Exception as e:
+             self.logger.warning(f"Failed to analyze {file_path}: {e}")
+
+         return anti_patterns
+
+     async def _detect_complexity_hotspots(
+         self, file_path: Path, content: str, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Detect functions that are approaching complexity limits."""
+         anti_patterns = []
+
+         class ComplexityVisitor(ast.NodeVisitor):
+             def __init__(self) -> None:
+                 self.functions: list[
+                     tuple[str, int, int]
+                 ] = []  # name, line, complexity_estimate
+
+             def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
+                 # Simple complexity estimation based on control structures
+                 complexity = 1  # Base complexity
+
+                 for child in ast.walk(node):
+                     if isinstance(child, ast.If | ast.For | ast.While | ast.With):
+                         complexity += 1
+                     elif isinstance(child, ast.Try):
+                         complexity += 1
+                     elif isinstance(child, ast.ExceptHandler):
+                         complexity += 1
+                     elif isinstance(child, ast.BoolOp):
+                         complexity += len(child.values) - 1
+
+                 self.functions.append((node.name, node.lineno, complexity))
+                 self.generic_visit(node)
+
+         visitor = ComplexityVisitor()
+         visitor.visit(tree)
+
+         # Flag functions approaching the complexity limit (13)
+         for func_name, line_no, complexity in visitor.functions:
+             if complexity >= 10:  # Warn before hitting the limit
+                 anti_patterns.append(
+                     AntiPattern(
+                         pattern_type="complexity_hotspot",
+                         severity=Priority.HIGH if complexity >= 12 else Priority.MEDIUM,
+                         file_path=str(file_path),
+                         line_number=line_no,
+                         description=f"Function '{func_name}' has complexity {complexity} (approaching limit of 13)",
+                         suggestion=f"Break down '{func_name}' into smaller helper methods",
+                         prevention_strategy="extract_method",
+                     )
+                 )
+
+         return anti_patterns
+
+     async def _detect_code_duplication(
+         self, file_path: Path, content: str, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Detect potential code duplication patterns."""
+         anti_patterns = []
+
+         # Simple heuristic: look for repeated string literals or similar patterns
+         lines = content.split("\n")
+         line_groups = {}
+
+         for i, line in enumerate(lines, 1):
+             stripped = line.strip()
+             if len(stripped) > 20 and not stripped.startswith("#"):
+                 if stripped in line_groups:
+                     line_groups[stripped].append(i)
+                 else:
+                     line_groups[stripped] = [i]
+
+         # Flag lines that appear multiple times
+         for line_content, line_numbers in line_groups.items():
+             if len(line_numbers) >= 3:  # Appears 3+ times
+                 anti_patterns.append(
+                     AntiPattern(
+                         pattern_type="code_duplication",
+                         severity=Priority.MEDIUM,
+                         file_path=str(file_path),
+                         line_number=line_numbers[0],
+                         description=f"Line appears {len(line_numbers)} times: '{line_content[:50]}...'",
+                         suggestion="Extract common functionality to a utility function",
+                         prevention_strategy="extract_utility",
+                     )
+                 )
+
+         return anti_patterns
+
+     async def _detect_performance_issues(
+         self, file_path: Path, content: str, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Detect potential performance anti-patterns."""
+         anti_patterns = []
+
+         class PerformanceVisitor(ast.NodeVisitor):
+             def __init__(self) -> None:
+                 self.issues: list[
+                     tuple[int, str, str]
+                 ] = []  # line, description, suggestion
+
+             def visit_For(self, node: ast.For) -> None:
+                 # Check for nested loops (potential O(n²))
+                 for child in ast.walk(node.body[0] if node.body else node):
+                     if isinstance(child, ast.For | ast.While) and child != node:
+                         self.issues.append(
+                             (
+                                 node.lineno,
+                                 "Nested loop detected - potential O(n²) complexity",
+                                 "Consider using dictionary lookups or set operations",
+                             )
+                         )
+                         break
+
+                 # Check for list concatenation in loops
+                 for stmt in node.body:
+                     if (
+                         isinstance(stmt, ast.AugAssign)
+                         and isinstance(stmt.op, ast.Add)
+                         and isinstance(stmt.target, ast.Name)
+                     ):
+                         self.issues.append(
+                             (
+                                 stmt.lineno,
+                                 "List concatenation in loop - inefficient",
+                                 "Use list.append() and join at the end",
+                             )
+                         )
+
+                 self.generic_visit(node)
+
+         visitor = PerformanceVisitor()
+         visitor.visit(tree)
+
+         for line_no, description, suggestion in visitor.issues:
+             anti_patterns.append(
+                 AntiPattern(
+                     pattern_type="performance_issues",
+                     severity=Priority.MEDIUM,
+                     file_path=str(file_path),
+                     line_number=line_no,
+                     description=description,
+                     suggestion=suggestion,
+                     prevention_strategy="optimize_algorithm",
+                 )
+             )
+
+         return anti_patterns
+
+     async def _detect_security_risks(
+         self, file_path: Path, content: str, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Detect potential security anti-patterns."""
+         anti_patterns = []
+
+         # Check for hardcoded paths
+         hardcoded_path_patterns = self._check_hardcoded_paths(file_path, content)
+         anti_patterns.extend(hardcoded_path_patterns)
+
+         # Check for subprocess security issues
+         subprocess_patterns = self._check_subprocess_security(file_path, tree)
+         anti_patterns.extend(subprocess_patterns)
+
+         return anti_patterns
+
+     def _check_hardcoded_paths(
+         self, file_path: Path, content: str
+     ) -> list[AntiPattern]:
+         """Check for hardcoded paths (common security issue)."""
+         anti_patterns = []
+
+         if "/tmp/" in content or "C:\\" in content:  # nosec B108
+             lines = content.split("\n")
+             for i, line in enumerate(lines, 1):
+                 if "/tmp/" in line or "C:\\" in line:  # nosec B108
+                     anti_patterns.append(
+                         AntiPattern(
+                             pattern_type="security_risks",
+                             severity=Priority.HIGH,
+                             file_path=str(file_path),
+                             line_number=i,
+                             description="Hardcoded path detected - potential security risk",
+                             suggestion="Use tempfile module for temporary files",
+                             prevention_strategy="use_secure_temp_files",
+                         )
+                     )
+                     break
+
+         return anti_patterns
+
+     def _check_subprocess_security(
+         self, file_path: Path, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Check for shell=True in subprocess calls."""
+         anti_patterns = []
+
+         class SecurityVisitor(ast.NodeVisitor):
+             def __init__(self) -> None:
+                 self.issues: list[tuple[int, str, str]] = []
+
+             def visit_Call(self, node: ast.Call) -> None:
+                 # Check for subprocess with shell=True
+                 if (
+                     isinstance(node.func, ast.Attribute)
+                     and isinstance(node.func.value, ast.Name)
+                     and node.func.value.id == "subprocess"
+                 ):
+                     for keyword in node.keywords:
+                         if (
+                             keyword.arg == "shell"
+                             and isinstance(keyword.value, ast.Constant)
+                             and keyword.value.value is True
+                         ):
+                             self.issues.append(
+                                 (
+                                     node.lineno,
+                                     "subprocess with shell=True - security risk",
+                                     "Avoid shell=True or validate inputs carefully",
+                                 )
+                             )
+
+                 self.generic_visit(node)
+
+         visitor = SecurityVisitor()
+         visitor.visit(tree)
+
+         for line_no, description, suggestion in visitor.issues:
+             anti_patterns.append(
+                 AntiPattern(
+                     pattern_type="security_risks",
+                     severity=Priority.HIGH,
+                     file_path=str(file_path),
+                     line_number=line_no,
+                     description=description,
+                     suggestion=suggestion,
+                     prevention_strategy="secure_subprocess",
+                 )
+             )
+
+         return anti_patterns
+
+     async def _detect_import_complexity(
+         self, file_path: Path, content: str, tree: ast.AST
+     ) -> list[AntiPattern]:
+         """Detect complex or problematic import patterns."""
+         anti_patterns = []
+
+         class ImportVisitor(ast.NodeVisitor):
+             def __init__(self) -> None:
+                 self.imports: list[tuple[int, str]] = []
+                 self.import_count = 0
+
+             def visit_Import(self, node: ast.Import) -> None:
+                 self.import_count += len(node.names)
+                 for alias in node.names:
+                     if alias.name.count(".") > 2:  # Deep import
+                         self.imports.append((node.lineno, f"Deep import: {alias.name}"))
+                 self.generic_visit(node)
+
+             def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
+                 if node.module:
+                     self.import_count += len(node.names) if node.names else 1
+                     if (
+                         node.names and len(node.names) > 10
+                     ):  # Many imports from one module
+                         self.imports.append(
+                             (node.lineno, f"Many imports from {node.module}")
+                         )
+                 self.generic_visit(node)
+
+         visitor = ImportVisitor()
+         visitor.visit(tree)
+
+         # Flag files with excessive imports
+         if visitor.import_count > 50:
+             anti_patterns.append(
+                 AntiPattern(
+                     pattern_type="import_complexity",
+                     severity=Priority.MEDIUM,
+                     file_path=str(file_path),
+                     line_number=1,
+                     description=f"File has {visitor.import_count} imports - may indicate tight coupling",
+                     suggestion="Consider breaking file into smaller modules",
+                     prevention_strategy="modular_design",
+                 )
+             )
+
+         # Flag specific problematic imports
+         for line_no, description in visitor.imports:
+             anti_patterns.append(
+                 AntiPattern(
+                     pattern_type="import_complexity",
+                     severity=Priority.LOW,
+                     file_path=str(file_path),
+                     line_number=line_no,
+                     description=description,
+                     suggestion="Simplify import structure",
+                     prevention_strategy="clean_imports",
+                 )
+             )
+
+         return anti_patterns
+
+     def _should_skip_file(self, file_path: Path) -> bool:
+         """Check if file should be skipped in analysis."""
+         skip_patterns = [
+             "__pycache__",
+             ".git",
+             ".venv",
+             "venv",
+             ".tox",
+             "build",
+             "dist",
+             ".pytest_cache",
+             "node_modules",
+         ]
+
+         path_str = str(file_path)
+         return any(pattern in path_str for pattern in skip_patterns)
+
+     async def suggest_proactive_refactoring(
+         self, anti_patterns: list[AntiPattern]
+     ) -> list[Issue]:
+         """Convert anti-patterns to proactive refactoring issues."""
+         issues = []
+
+         for anti_pattern in anti_patterns:
+             # Map anti-pattern types to issue types
+             issue_type_map = {
+                 "complexity_hotspot": IssueType.COMPLEXITY,
+                 "code_duplication": IssueType.DRY_VIOLATION,
+                 "performance_issues": IssueType.PERFORMANCE,
+                 "security_risks": IssueType.SECURITY,
+                 "import_complexity": IssueType.IMPORT_ERROR,
+             }
+
+             issue_type = issue_type_map.get(
+                 anti_pattern.pattern_type, IssueType.FORMATTING
+             )
+
+             issue = Issue(
+                 id=f"proactive_{anti_pattern.pattern_type}_{hash(anti_pattern.file_path + str(anti_pattern.line_number))}",
+                 type=issue_type,
+                 severity=anti_pattern.severity,
+                 message=f"Proactive: {anti_pattern.description}",
+                 file_path=anti_pattern.file_path,
+                 line_number=anti_pattern.line_number,
+                 details=[
+                     anti_pattern.suggestion,
+                     f"Prevention strategy: {anti_pattern.prevention_strategy}",
+                 ],
+                 stage="proactive_analysis",
+             )
+
+             issues.append(issue)
+
+         return issues
+
+     async def get_cached_solutions(
+         self, anti_patterns: list[AntiPattern]
+     ) -> dict[str, CachedPattern]:
+         """Get cached solutions for detected anti-patterns."""
+         solutions = {}
+
+         for anti_pattern in anti_patterns:
+             solution_key = self._generate_solution_key(anti_pattern)
+             cached_pattern = self._find_cached_pattern_for_anti_pattern(anti_pattern)
+
+             if cached_pattern:
+                 solutions[solution_key] = cached_pattern
+
+         return solutions
+
+     def _generate_solution_key(self, anti_pattern: AntiPattern) -> str:
+         """Generate unique key for anti-pattern solution."""
+         return f"{anti_pattern.pattern_type}_{anti_pattern.file_path}_{anti_pattern.line_number}"
+
+     def _find_cached_pattern_for_anti_pattern(
+         self, anti_pattern: AntiPattern
+     ) -> CachedPattern | None:
+         """Find cached pattern for a specific anti-pattern."""
+         issue_type = self._map_anti_pattern_to_issue_type(anti_pattern.pattern_type)
+         if not issue_type:
+             return None
+
+         temp_issue = self._create_temp_issue_for_lookup(anti_pattern, issue_type)
+         return self.pattern_cache.get_best_pattern_for_issue(temp_issue)
+
+     def _map_anti_pattern_to_issue_type(self, pattern_type: str) -> IssueType | None:
+         """Map anti-pattern type to issue type."""
+         return {
+             "complexity_hotspot": IssueType.COMPLEXITY,
+             "code_duplication": IssueType.DRY_VIOLATION,
+             "performance_issues": IssueType.PERFORMANCE,
+             "security_risks": IssueType.SECURITY,
+         }.get(pattern_type)
+
+     def _create_temp_issue_for_lookup(
+         self, anti_pattern: AntiPattern, issue_type: IssueType
+     ) -> Issue:
+         """Create temporary issue for pattern cache lookup."""
+         return Issue(
+             id="temp",
+             type=issue_type,
+             severity=anti_pattern.severity,
+             message=anti_pattern.description,
+             file_path=anti_pattern.file_path,
+         )
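
For orientation only (not part of the published diff): a minimal sketch of how the new PatternDetector API added above could be driven. The module paths match files listed in this release; the no-argument PatternCache() construction is an assumption, since its constructor is not shown in this hunk.

    import asyncio
    from pathlib import Path

    from crackerjack.services.pattern_cache import PatternCache
    from crackerjack.services.pattern_detector import PatternDetector


    async def main() -> None:
        # Assumption: PatternCache() takes no required arguments; its signature is not part of this hunk.
        detector = PatternDetector(project_path=Path("."), pattern_cache=PatternCache())
        anti_patterns = await detector.analyze_codebase()
        issues = await detector.suggest_proactive_refactoring(anti_patterns)
        for issue in issues:
            print(f"{issue.file_path}:{issue.line_number} {issue.message}")


    if __name__ == "__main__":
        asyncio.run(main())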