@voodocs/cli 2.4.0 → 2.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +47 -0
- package/README.md +26 -0
- package/lib/cli/analyze.py +277 -0
- package/lib/darkarts/priority_analyzer/__init__.py +0 -0
- package/lib/darkarts/priority_analyzer/analyzer.py +301 -0
- package/lib/darkarts/priority_analyzer/complexity.py +271 -0
- package/lib/darkarts/priority_analyzer/dependencies.py +275 -0
- package/lib/darkarts/priority_analyzer/security.py +200 -0
- package/lib/darkarts/voodocs_lite_dict.py +216 -0
- package/lib/darkarts/voodocs_lite_dict_v2.py +198 -0
- package/lib/darkarts/voodocs_lite_parser.py +343 -0
- package/package.json +5 -1
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Complexity Analyzer for VooDocs Priority System
|
|
3
|
+
|
|
4
|
+
Analyzes file complexity based on:
|
|
5
|
+
- Lines of code (LOC)
|
|
6
|
+
- Cyclomatic complexity (control flow)
|
|
7
|
+
- Function count
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import re
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Dict
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class ComplexityAnalyzer:
    """Analyzes code complexity for priority scoring.

    Produces raw metrics -- lines of code, a weighted estimate of
    cyclomatic complexity, and function count -- and converts each into
    a bounded sub-score.  ``analyze_file`` sums them into a total score
    capped at 100.
    """

    # Control flow keywords by language.
    # NOTE(review): the `if`/`for`/`while`/`try` patterns expect C/JS
    # style syntax (a `(` or `{` after the keyword), so Python-style
    # `if x:` / `try:` lines are NOT matched -- confirm this is intended.
    CONTROL_FLOW_PATTERNS = {
        'if': r'\bif\s*\(',
        'else': r'\belse\b',
        'elif': r'\b(elif|else\s+if)\s*\(',
        'for': r'\bfor\s*\(',
        'while': r'\bwhile\s*\(',
        'switch': r'\bswitch\s*\(',
        'case': r'\bcase\s+',
        'try': r'\btry\s*\{',
        'catch': r'\bcatch\s*\(',
        'except': r'\bexcept\s*:',
    }

    # Function definition patterns by language
    FUNCTION_PATTERNS = {
        'python': r'^\s*def\s+\w+\s*\(',
        'typescript': r'^\s*(export\s+)?(async\s+)?function\s+\w+\s*\(',
        'javascript': r'^\s*(export\s+)?(async\s+)?function\s+\w+\s*\(',
        'arrow_function': r'^\s*(const|let|var)\s+\w+\s*=\s*(\([^)]*\)|[^=]+)\s*=>',
        'method': r'^\s*(public|private|protected)?\s*(static\s+)?(async\s+)?\w+\s*\(',
        'solidity': r'^\s*function\s+\w+\s*\(',
    }

    # File extension -> FUNCTION_PATTERNS keys applied when counting
    # functions; unknown extensions count zero functions.
    _EXT_FUNCTION_KEYS = {
        '.py': ('python',),
        '.ts': ('typescript', 'arrow_function', 'method'),
        '.tsx': ('typescript', 'arrow_function', 'method'),
        '.js': ('javascript', 'arrow_function'),
        '.jsx': ('javascript', 'arrow_function'),
        '.sol': ('solidity',),
    }

    def __init__(self):
        """Initialize complexity analyzer (currently stateless)."""
        pass

    def analyze_file(self, filepath: str) -> Dict[str, int]:
        """
        Analyze a file's complexity.

        Args:
            filepath: Path to file to analyze

        Returns:
            Dictionary with raw metrics (``loc``, ``cyclomatic``,
            ``functions``), their sub-scores, and ``total_score`` capped
            at 100.  On read failure all values are 0 and an ``error``
            key holds the exception text.
        """
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                content = f.read()
        except Exception as e:
            # Unreadable file: report zeroed metrics instead of raising.
            return {
                'loc': 0,
                'loc_score': 0,
                'cyclomatic': 0,
                'cyclomatic_score': 0,
                'functions': 0,
                'function_score': 0,
                'total_score': 0,
                'error': str(e)
            }

        # Raw metrics
        loc = self._count_lines_of_code(content)
        cyclomatic = self._calculate_cyclomatic_complexity(content)
        functions = self._count_functions(content, filepath)

        # Per-metric scores
        loc_score = self._calculate_loc_score(loc)
        cyclomatic_score = self._calculate_cyclomatic_score(cyclomatic)
        function_score = self._calculate_function_score(functions)

        return {
            'loc': loc,
            'loc_score': loc_score,
            'cyclomatic': cyclomatic,
            'cyclomatic_score': cyclomatic_score,
            'functions': functions,
            'function_score': function_score,
            # Total score (capped at 100)
            'total_score': min(100, loc_score + cyclomatic_score + function_score)
        }

    def _count_lines_of_code(self, content: str) -> int:
        """
        Count non-empty, non-comment lines of code.

        Skips blank lines, ``//`` and ``#`` line comments, and C-style
        ``/* ... */`` block comments.

        Args:
            content: File content

        Returns:
            Line count
        """
        loc = 0
        in_multiline_comment = False

        for line in content.split('\n'):
            stripped = line.strip()

            # Skip empty lines
            if not stripped:
                continue

            # Inside a /* ... */ block: skip until the closing `*/`.
            # BUGFIX: previously a standalone `*/` line never cleared
            # the flag (the reset was nested under the `/*` check), so
            # every line after the first multi-line comment was skipped
            # for the rest of the file.
            if in_multiline_comment:
                if '*/' in stripped:
                    in_multiline_comment = False
                continue

            if '/*' in stripped:
                # Stay in comment mode only if the block is not closed
                # on the same line.
                if '*/' not in stripped:
                    in_multiline_comment = True
                continue

            # Skip single-line comments
            if stripped.startswith(('//', '#')):
                continue

            loc += 1

        return loc

    def _calculate_cyclomatic_complexity(self, content: str) -> int:
        """
        Estimate cyclomatic complexity by counting control flow statements.

        Each construct type is weighted (loops and exception handling
        weigh 3, branches weigh 2), and the estimated maximum nesting
        depth is added on top.

        Args:
            content: File content

        Returns:
            Cyclomatic complexity score
        """
        complexity = 0

        for keyword, pattern in self.CONTROL_FLOW_PATTERNS.items():
            count = len(re.findall(pattern, content, re.MULTILINE))

            # Weight different constructs (same weights as before:
            # branches x2, loops/exception handling x3, otherwise x1).
            if keyword in ('if', 'elif', 'else', 'switch', 'case'):
                complexity += count * 2
            elif keyword in ('for', 'while', 'try', 'catch', 'except'):
                complexity += count * 3
            else:
                complexity += count

        # Deep nesting adds complexity even with few branch keywords.
        complexity += self._estimate_nesting_depth(content)

        return complexity

    def _estimate_nesting_depth(self, content: str) -> int:
        """
        Estimate maximum nesting depth from leading indentation.

        Tabs count one level each; otherwise indentation is assumed to
        be 4 spaces per level, falling back to 2 spaces per level when a
        line is indented by fewer than 4 spaces.

        Args:
            content: File content

        Returns:
            Estimated nesting depth
        """
        max_depth = 0

        for line in content.split('\n'):
            if not line.strip():
                continue

            # Leading whitespace of this line.
            indent = line[:len(line) - len(line.lstrip())]

            if '\t' in indent:
                depth = indent.count('\t')
            else:
                # Try 4 spaces per level first, then 2.
                depth = len(indent) // 4
                if depth == 0 and len(indent) >= 2:
                    depth = len(indent) // 2

            max_depth = max(max_depth, depth)

        return max_depth

    def _count_functions(self, content: str, filepath: str) -> int:
        """
        Count function definitions in file.

        The file extension selects which FUNCTION_PATTERNS entries apply
        (see _EXT_FUNCTION_KEYS); unknown extensions yield 0.

        Args:
            content: File content
            filepath: File path (to determine language)

        Returns:
            Function count
        """
        ext = Path(filepath).suffix.lower()
        function_count = 0

        for key in self._EXT_FUNCTION_KEYS.get(ext, ()):
            function_count += len(re.findall(self.FUNCTION_PATTERNS[key], content, re.MULTILINE))

        return function_count

    def _calculate_loc_score(self, loc: int) -> int:
        """Calculate score (0-80) based on lines of code."""
        # (upper bound, score) pairs checked in ascending order;
        # same thresholds as the original if/elif chain.
        for limit, score in ((50, 0), (100, 10), (200, 20), (500, 40), (1000, 60)):
            if loc < limit:
                return score
        return 80

    def _calculate_cyclomatic_score(self, cyclomatic: int) -> int:
        """Calculate score based on cyclomatic complexity, capped at 100."""
        return min(100, cyclomatic)

    def _calculate_function_score(self, functions: int) -> int:
        """Calculate score (0-30) based on function count."""
        # (upper bound inclusive, score) pairs, same thresholds as before.
        for limit, score in ((5, 0), (10, 5), (20, 10), (50, 20)):
            if functions <= limit:
                return score
        return 30
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Dependency Analyzer for VooDocs Priority System
|
|
3
|
+
|
|
4
|
+
Analyzes import/dependency relationships between files.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import re
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Dict, List, Set
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class DependencyAnalyzer:
    """Analyzes file dependencies for priority scoring.

    Scores a file both by what it imports (fan-out) and by how many
    other project files import it (fan-in); fan-in weighs more.
    """

    # Import patterns by language; each regex captures the imported
    # module/file path in group 1.
    IMPORT_PATTERNS = {
        'python': [
            r'^\s*import\s+([^\s]+)',
            r'^\s*from\s+([^\s]+)\s+import',
        ],
        'typescript': [
            r'^\s*import\s+.*\s+from\s+["\']([^"\']+)["\']',
            r'^\s*import\s+["\']([^"\']+)["\']',
            r'^\s*export\s+.*\s+from\s+["\']([^"\']+)["\']',
        ],
        'javascript': [
            r'^\s*import\s+.*\s+from\s+["\']([^"\']+)["\']',
            r'^\s*import\s+["\']([^"\']+)["\']',
            r'^\s*const\s+.*\s*=\s*require\(["\']([^"\']+)["\']\)',
            r'^\s*export\s+.*\s+from\s+["\']([^"\']+)["\']',
        ],
        'solidity': [
            r'^\s*import\s+["\']([^"\']+)["\']',
        ],
    }

    def __init__(self, project_root: str = None):
        """
        Initialize dependency analyzer.

        Args:
            project_root: Root directory of the project.  When set,
                unscoped path-like absolute imports (e.g. ``lib/x``)
                are also treated as project-local.
        """
        self.project_root = Path(project_root) if project_root else None
        # filepath -> list of local imports; avoids re-reading every
        # file once per dependent query.
        self.file_cache = {}

    def analyze_file(self, filepath: str, all_files: List[str] = None) -> Dict:
        """
        Analyze a file's dependencies.

        Args:
            filepath: Path to file to analyze
            all_files: List of all files in project (for dependent analysis)

        Returns:
            Dictionary with dependency analysis results; on read failure
            all counts/scores are 0 and an ``error`` key is added.
        """
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                content = f.read()
        except Exception as e:
            return {
                'imports': [],
                'import_count': 0,
                'dependents': [],
                'dependent_count': 0,
                'depends_score': 0,
                'depended_score': 0,
                'total_score': 0,
                'error': str(e)
            }

        # Fan-out: what this file depends on.
        imports = self._find_imports(content, filepath)

        # Fan-in: what depends on this file (only when the project file
        # list is supplied).
        dependents = self._find_dependents(filepath, all_files) if all_files else []

        # Being depended upon weighs more (x5) than depending on others
        # (x2); each half is capped at 50 so the total stays within 100.
        depends_score = min(50, len(imports) * 2)
        depended_score = min(50, len(dependents) * 5)

        return {
            'imports': imports,
            'import_count': len(imports),
            'dependents': dependents,
            'dependent_count': len(dependents),
            'depends_score': depends_score,
            'depended_score': depended_score,
            'total_score': depends_score + depended_score
        }

    def _find_imports(self, content: str, filepath: str) -> List[str]:
        """
        Find local (project-internal) imports in a file.

        Args:
            content: File content
            filepath: File path (to determine language)

        Returns:
            Sorted list of imported module/file paths; external packages
            are filtered out.
        """
        ext = Path(filepath).suffix.lower()

        # Select the language's patterns; unknown extensions yield none.
        if ext == '.py':
            patterns = self.IMPORT_PATTERNS['python']
        elif ext in ('.ts', '.tsx'):
            patterns = self.IMPORT_PATTERNS['typescript']
        elif ext in ('.js', '.jsx'):
            patterns = self.IMPORT_PATTERNS['javascript']
        elif ext == '.sol':
            patterns = self.IMPORT_PATTERNS['solidity']
        else:
            patterns = []

        # Collect into a set to deduplicate across patterns.
        imports = set()
        for pattern in patterns:
            imports.update(re.findall(pattern, content, re.MULTILINE))

        # Keep only imports that look project-local: relative paths, or
        # (when a project root is configured) unscoped path-like names.
        local_imports = []
        for imp in imports:
            if imp.startswith(('.', '/')):
                local_imports.append(imp)
            elif self.project_root and not imp.startswith('@') and '/' in imp:
                local_imports.append(imp)

        return sorted(local_imports)

    def _find_dependents(self, filepath: str, all_files: List[str]) -> List[str]:
        """
        Find all files that import/depend on this file.

        Args:
            filepath: Path to file
            all_files: List of all files in project

        Returns:
            Sorted list of files that depend on this file
        """
        dependents = []

        for other_file in all_files:
            if other_file == filepath:
                continue

            # Parse (and cache) the other file's imports.
            if other_file not in self.file_cache:
                try:
                    with open(other_file, 'r', encoding='utf-8') as f:
                        content = f.read()
                    self.file_cache[other_file] = self._find_imports(content, other_file)
                except Exception:
                    # BUGFIX: was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit.  Unreadable files
                    # simply contribute no imports.
                    self.file_cache[other_file] = []

            # Any single matching import makes it a dependent.
            for imp in self.file_cache[other_file]:
                if self._import_matches_file(imp, filepath, other_file):
                    dependents.append(other_file)
                    break

        return sorted(dependents)

    def _import_matches_file(self, import_path: str, target_file: str, source_file: str) -> bool:
        """
        Check if an import path matches a target file.

        Args:
            import_path: Import statement path
            target_file: File to check against
            source_file: File containing the import

        Returns:
            True if import matches target file
        """
        # Resolve the target once; it is compared repeatedly below.
        target = Path(target_file).resolve()
        source = Path(source_file)

        if import_path.startswith('.'):
            # Resolve relative to the importing file's directory.
            resolved = (source.parent / import_path).resolve()

            # Exact match (import already carries the extension).
            if resolved == target:
                return True

            # Extensionless import: try common source extensions.
            for ext in ['.ts', '.tsx', '.js', '.jsx', '.py', '.sol']:
                if resolved.with_name(resolved.name + ext).resolve() == target:
                    return True

            # Directory import: try its index/package entry files.
            for index_name in ['index.ts', 'index.tsx', 'index.js', 'index.jsx', '__init__.py']:
                if (resolved / index_name).resolve() == target:
                    return True
        else:
            # Absolute/package-style import: crude substring match
            # against the resolved target path.
            # BUGFIX(cleanup): the previous
            # `import_path.replace('/', Path('/').as_posix())` was a
            # no-op, since `Path('/').as_posix()` is just `'/'`.
            if import_path in str(target):
                return True

        return False

    def get_dependency_reasons(self, analysis: Dict) -> List[str]:
        """
        Generate human-readable reasons for the dependency score.

        Args:
            analysis: Dependency analysis results (needs
                ``dependent_count`` and ``import_count``)

        Returns:
            List of reason strings
        """
        reasons = []

        if analysis['dependent_count'] > 0:
            reasons.append(f"Depended upon by {analysis['dependent_count']} file(s)")

        if analysis['import_count'] > 10:
            reasons.append(f"High dependency count ({analysis['import_count']} imports)")
        elif analysis['import_count'] > 5:
            reasons.append(f"Medium dependency count ({analysis['import_count']} imports)")

        if analysis['dependent_count'] >= 10:
            reasons.append("Critical shared module")
        elif analysis['dependent_count'] >= 5:
            reasons.append("Widely-used module")

        return reasons

    def get_dependency_suggestions(self, analysis: Dict) -> List[str]:
        """
        Generate dependency-specific documentation suggestions.

        Args:
            analysis: Dependency analysis results (needs
                ``dependent_count`` and ``import_count``)

        Returns:
            List of suggestion strings
        """
        suggestions = []

        if analysis['dependent_count'] >= 5:
            suggestions.append("Document public API and contracts")
            suggestions.append("Add usage examples for common patterns")

        if analysis['import_count'] > 10:
            suggestions.append("Document key dependencies and their roles")
            suggestions.append("Consider documenting dependency graph")

        if analysis['dependent_count'] >= 10:
            suggestions.append("Critical module - prioritize comprehensive documentation")
            suggestions.append("Add integration examples")

        return suggestions