vcode-analysis 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- analyzers/__init__.py +24 -0
- analyzers/architecture.py +510 -0
- analyzers/code_review.py +150 -0
- analyzers/directory.py +867 -0
- analyzers/documentation.py +209 -0
- analyzers/security.py +671 -0
- core/__init__.py +17 -0
- core/analyzer.py +207 -0
- core/config.py +166 -0
- core/git_handler.py +718 -0
- core/llm_client.py +186 -0
- parsers/__init__.py +209 -0
- parsers/c/__init__.py +57 -0
- parsers/c/ast_parser.py +424 -0
- parsers/c/models.py +211 -0
- parsers/c/patterns.py +143 -0
- parsers/c/regex_parser.py +594 -0
- parsers/c_parser.py +275 -0
- parsers/java_parser.py +430 -0
- parsers/javascript_parser.py +587 -0
- parsers/kotlin/__init__.py +61 -0
- parsers/kotlin/ast_parser.py +591 -0
- parsers/kotlin/models.py +274 -0
- parsers/kotlin/patterns.py +146 -0
- parsers/kotlin/regex_parser.py +906 -0
- parsers/kotlin_parser.py +279 -0
- parsers/python_parser.py +429 -0
- parsers/typescript_parser.py +381 -0
- vcode_analysis-0.1.0.dist-info/METADATA +246 -0
- vcode_analysis-0.1.0.dist-info/RECORD +34 -0
- vcode_analysis-0.1.0.dist-info/WHEEL +5 -0
- vcode_analysis-0.1.0.dist-info/entry_points.txt +2 -0
- vcode_analysis-0.1.0.dist-info/licenses/LICENSE +21 -0
- vcode_analysis-0.1.0.dist-info/top_level.txt +3 -0
analyzers/__init__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Analyzer package.

Re-exports every public analyzer class and its result dataclasses so
callers can import them directly from ``analyzers`` instead of the
individual submodules.
"""

from .code_review import CodeReviewAnalyzer, CodeReviewResult
from .documentation import DocumentationAnalyzer, DocumentationResult
from .architecture import ArchitectureAnalyzer, ArchitectureResult, ModuleInfo, DependencyInfo
from .security import SecurityAnalyzer, SecurityResult, SecurityIssue
from .directory import DirectoryAnalyzer, DirectoryAnalysisResult, ProjectInfo

__all__ = [
    "CodeReviewAnalyzer",
    "CodeReviewResult",
    "DocumentationAnalyzer",
    "DocumentationResult",
    "ArchitectureAnalyzer",
    "ArchitectureResult",
    "ModuleInfo",
    "DependencyInfo",
    "SecurityAnalyzer",
    "SecurityResult",
    "SecurityIssue",
    "DirectoryAnalyzer",
    "DirectoryAnalysisResult",
    "ProjectInfo",
]
|
|
@@ -0,0 +1,510 @@
|
|
|
1
|
+
"""Architecture analyzer.

Analyzes project structure, dependency relationships, and module coupling.
"""

from dataclasses import dataclass, field
from typing import Optional
from collections import defaultdict
import re
from pathlib import Path

from core.analyzer import Analyzer, FileInfo, AnalysisResult

# Each language-specific AST parser is an optional dependency: when its
# import fails, the matching *_AST_AVAILABLE flag is cleared and the
# analyzer falls back to regex-based parsing for that language.

# Python AST parser (optional dependency)
try:
    from parsers.python_parser import PythonASTParser
    PYTHON_AST_AVAILABLE = True
except ImportError:
    PYTHON_AST_AVAILABLE = False

# JavaScript AST parser (optional dependency)
try:
    from parsers.javascript_parser import JavaScriptASTParser
    JS_AST_AVAILABLE = True
except ImportError:
    JS_AST_AVAILABLE = False

# TypeScript AST parser (optional dependency)
try:
    from parsers.typescript_parser import TypeScriptASTParser
    TS_AST_AVAILABLE = True
except ImportError:
    TS_AST_AVAILABLE = False

# Java AST parser (optional dependency)
try:
    from parsers.java_parser import JavaASTParser
    JAVA_AST_AVAILABLE = True
except ImportError:
    JAVA_AST_AVAILABLE = False

# Kotlin AST parser (optional dependency)
try:
    from parsers.kotlin_parser import KotlinASTParser
    KOTLIN_AST_AVAILABLE = True
except ImportError:
    KOTLIN_AST_AVAILABLE = False

# C AST parser (optional dependency)
# NOTE(review): C_AST_AVAILABLE / CASTParser are imported but not used in
# this module's visible code — presumably reserved for future use; confirm.
try:
    from parsers.c_parser import CASTParser
    C_AST_AVAILABLE = True
except ImportError:
    C_AST_AVAILABLE = False
|
57
|
+
@dataclass
class DependencyInfo:
    """One import edge in the project dependency graph."""
    # Relative path of the module that contains the import statement.
    source: str
    # Imported module/path name exactly as written in the source file.
    target: str
    import_type: str  # import, from_import, require, include
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class ModuleInfo:
    """Per-file structural summary produced by ArchitectureAnalyzer."""
    # Path relative to the project root.
    path: str
    # Detected language name (e.g. "Python", "Java").
    language: str
    # Imported module names found in the file.
    imports: list[str]
    # Exported names (currently always empty — export detection is a TODO).
    exports: list[str]
    # Total line count of the file.
    lines: int
    # Number of class-like definitions.
    classes: int
    # Number of functions, including methods inside classes.
    functions: int
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
@dataclass
class ArchitectureResult:
    """Aggregate result of an architecture analysis run."""
    total_files: int
    total_lines: int
    # Language name -> number of files in that language.
    languages: dict[str, int]
    modules: list[ModuleInfo]
    dependencies: list[DependencyInfo]
    # One (source, target) back-edge per detected dependency cycle.
    circular_dependencies: list[tuple[str, str]]
    coupling_score: float  # 0-100, lower is better
    cohesion_score: float  # 0-100, higher is better
    # LLM-generated (or fallback) textual assessment.
    summary: str
    suggestions: list[str]
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
# Prompt template for LLM-based architecture assessment.  The doubled
# braces around the JSON example survive str.format(); the reply is
# expected to be a JSON object with "summary" and "suggestions" keys.
ARCH_ANALYSIS_PROMPT = """分析以下项目的架构信息并提供改进建议:

项目统计:
- 文件数: {total_files}
- 代码行数: {total_lines}
- 语言分布: {languages}
- 模块数: {modules_count}
- 循环依赖: {circular_deps}

模块列表:
{modules_summary}

请分析项目的架构质量,包括:
1. 模块化程度评价
2. 依赖关系分析
3. 潜在的架构问题
4. 改进建议

以 JSON 格式输出:
{{"summary": "架构评价", "suggestions": ["建议1", "建议2"]}}"""
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Per-language regexes for import statements, applied to stripped lines
# with re.match().  Group 1 captures the imported module/path.
IMPORT_PATTERNS = {
    "Python": [
        r'^import\s+(\S+)',
        r'^from\s+(\S+)\s+import',
    ],
    "JavaScript": [
        r'^import\s+.*from\s+[\'"]([^\'"]+)[\'"]',
        r'^require\([\'"]([^\'"]+)[\'"]\)',
    ],
    "TypeScript": [
        r'^import\s+.*from\s+[\'"]([^\'"]+)[\'"]',
        r'^import\s+[\'"]([^\'"]+)[\'"]',
    ],
    "Java": [
        r'^import\s+([\w\.]+);',
    ],
    "Kotlin": [
        r'^import\s+([\w\.]+)',
    ],
    "C": [
        r'^#include\s*<([^>]+)>',
        r'^#include\s*"([^"]+)"',
    ],
    "C++": [
        r'^#include\s*<([^>]+)>',
        r'^#include\s*"([^"]+)"',
    ],
    "Go": [
        r'^import\s+"([^"]+)"',
        # NOTE(review): this pattern has NO capture group — callers must
        # guard match.group(1) or it raises IndexError on a match.
        r'^import\s+\([^)]*\)',
    ],
    "Rust": [
        r'^use\s+([\w:]+)',
    ],
    "Ruby": [
        r'^require\s+[\'"]([^\'"]+)[\'"]',
        r'^require_relative\s+[\'"]([^\'"]+)[\'"]',
    ],
    "PHP": [
        r'^use\s+([\w\\]+);',
        r'^require(?:_once)?\s+[\'"]([^\'"]+)[\'"]',
    ],
}
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class ArchitectureAnalyzer:
    """Project architecture analyzer.

    Scans every file exposed by the wrapped :class:`Analyzer`, extracts
    imports and structural counts (classes / functions), builds a
    module-level dependency graph, detects circular dependencies, and
    derives heuristic coupling / cohesion scores.  Language-specific AST
    parsers are preferred when installed; regex heuristics are the
    fallback.
    """

    def __init__(self, analyzer: Analyzer):
        self.analyzer = analyzer
        # Populated by analyze(); reset at the start of every call.
        self.modules: list[ModuleInfo] = []
        self.dependencies: list[DependencyInfo] = []

    def _detect_imports_ast(self, content: str, language: str, file_path: str) -> Optional[list[str]]:
        """Extract imports with the language's AST parser.

        Returns None when no parser is available for *language*, parsing
        does not succeed, or the parser raises — the caller then falls
        back to regex matching.
        """
        try:
            if language == "Python" and PYTHON_AST_AVAILABLE:
                result = PythonASTParser().parse_code(content, file_path)
                if result.success:
                    found: list[str] = []
                    for imp in result.imports:
                        # "from X import a, b" contributes the module X;
                        # "import a, b" contributes each imported name.
                        if imp.is_from and imp.module:
                            found.append(imp.module)
                        else:
                            found.extend(imp.names)
                    return found
            elif language == "JavaScript" and JS_AST_AVAILABLE:
                result = JavaScriptASTParser().parse_code(content, file_path)
                if result.success:
                    return [imp.source for imp in result.imports]
            elif language == "TypeScript" and TS_AST_AVAILABLE:
                result = TypeScriptASTParser().parse_code(content, file_path)
                if result.success:
                    return [imp.source for imp in result.imports]
            elif language == "Java" and JAVA_AST_AVAILABLE:
                result = JavaASTParser().parse_code(content, file_path)
                if result.success:
                    return [imp.path for imp in result.imports]
        except Exception:
            pass  # any parser failure falls through to regex parsing
        return None

    def _detect_imports(self, content: str, language: str, file_path: str = "") -> list[str]:
        """Detect import statements in *content*.

        Tries the AST route first (when a file path is supplied), then
        falls back to the line-based regexes in IMPORT_PATTERNS.
        """
        if file_path:
            ast_imports = self._detect_imports_ast(content, language, file_path)
            if ast_imports is not None:
                return ast_imports

        imports: list[str] = []
        patterns = IMPORT_PATTERNS.get(language, [])
        for line in content.split('\n'):
            line = line.strip()
            for pattern in patterns:
                match = re.match(pattern, line)
                # Bug fix: guard on lastindex — some patterns (e.g. Go's
                # grouped import form) have no capture group, and the
                # original code raised IndexError on group(1) for them.
                if match and match.lastindex:
                    imports.append(match.group(1))

        return imports

    def _count_structures_ast(self, content: str, language: str, file_path: str, result: dict) -> bool:
        """Fill *result* with AST-derived counts; return True on success.

        Returning False (also on any parser exception) tells the caller
        to fall back to regex counting.
        """
        try:
            if language == "Python" and PYTHON_AST_AVAILABLE:
                ast_result = PythonASTParser().parse_code(content, file_path)
                if ast_result.success:
                    result["classes"] = len(ast_result.classes)
                    # Free functions plus methods defined inside classes.
                    result["functions"] = len(ast_result.functions) + sum(
                        len(cls.methods) for cls in ast_result.classes
                    )
                    result["docstring_coverage"] = ast_result.docstring_coverage
                    result["type_hint_coverage"] = ast_result.type_hint_coverage
                    return True
            elif language == "JavaScript" and JS_AST_AVAILABLE:
                ast_result = JavaScriptASTParser().parse_code(content, file_path)
                if ast_result.success:
                    result["classes"] = len(ast_result.classes)
                    result["functions"] = len(ast_result.functions) + sum(
                        len(cls.methods) for cls in ast_result.classes
                    )
                    return True
            elif language == "TypeScript" and TS_AST_AVAILABLE:
                ast_result = TypeScriptASTParser().parse_code(content, file_path)
                if ast_result.success:
                    result["classes"] = len(ast_result.classes)
                    result["functions"] = len(ast_result.functions) + sum(
                        len(cls.methods) for cls in ast_result.classes
                    )
                    # TypeScript-specific structures.
                    result["interfaces"] = len(ast_result.interfaces)
                    result["types"] = len(ast_result.types)
                    result["enums"] = len(ast_result.enums)
                    return True
            elif language == "Java" and JAVA_AST_AVAILABLE:
                ast_result = JavaASTParser().parse_code(content, file_path)
                if ast_result.success:
                    # Interfaces and enums count as class-like types.
                    result["classes"] = (
                        len(ast_result.classes)
                        + len(ast_result.interfaces)
                        + len(ast_result.enums)
                    )
                    result["functions"] = (
                        sum(len(cls.methods) for cls in ast_result.classes)
                        + sum(len(iface.methods) for iface in ast_result.interfaces)
                        + sum(len(enum.methods) for enum in ast_result.enums)
                    )
                    return True
            elif language == "Kotlin" and KOTLIN_AST_AVAILABLE:
                ast_result = KotlinASTParser().parse_code(content, file_path)
                if ast_result.success:
                    result["classes"] = len(ast_result.classes)
                    # Kotlin's model exposes class members as .functions.
                    result["functions"] = len(ast_result.functions) + sum(
                        len(cls.functions) for cls in ast_result.classes
                    )
                    return True
        except Exception:
            pass  # fall back to regex counting
        return False

    def _count_structures(self, content: str, language: str, file_path: str = "") -> dict:
        """Count classes and functions in *content*.

        Always returns a dict with at least "classes", "functions",
        "docstring_coverage" and "type_hint_coverage" keys; AST-backed
        languages may add more (e.g. TypeScript "interfaces").
        """
        result = {"classes": 0, "functions": 0, "docstring_coverage": 0.0, "type_hint_coverage": 0.0}

        if file_path and self._count_structures_ast(content, language, file_path, result):
            return result

        # Regex fallback for other languages or when AST parsing fails.
        if language == "Python":
            result["classes"] = len(re.findall(r'^class\s+\w+', content, re.MULTILINE))
            result["functions"] = len(re.findall(r'^def\s+\w+', content, re.MULTILINE))
        elif language in ["JavaScript", "TypeScript"]:
            result["classes"] = len(re.findall(r'\bclass\s+\w+', content))
            result["functions"] = len(re.findall(r'\bfunction\s+\w+', content))
        elif language == "Java":
            result["classes"] = len(re.findall(r'\bclass\s+\w+', content))
            # Bug fix: the original stored this count under "methods",
            # which no caller reads — analyze_file() reads "functions".
            result["functions"] = len(re.findall(r'\b(public|private|protected)\s+\w+\s+\w+\s*\(', content))
        elif language == "Kotlin":
            result["classes"] = len(re.findall(r'\bclass\s+\w+', content))
            result["functions"] = len(re.findall(r'\bfun\s+\w+', content))

        return result

    def analyze_file(self, file_info: FileInfo) -> ModuleInfo:
        """Analyze a single file.

        Returns an all-zero ModuleInfo on any read/parse failure so one
        bad file does not abort the whole scan.
        """
        try:
            content = self.analyzer.read_file_content(file_info)
            imports = self._detect_imports(content, file_info.language, file_info.relative_path)
            structures = self._count_structures(content, file_info.language, file_info.relative_path)

            return ModuleInfo(
                path=file_info.relative_path,
                language=file_info.language,
                imports=imports,
                exports=[],  # TODO: implement export detection
                lines=len(content.split('\n')),
                classes=structures.get("classes", 0),
                functions=structures.get("functions", 0),
            )
        except Exception:
            return ModuleInfo(
                path=file_info.relative_path,
                language=file_info.language,
                imports=[],
                exports=[],
                lines=0,
                classes=0,
                functions=0,
            )

    def _detect_circular_dependencies(self) -> list[tuple[str, str]]:
        """Detect cycles in the dependency graph.

        Returns one (source, target) back-edge per cycle found via DFS.
        """
        # Build the adjacency structure from the recorded dependencies.
        graph = defaultdict(set)
        for dep in self.dependencies:
            graph[dep.source].add(dep.target)

        circular = []
        visited = set()
        rec_stack = set()

        def dfs(node):
            visited.add(node)
            rec_stack.add(node)
            try:
                for neighbor in graph[node]:
                    if neighbor not in visited:
                        cycle = dfs(neighbor)
                        if cycle:
                            return cycle
                    elif neighbor in rec_stack:
                        # Back edge to a node on the current path: cycle.
                        return (node, neighbor)
                return None
            finally:
                # Bug fix: the original skipped this removal when
                # returning a cycle early, leaving stale entries in
                # rec_stack that produced false-positive cycles for
                # later traversal roots.
                rec_stack.remove(node)

        # Snapshot the keys: dfs() may add keys to the defaultdict.
        for node in list(graph.keys()):
            if node not in visited:
                cycle = dfs(node)
                if cycle:
                    circular.append(cycle)

        return circular

    def _calculate_coupling(self) -> float:
        """Coupling score in [0, 100]; lower is better.

        Based on average dependencies per module: <= 5 maps linearly to
        0-50, then 5 points per extra average dependency, capped at 100.
        """
        if not self.modules:
            return 0

        avg_deps = len(self.dependencies) / len(self.modules)

        if avg_deps <= 5:
            return avg_deps * 10  # 0-50
        return min(100, 50 + (avg_deps - 5) * 5)

    def _calculate_cohesion(self) -> float:
        """Cohesion score in [0, 100]; higher is better.

        Heuristic: (classes + functions) per 100 lines, scaled by 10 and
        capped at 100, averaged over all non-empty modules.
        """
        if not self.modules:
            return 100

        scores = []
        for module in self.modules:
            if module.lines > 0:
                density = (module.classes + module.functions) / module.lines * 100
                scores.append(min(100, density * 10))

        return sum(scores) / len(scores) if scores else 100

    def _summarize_with_llm(self, prompt: str, total_lines: int) -> tuple[str, list[str]]:
        """Ask the LLM for a summary and suggestions; degrade gracefully.

        Any failure (LLM unavailable, reply not valid JSON) yields a
        static fallback summary instead of raising.
        """
        try:
            import json
            response = self.analyzer.llm.simple_chat(prompt)
            content = response
            # The model may wrap the JSON reply in a markdown code fence.
            if "```json" in content:
                content = content.split("```json")[1].split("```")[0]
            elif "```" in content:
                content = content.split("```")[1].split("```")[0]
            data = json.loads(content.strip())
            return data.get("summary", ""), data.get("suggestions", [])
        except Exception:
            return (
                f"项目包含 {len(self.modules)} 个文件,{total_lines} 行代码",
                ["建议添加更多文档", "考虑模块化重构"],
            )

    def analyze(self, target_path: str) -> ArchitectureResult:
        """Analyze the architecture of the project at *target_path*."""
        # Bug fix: reset state so repeated calls on the same instance do
        # not accumulate (the original appended across calls).
        self.modules = []
        self.dependencies = []

        file_infos = list(self.analyzer.scan_files(target_path))

        # Per-file analysis and dependency-edge recording.
        for file_info in file_infos:
            module = self.analyze_file(file_info)
            self.modules.append(module)

            for imp in module.imports:
                self.dependencies.append(DependencyInfo(
                    source=module.path,
                    target=imp,
                    import_type="import"
                ))

        # Aggregate statistics.
        languages = defaultdict(int)
        total_lines = 0
        for module in self.modules:
            languages[module.language] += 1
            total_lines += module.lines

        circular_deps = self._detect_circular_dependencies()
        coupling_score = self._calculate_coupling()
        cohesion_score = self._calculate_cohesion()

        # Cap the module listing to keep the LLM prompt small.
        modules_summary = "\n".join([
            f"- {m.path}: {m.language}, {m.lines}行, {m.classes}类, {m.functions}函数"
            for m in self.modules[:20]
        ])

        prompt = ARCH_ANALYSIS_PROMPT.format(
            total_files=len(self.modules),
            total_lines=total_lines,
            languages=dict(languages),
            modules_count=len(self.modules),
            circular_deps=len(circular_deps),
            modules_summary=modules_summary,
        )

        summary, suggestions = self._summarize_with_llm(prompt, total_lines)

        return ArchitectureResult(
            total_files=len(self.modules),
            total_lines=total_lines,
            languages=dict(languages),
            modules=self.modules,
            dependencies=self.dependencies,
            circular_dependencies=circular_deps,
            coupling_score=coupling_score,
            cohesion_score=cohesion_score,
            summary=summary,
            suggestions=suggestions,
        )
|
analyzers/code_review.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Code review analyzer.

Uses the core Analyzer's LLM to review individual files and Git commits.
"""

from dataclasses import dataclass
from typing import Optional
from core.analyzer import Analyzer, FileInfo, AnalysisResult
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass
class CodeReviewResult:
    """Result of reviewing a single file."""
    # Path of the reviewed file, relative to the project root.
    file_path: str
    score: int  # 1-10, 0 means the analysis failed
    # Issues reported by the LLM: dicts with "type", "severity",
    # "line" and "description" keys (per CODE_REVIEW_PROMPT).
    issues: list[dict]
    suggestions: list[str]
    # LLM summary, or the raw reply when JSON parsing failed.
    summary: str
    success: bool = True
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Prompt template for single-file review; the model is instructed to
# reply with strict JSON (score / issues / suggestions / summary).
CODE_REVIEW_PROMPT = """请审查以下 {language} 代码:

文件: {file_path}

```{language}
{content}
```

请进行代码审查,必须严格按照以下 JSON 格式输出,不要输出任何其他内容:

{{"score": 评分1-10, "issues": [{{"type": "问题类型", "severity": "high/medium/low", "line": 行号, "description": "问题描述"}}], "suggestions": ["改进建议"], "summary": "代码评价总结"}}"""
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
COMMIT_REVIEW_PROMPT = """你是一位代码审查专家。请审查以下 Git 提交的变更。
|
|
34
|
+
|
|
35
|
+
提交信息: {commit_message}
|
|
36
|
+
作者: {author}
|
|
37
|
+
|
|
38
|
+
变更内容:
|
|
39
|
+
{diff}
|
|
40
|
+
|
|
41
|
+
请分析:
|
|
42
|
+
1. 变更的目的和影响
|
|
43
|
+
2. 潜在的问题或风险
|
|
44
|
+
3. 代码风格和最佳实践
|
|
45
|
+
4. 改进建议
|
|
46
|
+
|
|
47
|
+
以 Markdown 格式输出审查报告。"""
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class CodeReviewAnalyzer:
    """Code review analyzer.

    Drives the wrapped :class:`Analyzer`'s LLM to review single files
    (structured JSON verdicts) or whole Git commits (markdown reports).
    """

    def __init__(self, analyzer: Analyzer):
        self.analyzer = analyzer

    @staticmethod
    def _strip_code_fence(content: str) -> str:
        """Return the payload of a markdown code fence, or *content* as-is."""
        if "```json" in content:
            return content.split("```json")[1].split("```")[0]
        if "```" in content:
            return content.split("```")[1].split("```")[0]
        return content

    def review_file(self, file_info: FileInfo) -> CodeReviewResult:
        """Review a single file.

        Sends the file through CODE_REVIEW_PROMPT and parses the LLM's
        JSON reply; when the reply is not valid JSON the raw text is
        preserved as the summary so no information is lost.
        """
        result = self.analyzer.analyze_file(
            file_info,
            CODE_REVIEW_PROMPT,
        )

        if not result.success:
            # Bug fix: the original indexed result.errors[0] without
            # checking that the error list is non-empty.
            description = result.errors[0] if result.errors else "分析失败"
            return CodeReviewResult(
                file_path=file_info.relative_path,
                score=0,
                issues=[{"type": "error", "severity": "high", "description": description}],
                suggestions=[],
                summary="分析失败",
                success=False,
            )

        import json
        try:
            data = json.loads(self._strip_code_fence(result.content).strip())
            return CodeReviewResult(
                file_path=file_info.relative_path,
                score=data.get("score", 0),
                issues=data.get("issues", []),
                suggestions=data.get("suggestions", []),
                summary=data.get("summary", ""),
            )
        except (json.JSONDecodeError, AttributeError):
            # AttributeError covers a syntactically valid reply that is
            # not a JSON object (e.g. a bare list) — the original let
            # that escape as an uncaught exception.
            return CodeReviewResult(
                file_path=file_info.relative_path,
                score=0,
                issues=[],
                suggestions=[],
                summary=result.content,
            )

    def review_commit(self, commit_hash: str) -> AnalysisResult:
        """Review the changes of one Git commit.

        Returns a failed AnalysisResult when the target is not a Git
        repository; otherwise a markdown report from the LLM.
        """
        git = self.analyzer.git
        if not git:
            return AnalysisResult(
                file_path="",
                analyzer="code_review",
                success=False,
                content="",
                errors=["目标路径不是有效的 Git 仓库"],
            )

        commit_info = git.get_commit_info(commit_hash)
        diffs = git.get_commit_diff(commit_hash)

        # Assemble one markdown document of all per-file diffs
        # (join instead of the original quadratic string +=).
        diff_parts = []
        for diff in diffs:
            diff_parts.append(f"\n### {diff.file_path}\n")
            diff_parts.append(f"状态: {diff.status} (+{diff.additions}/-{diff.deletions})\n")
            diff_parts.append(f"```diff\n{diff.diff}\n```\n")
        diff_content = "".join(diff_parts)

        prompt = COMMIT_REVIEW_PROMPT.format(
            commit_message=commit_info.message,
            author=commit_info.author,
            diff=diff_content,
        )

        response = self.analyzer.llm.simple_chat(prompt)

        return AnalysisResult(
            file_path=commit_hash,
            analyzer="code_review",
            success=True,
            content=response,
            metadata={
                "commit_hash": commit_hash,
                "author": commit_info.author,
                "date": commit_info.date.isoformat(),
                "files_changed": commit_info.files_changed,
            },
        )

    def review_files(self, file_infos: list[FileInfo]) -> list[CodeReviewResult]:
        """Review each file in *file_infos*; one result per file, in order."""
        return [self.review_file(file_info) for file_info in file_infos]
|