tree-sitter-analyzer 0.3.0-py3-none-any.whl → 0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tree-sitter-analyzer might be problematic.
- tree_sitter_analyzer/__init__.py +4 -3
- tree_sitter_analyzer/api.py +4 -2
- tree_sitter_analyzer/cli/__init__.py +3 -3
- tree_sitter_analyzer/cli/commands/advanced_command.py +1 -1
- tree_sitter_analyzer/cli/commands/base_command.py +1 -1
- tree_sitter_analyzer/cli/commands/partial_read_command.py +2 -2
- tree_sitter_analyzer/cli/commands/summary_command.py +2 -2
- tree_sitter_analyzer/cli/commands/table_command.py +11 -8
- tree_sitter_analyzer/cli_main.py +2 -1
- tree_sitter_analyzer/core/analysis_engine.py +33 -69
- tree_sitter_analyzer/core/engine.py +6 -4
- tree_sitter_analyzer/core/parser.py +1 -1
- tree_sitter_analyzer/core/query.py +16 -8
- tree_sitter_analyzer/encoding_utils.py +0 -2
- tree_sitter_analyzer/exceptions.py +23 -23
- tree_sitter_analyzer/file_handler.py +4 -11
- tree_sitter_analyzer/formatters/java_formatter.py +8 -4
- tree_sitter_analyzer/formatters/python_formatter.py +8 -4
- tree_sitter_analyzer/interfaces/cli.py +1 -1
- tree_sitter_analyzer/interfaces/cli_adapter.py +30 -9
- tree_sitter_analyzer/interfaces/mcp_adapter.py +43 -17
- tree_sitter_analyzer/interfaces/mcp_server.py +9 -9
- tree_sitter_analyzer/java_analyzer.py +20 -51
- tree_sitter_analyzer/language_loader.py +2 -2
- tree_sitter_analyzer/languages/java_plugin.py +86 -41
- tree_sitter_analyzer/{plugins → languages}/javascript_plugin.py +3 -3
- tree_sitter_analyzer/languages/python_plugin.py +16 -6
- tree_sitter_analyzer/mcp/resources/code_file_resource.py +0 -3
- tree_sitter_analyzer/mcp/resources/project_stats_resource.py +0 -5
- tree_sitter_analyzer/mcp/server.py +4 -4
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool.py +63 -30
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool_cli_compatible.py +9 -4
- tree_sitter_analyzer/mcp/tools/table_format_tool.py +2 -2
- tree_sitter_analyzer/mcp/utils/__init__.py +10 -8
- tree_sitter_analyzer/models.py +1 -1
- tree_sitter_analyzer/output_manager.py +4 -10
- tree_sitter_analyzer/plugins/__init__.py +9 -62
- tree_sitter_analyzer/plugins/base.py +20 -1
- tree_sitter_analyzer/plugins/manager.py +29 -12
- tree_sitter_analyzer/query_loader.py +4 -1
- tree_sitter_analyzer/table_formatter.py +4 -1
- tree_sitter_analyzer/utils.py +6 -6
- {tree_sitter_analyzer-0.3.0.dist-info → tree_sitter_analyzer-0.4.0.dist-info}/METADATA +3 -3
- tree_sitter_analyzer-0.4.0.dist-info/RECORD +73 -0
- {tree_sitter_analyzer-0.3.0.dist-info → tree_sitter_analyzer-0.4.0.dist-info}/entry_points.txt +2 -1
- tree_sitter_analyzer/plugins/java_plugin.py +0 -608
- tree_sitter_analyzer/plugins/plugin_loader.py +0 -85
- tree_sitter_analyzer/plugins/python_plugin.py +0 -606
- tree_sitter_analyzer/plugins/registry.py +0 -374
- tree_sitter_analyzer-0.3.0.dist-info/RECORD +0 -77
- {tree_sitter_analyzer-0.3.0.dist-info → tree_sitter_analyzer-0.4.0.dist-info}/WHEEL +0 -0
@@ -12,10 +12,11 @@ from typing import TYPE_CHECKING, Any, Optional
 if TYPE_CHECKING:
     import tree_sitter
 
-    from ..
+    from ..core.analysis_engine import AnalysisRequest
+    from ..models import AnalysisResult
 
 from ..encoding_utils import extract_text_slice, safe_encode
-from ..models import Class, Function, Import, Package, Variable
+from ..models import Class, CodeElement, Function, Import, Package, Variable
 from ..plugins.base import ElementExtractor, LanguagePlugin
 from ..utils import log_debug, log_error, log_warning
 
@@ -209,7 +210,7 @@ class JavaElementExtractor(ElementExtractor):
         Uses batch processing for optimal performance
         """
         if not root_node:
-            return
+            return  # type: ignore[unreachable]
 
         # Target node types for extraction
         target_node_types = set(extractors.keys())
@@ -611,6 +612,9 @@ class JavaElementExtractor(ElementExtractor):
                 "void_type",
                 "primitive_type",
                 "integral_type",
+                "boolean_type",
+                "floating_point_type",
+                "array_type",
             ]:
                 return_type = self._get_node_text_optimized(child)
                 break
@@ -650,7 +654,15 @@ class JavaElementExtractor(ElementExtractor):
         # Extract type (exactly as in AdvancedAnalyzer)
         field_type = None
         for child in node.children:
-            if child.type in [
+            if child.type in [
+                "type_identifier",
+                "primitive_type",
+                "integral_type",
+                "generic_type",
+                "boolean_type",
+                "floating_point_type",
+                "array_type",
+            ]:
                 field_type = self._get_node_text_optimized(child)
                 break
 
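The two hunks above widen the set of tree-sitter node types the Java extractor accepts when resolving a method's return type and a field's type: `boolean_type`, `floating_point_type`, and `array_type` are now recognized alongside the existing identifiers. A minimal sketch of the same membership check, using a hypothetical `FakeNode` stand-in instead of real tree-sitter nodes:

    from dataclasses import dataclass

    # Hypothetical stand-in for a tree-sitter node; only the two attributes
    # the extractor actually reads are modeled here.
    @dataclass
    class FakeNode:
        type: str
        text: str

    # Node types the 0.4.0 extractor treats as a usable type annotation.
    TYPE_NODE_KINDS = {
        "type_identifier",
        "void_type",
        "primitive_type",
        "integral_type",
        "generic_type",
        "boolean_type",
        "floating_point_type",
        "array_type",
    }

    def first_type_text(children: list[FakeNode]) -> str | None:
        """Return the text of the first child that looks like a type node."""
        for child in children:
            if child.type in TYPE_NODE_KINDS:
                return child.text
        return None

    # `boolean` and `double[]` declarations now resolve instead of falling through.
    print(first_type_text([FakeNode("modifiers", "public"), FakeNode("boolean_type", "boolean")]))
    print(first_type_text([FakeNode("modifiers", "private"), FakeNode("array_type", "double[]")]))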
@@ -784,38 +796,44 @@ class JavaElementExtractor(ElementExtractor):
         return complexity
 
     def _extract_javadoc_for_line(self, target_line: int) -> str | None:
-        """Extract JavaDoc comment immediately before the specified line
+        """Extract JavaDoc comment immediately before the specified line"""
         try:
+            if not self.content_lines or target_line <= 1:
+                return None
+
             # Search backwards from target_line
             javadoc_lines = []
             current_line = target_line - 1
 
             # Skip empty lines
-            while current_line > 0
+            while current_line > 0:
                 line = self.content_lines[current_line - 1].strip()
                 if line:
                     break
                 current_line -= 1
 
             # Check for JavaDoc end
-            if current_line > 0
+            if current_line > 0:
                 line = self.content_lines[current_line - 1].strip()
                 if line.endswith("*/"):
-
+                    # This might be a JavaDoc comment
+                    javadoc_lines.append(self.content_lines[current_line - 1])
                     current_line -= 1
 
                     # Collect JavaDoc content
                     while current_line > 0:
-
-
-
-                        break
-                        current_line -= 1
+                        line_content = self.content_lines[current_line - 1]
+                        line_stripped = line_content.strip()
+                        javadoc_lines.append(line_content)
 
-
-
-
-
+                        if line_stripped.startswith("/**"):
+                            # Found the start of JavaDoc
+                            javadoc_lines.reverse()
+                            javadoc_text = "\n".join(javadoc_lines)
+
+                            # Clean up the JavaDoc
+                            return self._clean_javadoc(javadoc_text)
+                        current_line -= 1
 
             return None
 
@@ -823,6 +841,31 @@ class JavaElementExtractor(ElementExtractor):
             log_debug(f"Failed to extract JavaDoc: {e}")
             return None
 
+    def _clean_javadoc(self, javadoc_text: str) -> str:
+        """Clean JavaDoc text by removing comment markers"""
+        if not javadoc_text:
+            return ""
+
+        lines = javadoc_text.split("\n")
+        cleaned_lines = []
+
+        for line in lines:
+            # Remove leading/trailing whitespace
+            line = line.strip()
+
+            # Remove comment markers
+            if line.startswith("/**"):
+                line = line[3:].strip()
+            elif line.startswith("*/"):
+                line = line[2:].strip()
+            elif line.startswith("*"):
+                line = line[1:].strip()
+
+            if line:  # Only add non-empty lines
+                cleaned_lines.append(line)
+
+        return " ".join(cleaned_lines) if cleaned_lines else ""
+
     def _is_nested_class(self, node: "tree_sitter.Node") -> bool:
         """Check if this is a nested class (from AdvancedAnalyzer)"""
         current = node.parent
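The rewritten `_extract_javadoc_for_line` now walks backwards until it hits `*/`, collects lines until it reaches `/**`, and hands the block to the new `_clean_javadoc` helper. As a rough standalone illustration of what that helper does (a re-implementation for demonstration, not the packaged code), a `/** ... */` block collapses into a single space-joined description:

    def clean_javadoc(javadoc_text: str) -> str:
        """Strip JavaDoc comment markers and join the remaining text."""
        cleaned = []
        for line in javadoc_text.split("\n"):
            line = line.strip()
            # Drop the comment markers themselves.
            if line.startswith("/**"):
                line = line[3:].strip()
            elif line.startswith("*/"):
                line = line[2:].strip()
            elif line.startswith("*"):
                line = line[1:].strip()
            if line:
                cleaned.append(line)
        return " ".join(cleaned)

    doc = """/**
     * Returns the user's display name.
     * @param id the user id
     */"""
    print(clean_javadoc(doc))
    # -> "Returns the user's display name. @param id the user id"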
@@ -991,7 +1034,7 @@ class JavaPlugin(LanguagePlugin):
         try:
             import tree_sitter_java as tsjava
 
-            self._language_cache = tsjava.language()
+            self._language_cache = tsjava.language()  # type: ignore
         except ImportError:
             log_error("tree-sitter-java not available")
             return None
@@ -1069,32 +1112,34 @@ class JavaPlugin(LanguagePlugin):
         # Extract elements
         extractor = self.create_extractor()
 
-
-
-
-
-        log_debug("Java Plugin: Extracting packages...")
-        packages = extractor.extract_packages(parse_result.tree, source_code)
-        log_debug(f"Java Plugin: Found {len(packages)} packages")
-
-        log_debug("Java Plugin: Extracting functions...")
-        functions = extractor.extract_functions(parse_result.tree, source_code)
-        log_debug(f"Java Plugin: Found {len(functions)} functions")
+        if parse_result.tree:
+            log_debug("Java Plugin: Extracting functions...")
+            functions = extractor.extract_functions(parse_result.tree, source_code)
+            log_debug(f"Java Plugin: Found {len(functions)} functions")
 
-
-
-
+            log_debug("Java Plugin: Extracting classes...")
+            classes = extractor.extract_classes(parse_result.tree, source_code)
+            log_debug(f"Java Plugin: Found {len(classes)} classes")
 
-
-
-
+            log_debug("Java Plugin: Extracting variables...")
+            variables = extractor.extract_variables(parse_result.tree, source_code)
+            log_debug(f"Java Plugin: Found {len(variables)} variables")
 
-
-
-
-
-
-
+            log_debug("Java Plugin: Extracting imports...")
+            imports = extractor.extract_imports(parse_result.tree, source_code)
+            log_debug(f"Java Plugin: Found {len(imports)} imports")
+        else:
+            functions = []
+            classes = []
+            variables = []
+            imports = []
+
+        # Combine all elements
+        all_elements: list[CodeElement] = []
+        all_elements.extend(functions)
+        all_elements.extend(classes)
+        all_elements.extend(variables)
+        all_elements.extend(imports)
         log_debug(f"Java Plugin: Total elements: {len(all_elements)}")
 
         return AnalysisResult(
@@ -19,9 +19,9 @@ except ImportError:
 
 from ..core.analysis_engine import AnalysisRequest
 from ..language_loader import loader
-from ..models import AnalysisResult, Class, Function, Import, Variable
+from ..models import AnalysisResult, Class, CodeElement, Function, Import, Variable
+from ..plugins.base import ElementExtractor, LanguagePlugin
 from ..utils import log_error, log_warning
-from . import ElementExtractor, LanguagePlugin
 
 
 class JavaScriptElementExtractor(ElementExtractor):
@@ -416,7 +416,7 @@ class JavaScriptPlugin(LanguagePlugin):
         tree = parser.parse(bytes(source_code, "utf8"))
 
         extractor = self.create_extractor()
-        elements = []
+        elements: list[CodeElement] = []
         elements.extend(extractor.extract_functions(tree, source_code))
         elements.extend(extractor.extract_classes(tree, source_code))
         elements.extend(extractor.extract_variables(tree, source_code))
@@ -14,7 +14,7 @@ if TYPE_CHECKING:
     from ..core.analysis_engine import AnalysisRequest
     from ..models import AnalysisResult
 
-from ..models import Class, Function, Import, Variable
+from ..models import Class, CodeElement, Function, Import, Variable
 from ..plugins.base import ElementExtractor, LanguagePlugin
 from ..utils import log_error, log_warning
 
@@ -677,13 +677,23 @@ class PythonPlugin(LanguagePlugin):
 
         # Extract elements
         extractor = self.create_extractor()
-
-
-
-
+        if parse_result.tree:
+            functions = extractor.extract_functions(parse_result.tree, source_code)
+            classes = extractor.extract_classes(parse_result.tree, source_code)
+            variables = extractor.extract_variables(parse_result.tree, source_code)
+            imports = extractor.extract_imports(parse_result.tree, source_code)
+        else:
+            functions = []
+            classes = []
+            variables = []
+            imports = []
 
         # Combine all elements
-        all_elements =
+        all_elements: list[CodeElement] = []
+        all_elements.extend(functions)
+        all_elements.extend(classes)
+        all_elements.extend(variables)
+        all_elements.extend(imports)
 
         return AnalysisResult(
             file_path=file_path,
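Both the Java and Python plugins now guard element extraction behind `if parse_result.tree:` and accumulate results into an explicitly typed `all_elements: list[CodeElement]`, so a failed parse yields an empty but well-typed result instead of an exception. A minimal sketch of the pattern, with placeholder extractor and element classes standing in for the package's own models:

    from typing import Optional

    class CodeElement:  # placeholder for the package's CodeElement model
        pass

    class Function(CodeElement):
        pass

    class Class(CodeElement):
        pass

    class DummyExtractor:
        """Stands in for the plugin's ElementExtractor."""
        def extract_functions(self, tree: object, source: str) -> list[Function]:
            return [Function()]
        def extract_classes(self, tree: object, source: str) -> list[Class]:
            return [Class()]

    def collect_elements(tree: Optional[object], source: str) -> list[CodeElement]:
        extractor = DummyExtractor()
        if tree:
            functions = extractor.extract_functions(tree, source)
            classes = extractor.extract_classes(tree, source)
        else:
            # Parse failed: fall back to empty, correctly typed lists.
            functions = []
            classes = []
        all_elements: list[CodeElement] = []
        all_elements.extend(functions)
        all_elements.extend(classes)
        return all_elements

    print(len(collect_elements(object(), "code")))  # 2
    print(len(collect_elements(None, "code")))      # 0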
@@ -83,9 +83,6 @@ class ProjectStatsResource:
         Returns:
             True if the URI matches the project stats pattern
         """
-        if not isinstance(uri, str):
-            return False
-
         return bool(self._uri_pattern.match(uri))
 
     def _extract_stats_type(self, uri: str) -> str:
@@ -336,8 +333,6 @@ class ProjectStatsResource:
             # Use appropriate analyzer based on language
             if language == "java":
                 # Use advanced analyzer for Java
-                if self._advanced_analyzer is None:
-                    self._advanced_analyzer = get_analysis_engine()
                 # Call the async method with await
                 file_analysis = await self._advanced_analyzer.analyze_file(
                     str(file_path)
@@ -93,7 +93,7 @@ class TreeSitterAnalyzerMCPServer:
         server: Server = Server(self.name)
 
         # Register tools
-        @server.list_tools()
+        @server.list_tools()  # type: ignore
         async def handle_list_tools() -> list[Tool]:
             """List available tools."""
             tools = [
@@ -144,7 +144,7 @@ class TreeSitterAnalyzerMCPServer:
 
             return tools
 
-        @server.call_tool()
+        @server.call_tool()  # type: ignore
         async def handle_call_tool(
             name: str, arguments: dict[str, Any]
         ) -> list[TextContent]:
@@ -201,7 +201,7 @@ class TreeSitterAnalyzerMCPServer:
         ]
 
         # Register resources
-        @server.list_resources()
+        @server.list_resources()  # type: ignore
         async def handle_list_resources() -> list[Resource]:
             """List available resources."""
             return [
@@ -225,7 +225,7 @@ class TreeSitterAnalyzerMCPServer:
             ),
         ]
 
-        @server.read_resource()
+        @server.read_resource()  # type: ignore
         async def handle_read_resource(uri: str) -> str:
             """Read resource content."""
             try:
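The four handler registrations above only change by appending `# type: ignore` to the `@server.list_tools()`, `@server.call_tool()`, `@server.list_resources()`, and `@server.read_resource()` decorators, which silences MyPy where a decorator it cannot type-check would otherwise make the decorated handler untyped. A generic sketch of the same situation, using a hypothetical untyped decorator rather than the MCP server object:

    def register(func):  # no type hints, so MyPy treats the decorator as untyped
        """Hypothetical registration decorator, analogous to server.list_tools()."""
        register.handlers = getattr(register, "handlers", [])
        register.handlers.append(func)
        return func

    @register  # type: ignore
    async def handle_list_tools() -> list[str]:
        return ["analyze_code_scale"]

    print([f.__name__ for f in register.handlers])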
@@ -214,7 +214,7 @@ class AnalyzeScaleTool:
         Returns:
             Dictionary containing LLM guidance
         """
-        guidance = {
+        guidance: dict[str, Any] = {
             "analysis_strategy": "",
             "recommended_tools": [],
             "key_areas": [],
@@ -417,35 +417,47 @@ class AnalyzeScaleTool:
                 "classes": len(
                     [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Class"
                     ]
                 ),
                 "methods": len(
                     [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Function"
                     ]
                 ),
                 "fields": len(
                     [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Variable"
                     ]
                 ),
                 "imports": len(
                     [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Import"
                     ]
                 ),
-                "annotations": len(
+                "annotations": len(
+                    getattr(analysis_result, "annotations", [])
+                    if analysis_result
+                    else []
+                ),
                 "package": (
                     analysis_result.package.name
-                    if analysis_result.package
+                    if analysis_result and analysis_result.package
                     else None
                 ),
             },
@@ -458,20 +470,27 @@ class AnalyzeScaleTool:
         # Add detailed information if requested (backward compatibility)
         if include_details:
             result["detailed_analysis"] = {
-                "statistics":
+                "statistics": (
+                    analysis_result.get_statistics() if analysis_result else {}
+                ),
                 "classes": [
                     {
                         "name": cls.name,
-                        "type": cls
-                        "visibility": cls
-                        "extends": cls
-                        "implements": cls
-                        "annotations": [
+                        "type": getattr(cls, "class_type", "unknown"),
+                        "visibility": getattr(cls, "visibility", "unknown"),
+                        "extends": getattr(cls, "extends_class", None),
+                        "implements": getattr(cls, "implements_interfaces", []),
+                        "annotations": [
+                            getattr(ann, "name", str(ann))
+                            for ann in getattr(cls, "annotations", [])
+                        ],
                         "lines": f"{cls.start_line}-{cls.end_line}",
                     }
                     for cls in [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Class"
                     ]
                 ],
@@ -479,35 +498,49 @@ class AnalyzeScaleTool:
                     {
                         "name": method.name,
                         "file_path": getattr(method, "file_path", file_path),
-                        "visibility": method
-                        "return_type":
-
-
-                        "
-                        "
-
+                        "visibility": getattr(method, "visibility", "unknown"),
+                        "return_type": getattr(
+                            method, "return_type", "unknown"
+                        ),
+                        "parameters": len(getattr(method, "parameters", [])),
+                        "annotations": [
+                            getattr(ann, "name", str(ann))
+                            for ann in getattr(method, "annotations", [])
+                        ],
+                        "is_constructor": getattr(
+                            method, "is_constructor", False
+                        ),
+                        "is_static": getattr(method, "is_static", False),
+                        "complexity": getattr(method, "complexity_score", 0),
                         "lines": f"{method.start_line}-{method.end_line}",
                     }
                     for method in [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Function"
                     ]
                 ],
                 "fields": [
                     {
                         "name": field.name,
-                        "type": field
+                        "type": getattr(field, "field_type", "unknown"),
                         "file_path": getattr(field, "file_path", file_path),
-                        "visibility": field
-                        "is_static": field
-                        "is_final": field
-                        "annotations": [
+                        "visibility": getattr(field, "visibility", "unknown"),
+                        "is_static": getattr(field, "is_static", False),
+                        "is_final": getattr(field, "is_final", False),
+                        "annotations": [
+                            getattr(ann, "name", str(ann))
+                            for ann in getattr(field, "annotations", [])
+                        ],
                         "lines": f"{field.start_line}-{field.end_line}",
                     }
                     for field in [
                         e
-                        for e in
+                        for e in (
+                            analysis_result.elements if analysis_result else []
+                        )
                         if e.__class__.__name__ == "Variable"
                     ]
                 ],
@@ -517,14 +550,14 @@ class AnalyzeScaleTool:
         classes_count = len(
             [
                 e
-                for e in analysis_result.elements
+                for e in (analysis_result.elements if analysis_result else [])
                 if e.__class__.__name__ == "Class"
            ]
        )
         methods_count = len(
             [
                 e
-                for e in analysis_result.elements
+                for e in (analysis_result.elements if analysis_result else [])
                 if e.__class__.__name__ == "Function"
            ]
        )
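Throughout `AnalyzeScaleTool`, direct attribute access on the analysis result and its elements is replaced by `analysis_result.elements if analysis_result else []` plus `getattr(..., default)` lookups, so a None result or an element missing an optional attribute degrades to a default value instead of raising. A condensed, self-contained sketch of that pattern (the element and result classes here are stand-ins, not the package's models):

    from typing import Any, Optional

    class FakeClass:
        """Stand-in element; note it has no `visibility` attribute."""
        def __init__(self, name: str) -> None:
            self.name = name
            self.class_type = "class"

    class FakeResult:
        def __init__(self) -> None:
            self.elements = [FakeClass("UserService")]

    def summarize(analysis_result: Optional[FakeResult]) -> dict[str, Any]:
        elements = analysis_result.elements if analysis_result else []
        return {
            "classes": len([e for e in elements if e.__class__.__name__ == "FakeClass"]),
            "details": [
                {
                    "name": cls.name,
                    "type": getattr(cls, "class_type", "unknown"),
                    # A missing attribute falls back to a default instead of raising.
                    "visibility": getattr(cls, "visibility", "unknown"),
                }
                for cls in elements
            ],
        }

    print(summarize(FakeResult()))
    print(summarize(None))  # {'classes': 0, 'details': []}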
@@ -8,7 +8,7 @@ that matches the CLI --advanced --statistics output exactly.
 
 import time
 from pathlib import Path
-from typing import Any
+from typing import Any, cast
 
 from ...core.analysis_engine import get_analysis_engine
 from ...language_detector import detect_language_from_file
@@ -104,8 +104,10 @@ class AnalyzeScaleToolCLICompatible:
         # Use AdvancedAnalyzer for comprehensive analysis
         analysis_result = await self.analysis_engine.analyze_file(file_path)
 
-
-
+        # Handle potential None result (for testing purposes with mocked engine)
+        # This can only happen in tests where the engine is mocked to return None
+        # Use cast to tell MyPy this is possible in testing scenarios
+        if cast(Any, analysis_result) is None:
             return {
                 "file_path": file_path,
                 "success": False,
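The None-check above is unreachable as far as MyPy is concerned, because the engine's `analyze_file` is annotated as always returning a result; wrapping the value in `cast(Any, ...)` keeps the guard (which matters when tests mock the engine to return None) without tripping an unreachable-code diagnostic. A small sketch of the idea, with a hypothetical engine in place of the real one:

    from typing import Any, cast

    class Engine:
        def analyze_file(self, path: str) -> dict:
            # Annotated as always returning a dict; a mocked engine in tests
            # might still return None at runtime.
            return {"file_path": path, "success": True}

    def run(engine: Engine, path: str) -> dict:
        result = engine.analyze_file(path)
        # cast(Any, ...) tells MyPy the comparison is intentional even though
        # the declared return type can never be None.
        if cast(Any, result) is None:
            return {"file_path": path, "success": False, "error": "analysis returned None"}
        return result

    print(run(Engine(), "Example.java"))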
@@ -129,7 +131,10 @@ class AnalyzeScaleToolCLICompatible:
             "file_path": file_path,
             "success": True,
             "package_name": (
-                analysis_result.package.name
+                analysis_result.package.name
+                if analysis_result.package
+                and hasattr(analysis_result.package, "name")
+                else None
             ),
             "element_counts": {
                 "imports": len(analysis_result.imports),
@@ -157,7 +157,7 @@ class TableFormatTool:
         # Fallback to original conversion method
         return self._convert_parameters(parameters)
 
-    def _get_field_modifiers(self, field) -> list:
+    def _get_field_modifiers(self, field: Any) -> list[str]:
         """Extract field modifiers as a list"""
         modifiers = []
 
|
|
|
172
172
|
modifiers.append("final")
|
|
173
173
|
return modifiers
|
|
174
174
|
|
|
175
|
-
def _convert_analysis_result_to_dict(self, result) -> dict[str, Any]:
|
|
175
|
+
def _convert_analysis_result_to_dict(self, result: Any) -> dict[str, Any]:
|
|
176
176
|
"""Convert AnalysisResult to dictionary format expected by TableFormatter"""
|
|
177
177
|
# Extract elements by type
|
|
178
178
|
classes = [e for e in result.elements if e.__class__.__name__ == "Class"]
|
|
@@ -9,6 +9,8 @@ Note: Cache and performance monitoring functionality has been moved to
 the unified core services for better architecture.
 """
 
+from typing import Any
+
 # Export main utility classes and functions
 from .error_handler import (
     AnalysisError,
@@ -50,36 +52,36 @@ try:
     class BackwardCompatibleCacheManager:
         """Backward compatible cache manager wrapper"""
 
-        def __init__(self):
+        def __init__(self) -> None:
             self._cache_service = UnifiedCacheService()
 
-        def clear_all_caches(self):
+        def clear_all_caches(self) -> None:
             """Backward compatibility: clear all caches"""
             return self._cache_service.clear()
 
-        def get_cache_stats(self):
+        def get_cache_stats(self) -> dict[str, Any]:
             """Backward compatibility: get cache statistics"""
             return self._cache_service.get_stats()
 
-        def __getattr__(self, name):
+        def __getattr__(self, name: str) -> Any:
             """Delegate other methods to the cache service"""
             return getattr(self._cache_service, name)
 
-    def get_cache_manager():
+    def get_cache_manager() -> Any:
         """Backward compatibility: Get unified cache service"""
         return BackwardCompatibleCacheManager()
 
-    def get_performance_monitor():
+    def get_performance_monitor() -> Any:
         """Backward compatibility: Get unified analysis engine for performance monitoring"""
         return UnifiedAnalysisEngine()
 
 except ImportError:
     # Fallback if core services are not available
-    def get_cache_manager():
+    def get_cache_manager() -> Any:
         """Fallback cache manager"""
         return None
 
-    def get_performance_monitor():
+    def get_performance_monitor() -> Any:
         """Fallback performance monitor"""
         return None
 

tree_sitter_analyzer/models.py CHANGED
@@ -378,7 +378,7 @@ class AnalysisResult:
         package_info = {"name": str(self.package)}
 
         # Safe item-processing helper function
-        def safe_get_attr(obj, attr, default=""):
+        def safe_get_attr(obj: Any, attr: str, default: Any = "") -> Any:
             if hasattr(obj, attr):
                 return getattr(obj, attr)
             elif isinstance(obj, dict):
@@ -159,11 +159,8 @@ class OutputManager:
 
     def output_queries(self, queries: list[str]) -> None:
         """Output available queries"""
-
-
-            self.query_list(query_dict, "All")
-        else:
-            self.query_list(queries, "All")
+        query_dict = {q: f"Query {q}" for q in queries}
+        self.query_list(query_dict, "All")
 
     def output_extensions(self, extensions: list[str]) -> None:
         """Output file extensions"""
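`output_queries` (both the method above and the module-level function below) previously branched on the shape of its input (the removed condition is truncated in this view); it now always builds a description dict from the query names with a comprehension and hands that to `query_list`. The transformation itself is just:

    queries = ["class", "method", "imports"]

    # Same comprehension the 0.4.0 output helpers use to label each query name.
    query_dict = {q: f"Query {q}" for q in queries}

    print(query_dict)
    # {'class': 'Query class', 'method': 'Query method', 'imports': 'Query imports'}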
@@ -244,11 +241,8 @@ def output_languages(
 
 def output_queries(queries: list[str], language: str = "All") -> None:
     """Output available queries"""
-
-
-        _output_manager.query_list(query_dict, language)
-    else:
-        _output_manager.query_list(queries, language)
+    query_dict = {q: f"Query {q}" for q in queries}
+    _output_manager.query_list(query_dict, language)
 
 
 def output_extensions(extensions: list[str]) -> None: