tree-sitter-analyzer 1.9.17.1 (tree_sitter_analyzer-1.9.17.1-py3-none-any.whl)
This diff shows the contents of publicly available package versions as released to the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in those public registries.
- tree_sitter_analyzer/__init__.py +132 -0
- tree_sitter_analyzer/__main__.py +11 -0
- tree_sitter_analyzer/api.py +853 -0
- tree_sitter_analyzer/cli/__init__.py +39 -0
- tree_sitter_analyzer/cli/__main__.py +12 -0
- tree_sitter_analyzer/cli/argument_validator.py +89 -0
- tree_sitter_analyzer/cli/commands/__init__.py +26 -0
- tree_sitter_analyzer/cli/commands/advanced_command.py +226 -0
- tree_sitter_analyzer/cli/commands/base_command.py +181 -0
- tree_sitter_analyzer/cli/commands/default_command.py +18 -0
- tree_sitter_analyzer/cli/commands/find_and_grep_cli.py +188 -0
- tree_sitter_analyzer/cli/commands/list_files_cli.py +133 -0
- tree_sitter_analyzer/cli/commands/partial_read_command.py +139 -0
- tree_sitter_analyzer/cli/commands/query_command.py +109 -0
- tree_sitter_analyzer/cli/commands/search_content_cli.py +161 -0
- tree_sitter_analyzer/cli/commands/structure_command.py +156 -0
- tree_sitter_analyzer/cli/commands/summary_command.py +116 -0
- tree_sitter_analyzer/cli/commands/table_command.py +414 -0
- tree_sitter_analyzer/cli/info_commands.py +124 -0
- tree_sitter_analyzer/cli_main.py +472 -0
- tree_sitter_analyzer/constants.py +85 -0
- tree_sitter_analyzer/core/__init__.py +15 -0
- tree_sitter_analyzer/core/analysis_engine.py +580 -0
- tree_sitter_analyzer/core/cache_service.py +333 -0
- tree_sitter_analyzer/core/engine.py +585 -0
- tree_sitter_analyzer/core/parser.py +293 -0
- tree_sitter_analyzer/core/query.py +605 -0
- tree_sitter_analyzer/core/query_filter.py +200 -0
- tree_sitter_analyzer/core/query_service.py +340 -0
- tree_sitter_analyzer/encoding_utils.py +530 -0
- tree_sitter_analyzer/exceptions.py +747 -0
- tree_sitter_analyzer/file_handler.py +246 -0
- tree_sitter_analyzer/formatters/__init__.py +1 -0
- tree_sitter_analyzer/formatters/base_formatter.py +201 -0
- tree_sitter_analyzer/formatters/csharp_formatter.py +367 -0
- tree_sitter_analyzer/formatters/formatter_config.py +197 -0
- tree_sitter_analyzer/formatters/formatter_factory.py +84 -0
- tree_sitter_analyzer/formatters/formatter_registry.py +377 -0
- tree_sitter_analyzer/formatters/formatter_selector.py +96 -0
- tree_sitter_analyzer/formatters/go_formatter.py +368 -0
- tree_sitter_analyzer/formatters/html_formatter.py +498 -0
- tree_sitter_analyzer/formatters/java_formatter.py +423 -0
- tree_sitter_analyzer/formatters/javascript_formatter.py +611 -0
- tree_sitter_analyzer/formatters/kotlin_formatter.py +268 -0
- tree_sitter_analyzer/formatters/language_formatter_factory.py +123 -0
- tree_sitter_analyzer/formatters/legacy_formatter_adapters.py +228 -0
- tree_sitter_analyzer/formatters/markdown_formatter.py +725 -0
- tree_sitter_analyzer/formatters/php_formatter.py +301 -0
- tree_sitter_analyzer/formatters/python_formatter.py +830 -0
- tree_sitter_analyzer/formatters/ruby_formatter.py +278 -0
- tree_sitter_analyzer/formatters/rust_formatter.py +233 -0
- tree_sitter_analyzer/formatters/sql_formatter_wrapper.py +689 -0
- tree_sitter_analyzer/formatters/sql_formatters.py +536 -0
- tree_sitter_analyzer/formatters/typescript_formatter.py +543 -0
- tree_sitter_analyzer/formatters/yaml_formatter.py +462 -0
- tree_sitter_analyzer/interfaces/__init__.py +9 -0
- tree_sitter_analyzer/interfaces/cli.py +535 -0
- tree_sitter_analyzer/interfaces/cli_adapter.py +359 -0
- tree_sitter_analyzer/interfaces/mcp_adapter.py +224 -0
- tree_sitter_analyzer/interfaces/mcp_server.py +428 -0
- tree_sitter_analyzer/language_detector.py +553 -0
- tree_sitter_analyzer/language_loader.py +271 -0
- tree_sitter_analyzer/languages/__init__.py +10 -0
- tree_sitter_analyzer/languages/csharp_plugin.py +1076 -0
- tree_sitter_analyzer/languages/css_plugin.py +449 -0
- tree_sitter_analyzer/languages/go_plugin.py +836 -0
- tree_sitter_analyzer/languages/html_plugin.py +496 -0
- tree_sitter_analyzer/languages/java_plugin.py +1299 -0
- tree_sitter_analyzer/languages/javascript_plugin.py +1622 -0
- tree_sitter_analyzer/languages/kotlin_plugin.py +656 -0
- tree_sitter_analyzer/languages/markdown_plugin.py +1928 -0
- tree_sitter_analyzer/languages/php_plugin.py +862 -0
- tree_sitter_analyzer/languages/python_plugin.py +1636 -0
- tree_sitter_analyzer/languages/ruby_plugin.py +757 -0
- tree_sitter_analyzer/languages/rust_plugin.py +673 -0
- tree_sitter_analyzer/languages/sql_plugin.py +2444 -0
- tree_sitter_analyzer/languages/typescript_plugin.py +1892 -0
- tree_sitter_analyzer/languages/yaml_plugin.py +695 -0
- tree_sitter_analyzer/legacy_table_formatter.py +860 -0
- tree_sitter_analyzer/mcp/__init__.py +34 -0
- tree_sitter_analyzer/mcp/resources/__init__.py +43 -0
- tree_sitter_analyzer/mcp/resources/code_file_resource.py +208 -0
- tree_sitter_analyzer/mcp/resources/project_stats_resource.py +586 -0
- tree_sitter_analyzer/mcp/server.py +869 -0
- tree_sitter_analyzer/mcp/tools/__init__.py +28 -0
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool.py +779 -0
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool_cli_compatible.py +291 -0
- tree_sitter_analyzer/mcp/tools/base_tool.py +139 -0
- tree_sitter_analyzer/mcp/tools/fd_rg_utils.py +816 -0
- tree_sitter_analyzer/mcp/tools/find_and_grep_tool.py +686 -0
- tree_sitter_analyzer/mcp/tools/list_files_tool.py +413 -0
- tree_sitter_analyzer/mcp/tools/output_format_validator.py +148 -0
- tree_sitter_analyzer/mcp/tools/query_tool.py +443 -0
- tree_sitter_analyzer/mcp/tools/read_partial_tool.py +464 -0
- tree_sitter_analyzer/mcp/tools/search_content_tool.py +836 -0
- tree_sitter_analyzer/mcp/tools/table_format_tool.py +572 -0
- tree_sitter_analyzer/mcp/tools/universal_analyze_tool.py +653 -0
- tree_sitter_analyzer/mcp/utils/__init__.py +113 -0
- tree_sitter_analyzer/mcp/utils/error_handler.py +569 -0
- tree_sitter_analyzer/mcp/utils/file_output_factory.py +217 -0
- tree_sitter_analyzer/mcp/utils/file_output_manager.py +322 -0
- tree_sitter_analyzer/mcp/utils/gitignore_detector.py +358 -0
- tree_sitter_analyzer/mcp/utils/path_resolver.py +414 -0
- tree_sitter_analyzer/mcp/utils/search_cache.py +343 -0
- tree_sitter_analyzer/models.py +840 -0
- tree_sitter_analyzer/mypy_current_errors.txt +2 -0
- tree_sitter_analyzer/output_manager.py +255 -0
- tree_sitter_analyzer/platform_compat/__init__.py +3 -0
- tree_sitter_analyzer/platform_compat/adapter.py +324 -0
- tree_sitter_analyzer/platform_compat/compare.py +224 -0
- tree_sitter_analyzer/platform_compat/detector.py +67 -0
- tree_sitter_analyzer/platform_compat/fixtures.py +228 -0
- tree_sitter_analyzer/platform_compat/profiles.py +217 -0
- tree_sitter_analyzer/platform_compat/record.py +55 -0
- tree_sitter_analyzer/platform_compat/recorder.py +155 -0
- tree_sitter_analyzer/platform_compat/report.py +92 -0
- tree_sitter_analyzer/plugins/__init__.py +280 -0
- tree_sitter_analyzer/plugins/base.py +647 -0
- tree_sitter_analyzer/plugins/manager.py +384 -0
- tree_sitter_analyzer/project_detector.py +328 -0
- tree_sitter_analyzer/queries/__init__.py +27 -0
- tree_sitter_analyzer/queries/csharp.py +216 -0
- tree_sitter_analyzer/queries/css.py +615 -0
- tree_sitter_analyzer/queries/go.py +275 -0
- tree_sitter_analyzer/queries/html.py +543 -0
- tree_sitter_analyzer/queries/java.py +402 -0
- tree_sitter_analyzer/queries/javascript.py +724 -0
- tree_sitter_analyzer/queries/kotlin.py +192 -0
- tree_sitter_analyzer/queries/markdown.py +258 -0
- tree_sitter_analyzer/queries/php.py +95 -0
- tree_sitter_analyzer/queries/python.py +859 -0
- tree_sitter_analyzer/queries/ruby.py +92 -0
- tree_sitter_analyzer/queries/rust.py +223 -0
- tree_sitter_analyzer/queries/sql.py +555 -0
- tree_sitter_analyzer/queries/typescript.py +871 -0
- tree_sitter_analyzer/queries/yaml.py +236 -0
- tree_sitter_analyzer/query_loader.py +272 -0
- tree_sitter_analyzer/security/__init__.py +22 -0
- tree_sitter_analyzer/security/boundary_manager.py +277 -0
- tree_sitter_analyzer/security/regex_checker.py +297 -0
- tree_sitter_analyzer/security/validator.py +599 -0
- tree_sitter_analyzer/table_formatter.py +782 -0
- tree_sitter_analyzer/utils/__init__.py +53 -0
- tree_sitter_analyzer/utils/logging.py +433 -0
- tree_sitter_analyzer/utils/tree_sitter_compat.py +289 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/METADATA +485 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/RECORD +149 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/WHEEL +4 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/entry_points.txt +25 -0
tree_sitter_analyzer/cli/commands/search_content_cli.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python3
+"""
+Standalone CLI for search_content (ripgrep wrapper)
+
+Maps CLI flags to the MCP SearchContentTool and prints JSON/text.
+"""
+
+from __future__ import annotations
+
+import argparse
+import asyncio
+import sys
+from typing import Any
+
+from ...mcp.tools.search_content_tool import SearchContentTool
+from ...output_manager import output_data, output_error, set_output_mode
+from ...project_detector import detect_project_root
+
+
+def _build_parser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(
+        description="Search text content in files using ripgrep via MCP wrapper.",
+    )
+
+    roots_or_files = parser.add_mutually_exclusive_group(required=True)
+    roots_or_files.add_argument(
+        "--roots",
+        nargs="+",
+        help="Directory roots to search recursively",
+    )
+    roots_or_files.add_argument(
+        "--files",
+        nargs="+",
+        help="Explicit file list to search",
+    )
+
+    parser.add_argument("--query", required=True, help="Search pattern")
+
+    # Output
+    parser.add_argument(
+        "--output-format",
+        choices=["json", "text"],
+        default="json",
+        help="Output format (default: json)",
+    )
+    parser.add_argument(
+        "--quiet",
+        action="store_true",
+        help="Suppress non-essential output",
+    )
+
+    # rg options
+    parser.add_argument(
+        "--case", choices=["smart", "insensitive", "sensitive"], default="smart"
+    )
+    parser.add_argument("--fixed-strings", action="store_true")
+    parser.add_argument("--word", action="store_true")
+    parser.add_argument("--multiline", action="store_true")
+    parser.add_argument("--include-globs", nargs="+")
+    parser.add_argument("--exclude-globs", nargs="+")
+    parser.add_argument("--follow-symlinks", action="store_true")
+    parser.add_argument("--hidden", action="store_true")
+    parser.add_argument("--no-ignore", action="store_true")
+    parser.add_argument("--max-filesize")
+    parser.add_argument("--context-before", type=int)
+    parser.add_argument("--context-after", type=int)
+    parser.add_argument("--encoding")
+    parser.add_argument("--max-count", type=int)
+    parser.add_argument("--timeout-ms", type=int)
+    parser.add_argument("--count-only-matches", action="store_true")
+    parser.add_argument("--summary-only", action="store_true")
+    parser.add_argument("--optimize-paths", action="store_true")
+    parser.add_argument("--group-by-file", action="store_true")
+    parser.add_argument("--total-only", action="store_true")
+
+    # project root
+    parser.add_argument(
+        "--project-root",
+        help="Project root directory for security boundary (auto-detected if omitted)",
+    )
+
+    return parser
+
+
+async def _run(args: argparse.Namespace) -> int:
+    set_output_mode(quiet=bool(args.quiet), json_output=(args.output_format == "json"))
+
+    project_root = detect_project_root(None, args.project_root)
+    tool = SearchContentTool(project_root)
+
+    payload: dict[str, Any] = {
+        "query": args.query,
+    }
+    if args.roots:
+        payload["roots"] = list(args.roots)
+    if args.files:
+        payload["files"] = list(args.files)
+
+    # Options mapping
+    if args.case:
+        payload["case"] = args.case
+    if args.fixed_strings:
+        payload["fixed_strings"] = True
+    if args.word:
+        payload["word"] = True
+    if args.multiline:
+        payload["multiline"] = True
+    if args.include_globs:
+        payload["include_globs"] = args.include_globs
+    if args.exclude_globs:
+        payload["exclude_globs"] = args.exclude_globs
+    if args.follow_symlinks:
+        payload["follow_symlinks"] = True
+    if args.hidden:
+        payload["hidden"] = True
+    if args.no_ignore:
+        payload["no_ignore"] = True
+    if args.max_filesize:
+        payload["max_filesize"] = args.max_filesize
+    if args.context_before is not None:
+        payload["context_before"] = int(args.context_before)
+    if args.context_after is not None:
+        payload["context_after"] = int(args.context_after)
+    if args.encoding:
+        payload["encoding"] = args.encoding
+    if args.max_count is not None:
+        payload["max_count"] = int(args.max_count)
+    if args.timeout_ms is not None:
+        payload["timeout_ms"] = int(args.timeout_ms)
+    if args.count_only_matches:
+        payload["count_only_matches"] = True
+    if args.summary_only:
+        payload["summary_only"] = True
+    if args.optimize_paths:
+        payload["optimize_paths"] = True
+    if args.group_by_file:
+        payload["group_by_file"] = True
+    if args.total_only:
+        payload["total_only"] = True
+
+    try:
+        result = await tool.execute(payload)
+        output_data(result, args.output_format)
+        return 0 if (isinstance(result, dict) or isinstance(result, int)) else 0
+    except Exception as e:
+        output_error(str(e))
+        return 1
+
+
+def main() -> None:
+    parser = _build_parser()
+    args = parser.parse_args()
+    try:
+        rc = asyncio.run(_run(args))
+    except KeyboardInterrupt:
+        rc = 1
+    sys.exit(rc)
+
+
+if __name__ == "__main__":
+    main()
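The CLI above is a thin wrapper: it translates flags into a plain dict payload and hands it to SearchContentTool.execute(). Below is a minimal sketch of driving the tool directly with the same payload shape. It assumes the package is importable under the absolute module paths implied by the file list above (tree_sitter_analyzer.mcp.tools.search_content_tool, tree_sitter_analyzer.project_detector) and that ripgrep is available on PATH; the payload keys mirror the CLI mapping, and any extra validation done by the tool is not shown.

import asyncio

from tree_sitter_analyzer.mcp.tools.search_content_tool import SearchContentTool
from tree_sitter_analyzer.project_detector import detect_project_root


async def demo() -> None:
    # Auto-detect the project root, as the CLI does when --project-root is omitted.
    project_root = detect_project_root(None, None)
    tool = SearchContentTool(project_root)

    # Payload shaped like the one _run() builds from CLI flags.
    payload = {
        "query": "TODO",            # --query
        "roots": ["."],             # --roots
        "case": "smart",            # --case (default)
        "include_globs": ["*.py"],  # --include-globs
        "summary_only": True,       # --summary-only
    }
    result = await tool.execute(payload)
    print(result)


if __name__ == "__main__":
    asyncio.run(demo())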
tree_sitter_analyzer/cli/commands/structure_command.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python3
+"""
+Structure Command
+
+Handles structure analysis functionality with appropriate Japanese output.
+"""
+
+from typing import TYPE_CHECKING
+
+from ...constants import (
+    ELEMENT_TYPE_CLASS,
+    ELEMENT_TYPE_FUNCTION,
+    ELEMENT_TYPE_IMPORT,
+    ELEMENT_TYPE_PACKAGE,
+    ELEMENT_TYPE_VARIABLE,
+    is_element_of_type,
+)
+from ...output_manager import output_data, output_json, output_section
+from .base_command import BaseCommand
+
+if TYPE_CHECKING:
+    from ...models import AnalysisResult
+
+
+class StructureCommand(BaseCommand):
+    """Command for structure analysis with Japanese output."""
+
+    async def execute_async(self, language: str) -> int:
+        analysis_result = await self.analyze_file(language)
+        if not analysis_result:
+            return 1
+
+        self._output_structure_analysis(analysis_result)
+        return 0
+
+    def _output_structure_analysis(self, analysis_result: "AnalysisResult") -> None:
+        """Output structure analysis results with appropriate header."""
+        output_section("Structure Analysis Results")
+
+        # Convert to legacy structure format expected by tests
+        structure_dict = self._convert_to_legacy_format(analysis_result)
+
+        if self.args.output_format == "json":
+            output_json(structure_dict)
+        else:
+            self._output_text_format(structure_dict)
+
+    def _convert_to_legacy_format(self, analysis_result: "AnalysisResult") -> dict:
+        """Convert AnalysisResult to legacy structure format expected by tests."""
+        import time
+
+        # Extract elements by type
+        classes = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_CLASS)
+        ]
+        methods = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_FUNCTION)
+        ]
+        fields = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_VARIABLE)
+        ]
+        imports = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_IMPORT)
+        ]
+        packages = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_PACKAGE)
+        ]
+
+        return {
+            "file_path": analysis_result.file_path,
+            "language": analysis_result.language,
+            "package": (
+                {
+                    "name": packages[0].name,
+                    "line_range": {
+                        "start": packages[0].start_line,
+                        "end": packages[0].end_line,
+                    },
+                }
+                if packages
+                else None
+            ),
+            "classes": [{"name": getattr(c, "name", "unknown")} for c in classes],
+            "methods": [{"name": getattr(m, "name", "unknown")} for m in methods],
+            "fields": [{"name": getattr(f, "name", "unknown")} for f in fields],
+            "imports": [
+                {
+                    "name": getattr(i, "name", "unknown"),
+                    "is_static": getattr(i, "is_static", False),
+                    "is_wildcard": getattr(i, "is_wildcard", False),
+                    "statement": getattr(i, "import_statement", ""),
+                    "line_range": {
+                        "start": getattr(i, "start_line", 0),
+                        "end": getattr(i, "end_line", 0),
+                    },
+                }
+                for i in imports
+            ],
+            "annotations": [],
+            "statistics": {
+                "class_count": len(classes),
+                "method_count": len(methods),
+                "field_count": len(fields),
+                "import_count": len(imports),
+                "total_lines": analysis_result.line_count,
+                "annotation_count": 0,
+            },
+            "analysis_metadata": {
+                "analysis_time": getattr(analysis_result, "analysis_time", 0.0),
+                "language": analysis_result.language,
+                "file_path": analysis_result.file_path,
+                "analyzer_version": "2.0.0",
+                "timestamp": time.time(),
+            },
+        }
+
+    def _output_text_format(self, structure_dict: dict) -> None:
+        """Output structure analysis in human-readable text format."""
+        output_data(f"File: {structure_dict['file_path']}")
+        output_data(f"Language: {structure_dict['language']}")
+
+        if structure_dict["package"]:
+            output_data(f"Package: {structure_dict['package']['name']}")
+
+        stats = structure_dict["statistics"]
+        output_data("Statistics:")
+        output_data(f"  Classes: {stats['class_count']}")
+        output_data(f"  Methods: {stats['method_count']}")
+        output_data(f"  Fields: {stats['field_count']}")
+        output_data(f"  Imports: {stats['import_count']}")
+        output_data(f"  Total lines: {stats['total_lines']}")
+
+        if structure_dict["classes"]:
+            output_data("Classes:")
+            for cls in structure_dict["classes"]:
+                output_data(f"  - {cls['name']}")
+
+        if structure_dict["methods"]:
+            output_data("Methods:")
+            for method in structure_dict["methods"]:
+                output_data(f"  - {method['name']}")
+
+        if structure_dict["fields"]:
+            output_data("Fields:")
+            for field in structure_dict["fields"]:
+                output_data(f"  - {field['name']}")
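For reference, the legacy structure dict assembled by _convert_to_legacy_format has the shape sketched below. The keys come straight from the code above; all values are illustrative placeholders, not output from a real analysis run.

# Illustrative shape only; placeholder values for a hypothetical Java file.
example_structure = {
    "file_path": "src/Example.java",
    "language": "java",
    "package": {"name": "com.example", "line_range": {"start": 1, "end": 1}},
    "classes": [{"name": "Example"}],
    "methods": [{"name": "run"}],
    "fields": [{"name": "count"}],
    "imports": [
        {
            "name": "java.util.List",
            "is_static": False,
            "is_wildcard": False,
            "statement": "import java.util.List;",
            "line_range": {"start": 3, "end": 3},
        }
    ],
    "annotations": [],
    "statistics": {
        "class_count": 1,
        "method_count": 1,
        "field_count": 1,
        "import_count": 1,
        "total_lines": 42,
        "annotation_count": 0,
    },
    "analysis_metadata": {
        "analysis_time": 0.0,
        "language": "java",
        "file_path": "src/Example.java",
        "analyzer_version": "2.0.0",
        "timestamp": 1700000000.0,  # time.time() at analysis
    },
}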
tree_sitter_analyzer/cli/commands/summary_command.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+"""
+Summary Command
+
+Handles summary functionality with specified element types.
+"""
+
+from typing import TYPE_CHECKING, Any
+
+from ...constants import (
+    ELEMENT_TYPE_CLASS,
+    ELEMENT_TYPE_FUNCTION,
+    ELEMENT_TYPE_IMPORT,
+    ELEMENT_TYPE_VARIABLE,
+    is_element_of_type,
+)
+from ...output_manager import output_data, output_json, output_section
+from .base_command import BaseCommand
+
+if TYPE_CHECKING:
+    from ...models import AnalysisResult
+
+
+class SummaryCommand(BaseCommand):
+    """Command for summary analysis with specified element types."""
+
+    async def execute_async(self, language: str) -> int:
+        analysis_result = await self.analyze_file(language)
+        if not analysis_result:
+            return 1
+
+        self._output_summary_analysis(analysis_result)
+        return 0
+
+    def _output_summary_analysis(self, analysis_result: "AnalysisResult") -> None:
+        """Output summary analysis results."""
+        output_section("Summary Results")
+
+        # Get summary types from args (default: classes,methods)
+        summary_types = getattr(self.args, "summary", "classes,methods")
+        if summary_types:
+            requested_types = [t.strip() for t in summary_types.split(",")]
+        else:
+            requested_types = ["classes", "methods"]
+
+        # Extract elements by type
+        classes = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_CLASS)
+        ]
+        methods = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_FUNCTION)
+        ]
+        fields = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_VARIABLE)
+        ]
+        imports = [
+            e
+            for e in analysis_result.elements
+            if is_element_of_type(e, ELEMENT_TYPE_IMPORT)
+        ]
+
+        summary_data: dict[str, Any] = {
+            "file_path": analysis_result.file_path,
+            "language": analysis_result.language,
+            "summary": {},
+        }
+
+        if "classes" in requested_types:
+            summary_data["summary"]["classes"] = [
+                {"name": getattr(c, "name", "unknown")} for c in classes
+            ]
+
+        if "methods" in requested_types:
+            summary_data["summary"]["methods"] = [
+                {"name": getattr(m, "name", "unknown")} for m in methods
+            ]
+
+        if "fields" in requested_types:
+            summary_data["summary"]["fields"] = [
+                {"name": getattr(f, "name", "unknown")} for f in fields
+            ]
+
+        if "imports" in requested_types:
+            summary_data["summary"]["imports"] = [
+                {"name": getattr(i, "name", "unknown")} for i in imports
+            ]
+
+        if self.args.output_format == "json":
+            output_json(summary_data)
+        else:
+            self._output_text_format(summary_data, requested_types)
+
+    def _output_text_format(self, summary_data: dict, requested_types: list) -> None:
+        """Output summary in human-readable text format."""
+        output_data(f"File: {summary_data['file_path']}")
+        output_data(f"Language: {summary_data['language']}")
+
+        for element_type in requested_types:
+            if element_type in summary_data["summary"]:
+                elements = summary_data["summary"][element_type]
+                type_name_map = {
+                    "classes": "Classes",
+                    "methods": "Methods",
+                    "fields": "Fields",
+                    "imports": "Imports",
+                }
+                type_name = type_name_map.get(element_type, element_type)
+                output_data(f"\n{type_name} ({len(elements)} items):")
+                for element in elements:
+                    output_data(f"  - {element['name']}")
+