tree_sitter_analyzer-1.9.17.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tree_sitter_analyzer/__init__.py +132 -0
- tree_sitter_analyzer/__main__.py +11 -0
- tree_sitter_analyzer/api.py +853 -0
- tree_sitter_analyzer/cli/__init__.py +39 -0
- tree_sitter_analyzer/cli/__main__.py +12 -0
- tree_sitter_analyzer/cli/argument_validator.py +89 -0
- tree_sitter_analyzer/cli/commands/__init__.py +26 -0
- tree_sitter_analyzer/cli/commands/advanced_command.py +226 -0
- tree_sitter_analyzer/cli/commands/base_command.py +181 -0
- tree_sitter_analyzer/cli/commands/default_command.py +18 -0
- tree_sitter_analyzer/cli/commands/find_and_grep_cli.py +188 -0
- tree_sitter_analyzer/cli/commands/list_files_cli.py +133 -0
- tree_sitter_analyzer/cli/commands/partial_read_command.py +139 -0
- tree_sitter_analyzer/cli/commands/query_command.py +109 -0
- tree_sitter_analyzer/cli/commands/search_content_cli.py +161 -0
- tree_sitter_analyzer/cli/commands/structure_command.py +156 -0
- tree_sitter_analyzer/cli/commands/summary_command.py +116 -0
- tree_sitter_analyzer/cli/commands/table_command.py +414 -0
- tree_sitter_analyzer/cli/info_commands.py +124 -0
- tree_sitter_analyzer/cli_main.py +472 -0
- tree_sitter_analyzer/constants.py +85 -0
- tree_sitter_analyzer/core/__init__.py +15 -0
- tree_sitter_analyzer/core/analysis_engine.py +580 -0
- tree_sitter_analyzer/core/cache_service.py +333 -0
- tree_sitter_analyzer/core/engine.py +585 -0
- tree_sitter_analyzer/core/parser.py +293 -0
- tree_sitter_analyzer/core/query.py +605 -0
- tree_sitter_analyzer/core/query_filter.py +200 -0
- tree_sitter_analyzer/core/query_service.py +340 -0
- tree_sitter_analyzer/encoding_utils.py +530 -0
- tree_sitter_analyzer/exceptions.py +747 -0
- tree_sitter_analyzer/file_handler.py +246 -0
- tree_sitter_analyzer/formatters/__init__.py +1 -0
- tree_sitter_analyzer/formatters/base_formatter.py +201 -0
- tree_sitter_analyzer/formatters/csharp_formatter.py +367 -0
- tree_sitter_analyzer/formatters/formatter_config.py +197 -0
- tree_sitter_analyzer/formatters/formatter_factory.py +84 -0
- tree_sitter_analyzer/formatters/formatter_registry.py +377 -0
- tree_sitter_analyzer/formatters/formatter_selector.py +96 -0
- tree_sitter_analyzer/formatters/go_formatter.py +368 -0
- tree_sitter_analyzer/formatters/html_formatter.py +498 -0
- tree_sitter_analyzer/formatters/java_formatter.py +423 -0
- tree_sitter_analyzer/formatters/javascript_formatter.py +611 -0
- tree_sitter_analyzer/formatters/kotlin_formatter.py +268 -0
- tree_sitter_analyzer/formatters/language_formatter_factory.py +123 -0
- tree_sitter_analyzer/formatters/legacy_formatter_adapters.py +228 -0
- tree_sitter_analyzer/formatters/markdown_formatter.py +725 -0
- tree_sitter_analyzer/formatters/php_formatter.py +301 -0
- tree_sitter_analyzer/formatters/python_formatter.py +830 -0
- tree_sitter_analyzer/formatters/ruby_formatter.py +278 -0
- tree_sitter_analyzer/formatters/rust_formatter.py +233 -0
- tree_sitter_analyzer/formatters/sql_formatter_wrapper.py +689 -0
- tree_sitter_analyzer/formatters/sql_formatters.py +536 -0
- tree_sitter_analyzer/formatters/typescript_formatter.py +543 -0
- tree_sitter_analyzer/formatters/yaml_formatter.py +462 -0
- tree_sitter_analyzer/interfaces/__init__.py +9 -0
- tree_sitter_analyzer/interfaces/cli.py +535 -0
- tree_sitter_analyzer/interfaces/cli_adapter.py +359 -0
- tree_sitter_analyzer/interfaces/mcp_adapter.py +224 -0
- tree_sitter_analyzer/interfaces/mcp_server.py +428 -0
- tree_sitter_analyzer/language_detector.py +553 -0
- tree_sitter_analyzer/language_loader.py +271 -0
- tree_sitter_analyzer/languages/__init__.py +10 -0
- tree_sitter_analyzer/languages/csharp_plugin.py +1076 -0
- tree_sitter_analyzer/languages/css_plugin.py +449 -0
- tree_sitter_analyzer/languages/go_plugin.py +836 -0
- tree_sitter_analyzer/languages/html_plugin.py +496 -0
- tree_sitter_analyzer/languages/java_plugin.py +1299 -0
- tree_sitter_analyzer/languages/javascript_plugin.py +1622 -0
- tree_sitter_analyzer/languages/kotlin_plugin.py +656 -0
- tree_sitter_analyzer/languages/markdown_plugin.py +1928 -0
- tree_sitter_analyzer/languages/php_plugin.py +862 -0
- tree_sitter_analyzer/languages/python_plugin.py +1636 -0
- tree_sitter_analyzer/languages/ruby_plugin.py +757 -0
- tree_sitter_analyzer/languages/rust_plugin.py +673 -0
- tree_sitter_analyzer/languages/sql_plugin.py +2444 -0
- tree_sitter_analyzer/languages/typescript_plugin.py +1892 -0
- tree_sitter_analyzer/languages/yaml_plugin.py +695 -0
- tree_sitter_analyzer/legacy_table_formatter.py +860 -0
- tree_sitter_analyzer/mcp/__init__.py +34 -0
- tree_sitter_analyzer/mcp/resources/__init__.py +43 -0
- tree_sitter_analyzer/mcp/resources/code_file_resource.py +208 -0
- tree_sitter_analyzer/mcp/resources/project_stats_resource.py +586 -0
- tree_sitter_analyzer/mcp/server.py +869 -0
- tree_sitter_analyzer/mcp/tools/__init__.py +28 -0
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool.py +779 -0
- tree_sitter_analyzer/mcp/tools/analyze_scale_tool_cli_compatible.py +291 -0
- tree_sitter_analyzer/mcp/tools/base_tool.py +139 -0
- tree_sitter_analyzer/mcp/tools/fd_rg_utils.py +816 -0
- tree_sitter_analyzer/mcp/tools/find_and_grep_tool.py +686 -0
- tree_sitter_analyzer/mcp/tools/list_files_tool.py +413 -0
- tree_sitter_analyzer/mcp/tools/output_format_validator.py +148 -0
- tree_sitter_analyzer/mcp/tools/query_tool.py +443 -0
- tree_sitter_analyzer/mcp/tools/read_partial_tool.py +464 -0
- tree_sitter_analyzer/mcp/tools/search_content_tool.py +836 -0
- tree_sitter_analyzer/mcp/tools/table_format_tool.py +572 -0
- tree_sitter_analyzer/mcp/tools/universal_analyze_tool.py +653 -0
- tree_sitter_analyzer/mcp/utils/__init__.py +113 -0
- tree_sitter_analyzer/mcp/utils/error_handler.py +569 -0
- tree_sitter_analyzer/mcp/utils/file_output_factory.py +217 -0
- tree_sitter_analyzer/mcp/utils/file_output_manager.py +322 -0
- tree_sitter_analyzer/mcp/utils/gitignore_detector.py +358 -0
- tree_sitter_analyzer/mcp/utils/path_resolver.py +414 -0
- tree_sitter_analyzer/mcp/utils/search_cache.py +343 -0
- tree_sitter_analyzer/models.py +840 -0
- tree_sitter_analyzer/mypy_current_errors.txt +2 -0
- tree_sitter_analyzer/output_manager.py +255 -0
- tree_sitter_analyzer/platform_compat/__init__.py +3 -0
- tree_sitter_analyzer/platform_compat/adapter.py +324 -0
- tree_sitter_analyzer/platform_compat/compare.py +224 -0
- tree_sitter_analyzer/platform_compat/detector.py +67 -0
- tree_sitter_analyzer/platform_compat/fixtures.py +228 -0
- tree_sitter_analyzer/platform_compat/profiles.py +217 -0
- tree_sitter_analyzer/platform_compat/record.py +55 -0
- tree_sitter_analyzer/platform_compat/recorder.py +155 -0
- tree_sitter_analyzer/platform_compat/report.py +92 -0
- tree_sitter_analyzer/plugins/__init__.py +280 -0
- tree_sitter_analyzer/plugins/base.py +647 -0
- tree_sitter_analyzer/plugins/manager.py +384 -0
- tree_sitter_analyzer/project_detector.py +328 -0
- tree_sitter_analyzer/queries/__init__.py +27 -0
- tree_sitter_analyzer/queries/csharp.py +216 -0
- tree_sitter_analyzer/queries/css.py +615 -0
- tree_sitter_analyzer/queries/go.py +275 -0
- tree_sitter_analyzer/queries/html.py +543 -0
- tree_sitter_analyzer/queries/java.py +402 -0
- tree_sitter_analyzer/queries/javascript.py +724 -0
- tree_sitter_analyzer/queries/kotlin.py +192 -0
- tree_sitter_analyzer/queries/markdown.py +258 -0
- tree_sitter_analyzer/queries/php.py +95 -0
- tree_sitter_analyzer/queries/python.py +859 -0
- tree_sitter_analyzer/queries/ruby.py +92 -0
- tree_sitter_analyzer/queries/rust.py +223 -0
- tree_sitter_analyzer/queries/sql.py +555 -0
- tree_sitter_analyzer/queries/typescript.py +871 -0
- tree_sitter_analyzer/queries/yaml.py +236 -0
- tree_sitter_analyzer/query_loader.py +272 -0
- tree_sitter_analyzer/security/__init__.py +22 -0
- tree_sitter_analyzer/security/boundary_manager.py +277 -0
- tree_sitter_analyzer/security/regex_checker.py +297 -0
- tree_sitter_analyzer/security/validator.py +599 -0
- tree_sitter_analyzer/table_formatter.py +782 -0
- tree_sitter_analyzer/utils/__init__.py +53 -0
- tree_sitter_analyzer/utils/logging.py +433 -0
- tree_sitter_analyzer/utils/tree_sitter_compat.py +289 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/METADATA +485 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/RECORD +149 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/WHEEL +4 -0
- tree_sitter_analyzer-1.9.17.1.dist-info/entry_points.txt +25 -0
tree_sitter_analyzer/formatters/yaml_formatter.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python3
+"""
+YAML Formatter
+
+Provides specialized formatting for YAML files, focusing on configuration structure
+including mappings, sequences, anchors, aliases, and multi-document support.
+"""
+
+from typing import Any
+
+from .base_formatter import BaseFormatter
+
+
+class YAMLFormatter(BaseFormatter):
+    """Formatter specialized for YAML documents."""
+
+    def __init__(self) -> None:
+        """Initialize the YAML formatter."""
+        self.language = "yaml"
+
+    def format_summary(self, analysis_result: dict[str, Any]) -> str:
+        """Format summary for YAML files."""
+        file_path = analysis_result.get("file_path", "")
+        elements = analysis_result.get("elements", [])
+
+        # Count different types of YAML elements
+        documents = [e for e in elements if e.get("element_type") == "document"]
+        mappings = [e for e in elements if e.get("element_type") == "mapping"]
+        sequences = [e for e in elements if e.get("element_type") == "sequence"]
+        anchors = [e for e in elements if e.get("element_type") == "anchor"]
+        aliases = [e for e in elements if e.get("element_type") == "alias"]
+        comments = [e for e in elements if e.get("element_type") == "comment"]
+
+        summary = {
+            "documents": len(documents),
+            "mappings": len(mappings),
+            "sequences": len(sequences),
+            "anchors": [{"name": a.get("anchor_name", "")} for a in anchors],
+            "aliases": [{"target": a.get("alias_target", "")} for a in aliases],
+            "comments": len(comments),
+        }
+
+        result = {"file_path": file_path, "language": "yaml", "summary": summary}
+
+        return self._format_json_output("Summary Results", result)
+
+    def format_structure(self, analysis_result: dict[str, Any]) -> str:
+        """Format structure analysis for YAML files."""
+        file_path = analysis_result.get("file_path", "")
+        elements = analysis_result.get("elements", [])
+        line_count = analysis_result.get("line_count", 0)
+
+        # Organize elements by type
+        documents = [e for e in elements if e.get("element_type") == "document"]
+        mappings = [e for e in elements if e.get("element_type") == "mapping"]
+        sequences = [e for e in elements if e.get("element_type") == "sequence"]
+        anchors = [e for e in elements if e.get("element_type") == "anchor"]
+        aliases = [e for e in elements if e.get("element_type") == "alias"]
+        comments = [e for e in elements if e.get("element_type") == "comment"]
+
+        structure = {
+            "file_path": file_path,
+            "language": "yaml",
+            "documents": [
+                {
+                    "index": d.get("document_index", 0),
+                    "line_range": {
+                        "start": d.get("start_line", 0),
+                        "end": d.get("end_line", 0),
+                    },
+                    "child_count": d.get("child_count", 0),
+                }
+                for d in documents
+            ],
+            "mappings": [
+                {
+                    "key": m.get("key", ""),
+                    "value_type": m.get("value_type", ""),
+                    "nesting_level": m.get("nesting_level", 0),
+                    "line_range": {
+                        "start": m.get("start_line", 0),
+                        "end": m.get("end_line", 0),
+                    },
+                }
+                for m in mappings
+            ],
+            "sequences": [
+                {
+                    "child_count": s.get("child_count", 0),
+                    "nesting_level": s.get("nesting_level", 0),
+                    "line_range": {
+                        "start": s.get("start_line", 0),
+                        "end": s.get("end_line", 0),
+                    },
+                }
+                for s in sequences
+            ],
+            "anchors": [
+                {
+                    "name": a.get("anchor_name", ""),
+                    "line": a.get("start_line", 0),
+                }
+                for a in anchors
+            ],
+            "aliases": [
+                {
+                    "target": a.get("alias_target", ""),
+                    "line": a.get("start_line", 0),
+                }
+                for a in aliases
+            ],
+            "statistics": {
+                "document_count": len(documents),
+                "mapping_count": len(mappings),
+                "sequence_count": len(sequences),
+                "anchor_count": len(anchors),
+                "alias_count": len(aliases),
+                "comment_count": len(comments),
+                "total_lines": line_count,
+            },
+            "analysis_metadata": analysis_result.get("analysis_metadata", {}),
+        }
+
+        return self._format_json_output("Structure Analysis Results", structure)
+
+    def format_advanced(
+        self, analysis_result: dict[str, Any], output_format: str = "json"
+    ) -> str:
+        """Format advanced analysis for YAML files."""
+        file_path = analysis_result.get("file_path", "")
+        elements = analysis_result.get("elements", [])
+        line_count = analysis_result.get("line_count", 0)
+        element_count = len(elements)
+
+        # Calculate YAML-specific metrics
+        documents = [e for e in elements if e.get("element_type") == "document"]
+        mappings = [e for e in elements if e.get("element_type") == "mapping"]
+        sequences = [e for e in elements if e.get("element_type") == "sequence"]
+        anchors = [e for e in elements if e.get("element_type") == "anchor"]
+        aliases = [e for e in elements if e.get("element_type") == "alias"]
+        comments = [e for e in elements if e.get("element_type") == "comment"]
+
+        # Calculate nesting metrics
+        nesting_levels = [m.get("nesting_level", 0) for m in mappings + sequences]
+        max_nesting = max(nesting_levels) if nesting_levels else 0
+        avg_nesting = sum(nesting_levels) / len(nesting_levels) if nesting_levels else 0
+
+        # Value type distribution
+        value_types: dict[str, int] = {}
+        for m in mappings:
+            vt = m.get("value_type", "unknown")
+            value_types[vt] = value_types.get(vt, 0) + 1
+
+        advanced_data = {
+            "file_path": file_path,
+            "language": "yaml",
+            "line_count": line_count,
+            "element_count": element_count,
+            "success": True,
+            "elements": elements,
+            "document_metrics": {
+                "document_count": len(documents),
+                "mapping_count": len(mappings),
+                "sequence_count": len(sequences),
+                "anchor_count": len(anchors),
+                "alias_count": len(aliases),
+                "comment_count": len(comments),
+                "max_nesting_level": max_nesting,
+                "avg_nesting_level": round(avg_nesting, 2),
+            },
+            "value_type_distribution": value_types,
+            "content_analysis": {
+                "is_multi_document": len(documents) > 1,
+                "has_anchors": len(anchors) > 0,
+                "has_aliases": len(aliases) > 0,
+                "has_comments": len(comments) > 0,
+                "complexity": self._calculate_complexity(
+                    mappings, sequences, max_nesting
+                ),
+            },
+        }
+
+        if output_format == "text":
+            return self._format_advanced_text(advanced_data)
+        else:
+            return self._format_json_output("Advanced Analysis Results", advanced_data)
+
+    def format_analysis_result(
+        self, analysis_result: Any, table_type: str = "full"
+    ) -> str:
+        """Format AnalysisResult directly for YAML files."""
+        data = self._convert_analysis_result_to_format(analysis_result)
+        return self.format_table(data, table_type)
+
+    def _convert_analysis_result_to_format(
+        self, analysis_result: Any
+    ) -> dict[str, Any]:
+        """Convert AnalysisResult to format expected by format_table."""
+        return {
+            "file_path": analysis_result.file_path,
+            "language": analysis_result.language,
+            "line_count": analysis_result.line_count,
+            "elements": [
+                {
+                    "name": getattr(element, "name", ""),
+                    "element_type": getattr(element, "element_type", ""),
+                    "key": getattr(element, "key", ""),
+                    "value": getattr(element, "value", ""),
+                    "value_type": getattr(element, "value_type", ""),
+                    "anchor_name": getattr(element, "anchor_name", ""),
+                    "alias_target": getattr(element, "alias_target", ""),
+                    "nesting_level": getattr(element, "nesting_level", 0),
+                    "document_index": getattr(element, "document_index", 0),
+                    "child_count": getattr(element, "child_count", None),
+                    "start_line": getattr(element, "start_line", 0),
+                    "end_line": getattr(element, "end_line", 0),
+                }
+                for element in analysis_result.elements
+            ],
+            "analysis_metadata": {
+                "analysis_time": getattr(analysis_result, "analysis_time", 0.0),
+                "language": analysis_result.language,
+                "file_path": analysis_result.file_path,
+                "analyzer_version": "2.0.0",
+            },
+        }
+
+    def format_table(
+        self, analysis_result: dict[str, Any], table_type: str = "full"
+    ) -> str:
+        """Format table output for YAML files."""
+        if table_type == "compact":
+            return self._format_compact(analysis_result)
+        elif table_type == "csv":
+            return self._format_csv(analysis_result)
+        return self._format_full(analysis_result)
+
+    def _format_full(self, analysis_result: dict[str, Any]) -> str:
+        """Format full table output for YAML files."""
+        file_path = analysis_result.get("file_path", "")
+        elements = analysis_result.get("elements", [])
+
+        output = [f"# YAML Analysis: {file_path}\n"]
+
+        # Document Overview
+        output.append("## Document Overview\n")
+        output.append("| Property | Value |")
+        output.append("|----------|-------|")
+        output.append(f"| File | {file_path} |")
+        output.append("| Language | yaml |")
+        output.append(f"| Total Lines | {analysis_result.get('line_count', 0)} |")
+        output.append(f"| Total Elements | {len(elements)} |")
+        output.append("")
+
+        # Documents Section
+        documents = [e for e in elements if e.get("element_type") == "document"]
+        if documents:
+            output.append("## Documents\n")
+            output.append("| Index | Lines | Children |")
+            output.append("|-------|-------|----------|")
+            for doc in documents:
+                idx = doc.get("document_index", 0)
+                start = doc.get("start_line", 0)
+                end = doc.get("end_line", 0)
+                children = doc.get("child_count", 0)
+                output.append(f"| {idx} | {start}-{end} | {children} |")
+            output.append("")
+
+        # Mappings Section
+        mappings = [e for e in elements if e.get("element_type") == "mapping"]
+        if mappings:
+            output.append("## Mappings\n")
+            output.append("| Key | Value Type | Nesting | Line |")
+            output.append("|-----|------------|---------|------|")
+            for m in mappings[:50]:  # Limit to 50 for readability
+                key = m.get("key", "")[:30]
+                vtype = m.get("value_type", "")
+                nesting = m.get("nesting_level", 0)
+                line = m.get("start_line", 0)
+                output.append(f"| {key} | {vtype} | {nesting} | {line} |")
+            if len(mappings) > 50:
+                output.append(f"| ... | ({len(mappings) - 50} more) | | |")
+            output.append("")
+
+        # Sequences Section
+        sequences = [e for e in elements if e.get("element_type") == "sequence"]
+        if sequences:
+            output.append("## Sequences\n")
+            output.append("| Items | Nesting | Line |")
+            output.append("|-------|---------|------|")
+            for s in sequences:
+                items = s.get("child_count", 0)
+                nesting = s.get("nesting_level", 0)
+                line = s.get("start_line", 0)
+                output.append(f"| {items} | {nesting} | {line} |")
+            output.append("")
+
+        # Anchors Section
+        anchors = [e for e in elements if e.get("element_type") == "anchor"]
+        if anchors:
+            output.append("## Anchors\n")
+            output.append("| Name | Line |")
+            output.append("|------|------|")
+            for a in anchors:
+                name = a.get("anchor_name", "")
+                line = a.get("start_line", 0)
+                output.append(f"| &{name} | {line} |")
+            output.append("")
+
+        # Aliases Section
+        aliases = [e for e in elements if e.get("element_type") == "alias"]
+        if aliases:
+            output.append("## Aliases\n")
+            output.append("| Target | Line |")
+            output.append("|--------|------|")
+            for a in aliases:
+                target = a.get("alias_target", "")
+                line = a.get("start_line", 0)
+                output.append(f"| *{target} | {line} |")
+            output.append("")
+
+        # Comments Section
+        comments = [e for e in elements if e.get("element_type") == "comment"]
+        if comments:
+            output.append("## Comments\n")
+            output.append("| Content | Line |")
+            output.append("|---------|------|")
+            for c in comments:
+                content = c.get("value", "")[:50]
+                if len(c.get("value", "")) > 50:
+                    content += "..."
+                line = c.get("start_line", 0)
+                output.append(f"| {content} | {line} |")
+            output.append("")
+
+        return "\n".join(output)
+
+    def _format_compact(self, analysis_result: dict[str, Any]) -> str:
+        """Format compact table output for YAML files."""
+        file_path = analysis_result.get("file_path", "")
+        elements = analysis_result.get("elements", [])
+
+        # Count elements by type
+        documents = [e for e in elements if e.get("element_type") == "document"]
+        mappings = [e for e in elements if e.get("element_type") == "mapping"]
+        sequences = [e for e in elements if e.get("element_type") == "sequence"]
+        anchors = [e for e in elements if e.get("element_type") == "anchor"]
+        aliases = [e for e in elements if e.get("element_type") == "alias"]
+        comments = [e for e in elements if e.get("element_type") == "comment"]
+
+        output = [f"# YAML Analysis: {file_path} (Compact)\n"]
+
+        # Summary table
+        output.append("## Summary\n")
+        output.append("| Element Type | Count |")
+        output.append("|--------------|-------|")
+        output.append(f"| Documents | {len(documents)} |")
+        output.append(f"| Mappings | {len(mappings)} |")
+        output.append(f"| Sequences | {len(sequences)} |")
+        output.append(f"| Anchors | {len(anchors)} |")
+        output.append(f"| Aliases | {len(aliases)} |")
+        output.append(f"| Comments | {len(comments)} |")
+        output.append(f"| **Total** | **{len(elements)}** |")
+        output.append("")
+
+        # Top-level mappings only
+        top_level_mappings = [m for m in mappings if m.get("nesting_level", 0) == 1]
+        if top_level_mappings:
+            output.append("## Top-Level Keys\n")
+            output.append("| Key | Value Type | Line |")
+            output.append("|-----|------------|------|")
+            for m in top_level_mappings:
+                key = m.get("key", "")[:30]
+                vtype = m.get("value_type", "")
+                line = m.get("start_line", 0)
+                output.append(f"| {key} | {vtype} | {line} |")
+            output.append("")
+
+        # Anchors and Aliases
+        if anchors or aliases:
+            output.append("## References\n")
+            output.append("| Type | Name/Target | Line |")
+            output.append("|------|-------------|------|")
+            for a in anchors:
+                output.append(
+                    f"| Anchor | &{a.get('anchor_name', '')} | {a.get('start_line', 0)} |"
+                )
+            for a in aliases:
+                output.append(
+                    f"| Alias | *{a.get('alias_target', '')} | {a.get('start_line', 0)} |"
+                )
+            output.append("")
+
+        return "\n".join(output)
+
+    def _format_csv(self, analysis_result: dict[str, Any]) -> str:
+        """Format CSV output for YAML files."""
+        elements = analysis_result.get("elements", [])
+
+        output = ["name,element_type,value_type,nesting_level,start_line,end_line"]
+
+        for e in elements:
+            name = e.get("name", "").replace(",", ";")
+            element_type = e.get("element_type", "")
+            value_type = e.get("value_type", "") or ""
+            nesting = e.get("nesting_level", 0)
+            start = e.get("start_line", 0)
+            end = e.get("end_line", 0)
+            output.append(f"{name},{element_type},{value_type},{nesting},{start},{end}")
+
+        return "\n".join(output)
+
+    def _calculate_complexity(
+        self, mappings: list[dict], sequences: list[dict], max_nesting: int
+    ) -> str:
+        """Calculate document complexity based on structure."""
+        score = 0
+        score += len(mappings) * 1
+        score += len(sequences) * 2
+        score += max_nesting * 5
+
+        if score < 20:
+            return "Simple"
+        elif score < 50:
+            return "Moderate"
+        elif score < 100:
+            return "Complex"
+        else:
+            return "Very Complex"
+
+    def _format_advanced_text(self, data: dict[str, Any]) -> str:
+        """Format advanced analysis in text format."""
+        output = ["--- Advanced Analysis Results ---"]
+
+        output.append(f'"File: {data["file_path"]}"')
+        output.append(f'"Language: {data["language"]}"')
+        output.append(f'"Lines: {data["line_count"]}"')
+        output.append(f'"Elements: {data["element_count"]}"')
+
+        metrics = data["document_metrics"]
+        output.append(f'"Documents: {metrics["document_count"]}"')
+        output.append(f'"Mappings: {metrics["mapping_count"]}"')
+        output.append(f'"Sequences: {metrics["sequence_count"]}"')
+        output.append(f'"Anchors: {metrics["anchor_count"]}"')
+        output.append(f'"Aliases: {metrics["alias_count"]}"')
+        output.append(f'"Comments: {metrics["comment_count"]}"')
+        output.append(f'"Max Nesting: {metrics["max_nesting_level"]}"')
+
+        content = data["content_analysis"]
+        output.append(f'"Multi-Document: {content["is_multi_document"]}"')
+        output.append(f'"Has Anchors: {content["has_anchors"]}"')
+        output.append(f'"Complexity: {content["complexity"]}"')
+
+        return "\n".join(output)
+
+    def _format_json_output(self, title: str, data: dict[str, Any]) -> str:
+        """Format JSON output with title."""
+        import json
+
+        output = [f"--- {title} ---"]
+        output.append(json.dumps(data, indent=2, ensure_ascii=False))
+        return "\n".join(output)
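For orientation, the block below is a minimal usage sketch, not taken from the package's own documentation, showing how the YAMLFormatter added in this hunk might be exercised. It assumes the import path implied by the file listing above (tree_sitter_analyzer.formatters.yaml_formatter), that BaseFormatter requires no extra setup beyond `__init__`, and a hand-built `analysis_result` dict carrying the keys the formatter reads; in practice that dict would come from the package's analysis engine.

```python
# Hypothetical sketch: exercising the YAMLFormatter from the diff above.
# The import path and the hand-built analysis_result are assumptions.
from tree_sitter_analyzer.formatters.yaml_formatter import YAMLFormatter

analysis_result = {
    "file_path": "config/app.yaml",
    "line_count": 12,
    "elements": [
        {"element_type": "document", "document_index": 0,
         "start_line": 1, "end_line": 12, "child_count": 2},
        {"element_type": "mapping", "key": "server", "value_type": "mapping",
         "nesting_level": 1, "start_line": 2, "end_line": 5},
        {"element_type": "anchor", "anchor_name": "defaults", "start_line": 7},
        {"element_type": "alias", "alias_target": "defaults", "start_line": 10},
    ],
}

formatter = YAMLFormatter()
print(formatter.format_summary(analysis_result))           # titled JSON summary
print(formatter.format_table(analysis_result, "compact"))  # Markdown summary tables
print(formatter.format_table(analysis_result, "csv"))      # one CSV row per element
```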
tree_sitter_analyzer/interfaces/__init__.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+"""
+Interfaces Package
+
+This package contains the external interfaces for the tree-sitter analyzer.
+Each interface provides a different way to interact with the core analysis engine.
+"""
+
+# This file makes the interfaces directory a Python package
|