skydeckai-code 0.1.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,703 @@
+ import json
+ import os
+ import subprocess
+ from typing import Any, Dict, List
+
+ import tree_sitter_c_sharp
+ import tree_sitter_cpp
+ import tree_sitter_go
+ import tree_sitter_java
+ import tree_sitter_javascript
+ import tree_sitter_kotlin
+ import tree_sitter_python
+ import tree_sitter_ruby
+ import tree_sitter_rust
+ from tree_sitter import Language, Parser
+ from tree_sitter_php._binding import language_php
+ from tree_sitter_typescript._binding import language_tsx, language_typescript
+
+ from .state import state
+
+ # Map of file extensions to language names
+ LANGUAGE_MAP = {
+     '.py': 'python',
+     '.js': 'javascript', '.jsx': 'javascript', '.mjs': 'javascript', '.cjs': 'javascript',
+     '.ts': 'typescript',
+     '.tsx': 'tsx',
+     '.java': 'java',
+     '.cpp': 'cpp', '.hpp': 'cpp', '.cc': 'cpp', '.hh': 'cpp', '.cxx': 'cpp', '.hxx': 'cpp',
+     '.rb': 'ruby', '.rake': 'ruby',
+     '.go': 'go',
+     '.rs': 'rust',
+     '.php': 'php',
+     '.cs': 'c-sharp',
+     '.kt': 'kotlin', '.kts': 'kotlin'
+     # Add more languages as needed
+ }
+
+ # Initialize languages and create parsers
+ try:
+     _parser_cache = {
+         'python': Parser(Language(tree_sitter_python.language())),
+         'javascript': Parser(Language(tree_sitter_javascript.language())),
+         'typescript': Parser(Language(language_typescript())),
+         'tsx': Parser(Language(language_tsx())),
+         'java': Parser(Language(tree_sitter_java.language())),
+         'cpp': Parser(Language(tree_sitter_cpp.language())),
+         'ruby': Parser(Language(tree_sitter_ruby.language())),
+         'go': Parser(Language(tree_sitter_go.language())),
+         'rust': Parser(Language(tree_sitter_rust.language())),
+         'php': Parser(Language(language_php())),
+         'c-sharp': Parser(Language(tree_sitter_c_sharp.language())),
+         'kotlin': Parser(Language(tree_sitter_kotlin.language())),
+     }
+ except Exception as e:
+     raise RuntimeError(f"Failed to initialize languages: {e}")
+
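The parser cache above is keyed by the same names that LANGUAGE_MAP produces, so a file extension resolves to a ready-made parser in two dictionary lookups. A minimal sketch of that path, assuming the tree-sitter grammar wheels imported above are installed (this snippet is illustrative and not part of the package):

    import tree_sitter_python
    from tree_sitter import Language, Parser

    # Same construction the module uses for _parser_cache['python']
    parser = Parser(Language(tree_sitter_python.language()))
    tree = parser.parse(b"def greet(name):\n    return name\n")
    # The root node's children are the snippet's top-level statements
    print([child.type for child in tree.root_node.children])  # ['function_definition']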
+ def codebase_mapper_tool():
+     return {
+         "name": "codebase_mapper",
+         "description": "Build a structural map of source code files in a directory. "
+             "This tool analyzes code structure to identify classes, functions, and methods. "
+             "WHEN TO USE: When you need to understand the structure of a codebase, discover classes and "
+             "functions across multiple files, identify inheritance relationships, or get a high-level overview of code organization without "
+             "reading every file individually. "
+             "WHEN NOT TO USE: When you need to search for specific text patterns (use search_files instead), when you "
+             "need to analyze a single known file (use read_file instead), or when you're working with non-code files. "
+             "SUPPORTED LANGUAGES: Python (.py), JavaScript (.js/.jsx), TypeScript (.ts/.tsx), Java (.java), C++ (.cpp), Ruby (.rb), Go (.go), Rust (.rs), PHP (.php), "
+             "C# (.cs), Kotlin (.kt). "
+             "RETURNS: A text-based tree structure showing classes and functions in the codebase, along with statistics "
+             "about found elements. Only analyzes files within the allowed directory. "
+             "Example: Enter '.' to analyze all source files in the current directory, or 'src' to analyze all files in the src directory.",
+         "inputSchema": {
+             "type": "object",
+             "properties": {
+                 "path": {
+                     "type": "string",
+                     "description": "Root directory to analyze. Examples: '.' for current directory, 'src' for src directory, 'lib/components' for a specific subdirectory. The path must point to a directory within the allowed workspace."
+                 }
+             },
+             "required": ["path"]
+         },
+     }
+
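For reference, a call that satisfies this schema needs only the path argument. The payload below is a hypothetical client-side illustration (the MCP framework builds the actual request envelope around it):

    example_call = {
        "name": "codebase_mapper",
        "arguments": {"path": "src"},  # use "." to map every source file under the workspace root
    }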
+ def _detect_language(file_path: str) -> str:
+     """Detect programming language based on file extension."""
+     ext = os.path.splitext(file_path)[1].lower()
+     return LANGUAGE_MAP.get(ext, 'unknown')
+
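Extension lookup is case-insensitive and falls back to 'unknown', which the callers below use to skip unsupported files. A small illustrative check (not part of the module):

    for candidate in ["api/server.py", "web/App.TSX", "README.md"]:
        print(candidate, "->", _detect_language(candidate))
    # api/server.py -> python
    # web/App.TSX -> tsx
    # README.md -> unknown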
+ def _get_language_parser(language: str):
+     """Get the appropriate tree-sitter parser for a language."""
+     try:
+         if language not in _parser_cache:
+             return {'error': f'Unsupported language: {language}'}
+         return _parser_cache[language]
+     except Exception as e:
+         return {
+             'error': f'Error loading language {language}: {str(e)}'
+         }
+
+ def _extract_node_text(node, source_code: bytes) -> str:
+     """Extract text from a node."""
+     return source_code[node.start_byte:node.end_byte].decode('utf-8')
+
+ def _analyze_file(file_path: str) -> Dict[str, Any]:
+     """Analyze a single file using tree-sitter."""
+     try:
+         with open(file_path, 'rb') as f:
+             source_code = f.read()
+
+         language = _detect_language(file_path)
+         if language == 'unknown':
+             return {'error': f'Unsupported file type: {os.path.splitext(file_path)[1]}'}
+
+         parser = _get_language_parser(language)
+         if isinstance(parser, dict) and 'error' in parser:
+             return parser
+
+         tree = parser.parse(source_code)
+         root_node = tree.root_node
+
+         # Check if we got a valid root node
+         if not root_node:
+             return {'error': 'Failed to parse file - no root node'}
+
+         def process_node(node) -> Dict[str, Any]:
+             if not node:
+                 return None
+
+             result = {
+                 'type': node.type,
+                 'start_line': node.start_point[0] + 1,
+                 'end_line': node.end_point[0] + 1,
+             }
+
+             # Process child nodes based on language-specific patterns
+             if language == 'python':
+                 if node.type in ['class_definition', 'function_definition']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                         elif child.type == 'parameters':
+                             params = []
+                             for param in child.children:
+                                 if param.type == 'identifier':
+                                     params.append(_extract_node_text(param, source_code))
+                             if params:
+                                 result['parameters'] = params
+                 elif node.type == 'assignment':
+                     # Handle global variable assignments
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['type'] = 'variable_declaration'
+                             result['name'] = _extract_node_text(child, source_code)
+                             # Return after the first identifier to avoid capturing the right-hand side
+                             return result
+
+             elif language == 'javascript':
+                 if node.type in ['class_declaration', 'method_definition', 'function_declaration']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                         elif child.type == 'formal_parameters':
+                             params = []
+                             for param in child.children:
+                                 if param.type == 'identifier':
+                                     params.append(_extract_node_text(param, source_code))
+                             if params:
+                                 result['parameters'] = params
+                 elif node.type in ['variable_declaration', 'lexical_declaration']:
+                     # Handle var/let/const declarations
+                     for child in node.children:
+                         if child.type == 'variable_declarator':
+                             for subchild in child.children:
+                                 if subchild.type == 'identifier':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+
+             elif language == 'typescript':
+                 if node.type in ['class_declaration', 'method_declaration', 'function_declaration', 'interface_declaration']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['variable_statement', 'property_declaration']:
+                     # Handle variable declarations and property declarations
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['type'] = 'variable_declaration'
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+
+             elif language == 'java':
+                 if node.type in ['class_declaration', 'method_declaration', 'constructor_declaration', 'interface_declaration']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['field_declaration', 'variable_declaration']:
+                     # Handle Java global fields and variables
+                     for child in node.children:
+                         if child.type == 'variable_declarator':
+                             for subchild in child.children:
+                                 if subchild.type == 'identifier':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+                     return result
+
+             elif language == 'cpp':
+                 if node.type in ['class_specifier', 'function_definition', 'struct_specifier']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['declaration', 'variable_declaration']:
+                     # Handle C++ global variables and declarations
+                     for child in node.children:
+                         if child.type == 'init_declarator' or child.type == 'declarator':
+                             for subchild in child.children:
+                                 if subchild.type == 'identifier':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+                     return result
+
+             elif language == 'ruby':
+                 if node.type in ['class', 'method', 'singleton_method', 'module']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type == 'assignment' or node.type == 'global_variable':
+                     # Handle Ruby global variables and assignments
+                     for child in node.children:
+                         if child.type == 'identifier' or child.type == 'global_variable':
+                             result['type'] = 'variable_declaration'
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+
+             elif language == 'go':
+                 if node.type in ['type_declaration', 'function_declaration', 'method_declaration', 'interface_declaration']:
+                     for child in node.children:
+                         if child.type == 'identifier' or child.type == 'field_identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type == 'var_declaration' or node.type == 'const_declaration':
+                     # Handle Go variable and constant declarations
+                     for child in node.children:
+                         if child.type == 'var_spec' or child.type == 'const_spec':
+                             for subchild in child.children:
+                                 if subchild.type == 'identifier':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+                     return result
+
+             elif language == 'rust':
+                 if node.type in ['struct_item', 'impl_item', 'fn_item', 'trait_item']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['static_item', 'const_item', 'let_declaration']:
+                     # Handle Rust static items, constants, and let declarations
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['type'] = 'variable_declaration'
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                         elif child.type == 'pattern' and child.children:
+                             result['name'] = _extract_node_text(child.children[0], source_code)
+                             return result
+
+             elif language == 'php':
+                 if node.type in ['class_declaration', 'method_declaration', 'function_definition', 'interface_declaration', 'trait_declaration']:
+                     for child in node.children:
+                         if child.type == 'name':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type == 'property_declaration' or node.type == 'const_declaration':
+                     # Handle PHP class properties and constants
+                     for child in node.children:
+                         if child.type == 'property_element' or child.type == 'const_element':
+                             for subchild in child.children:
+                                 if subchild.type == 'variable_name' or subchild.type == 'name':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+
+             elif language == 'c-sharp':  # matches the 'c-sharp' key used in LANGUAGE_MAP and _parser_cache
+                 if node.type in ['class_declaration', 'interface_declaration', 'method_declaration']:
+                     for child in node.children:
+                         if child.type == 'identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['field_declaration', 'property_declaration']:
+                     # Handle C# fields and properties
+                     for child in node.children:
+                         if child.type == 'variable_declaration':
+                             for subchild in child.children:
+                                 if subchild.type == 'identifier':
+                                     result['type'] = 'variable_declaration'
+                                     result['name'] = _extract_node_text(subchild, source_code)
+                                     return result
+                     return result
+
+             elif language == 'kotlin':
+                 if node.type in ['class_declaration', 'function_declaration']:
+                     for child in node.children:
+                         if child.type == 'simple_identifier':
+                             result['name'] = _extract_node_text(child, source_code)
+                             return result
+                     return result
+                 elif node.type in ['property_declaration', 'variable_declaration']:
+                     # Handle Kotlin properties and variables
+                     for child in node.children:
+                         if child.type == 'simple_identifier':
+                             result['type'] = 'variable_declaration'
+                             result['name'] = _extract_node_text(child, source_code)
+                             # Only capture the first identifier
+                             return result
+                     return result
+
+             # Recursively process children
+             children = []
+             for child in node.children:
+                 child_result = process_node(child)
+                 if child_result and (
+                     child_result.get('type') in [
+                         'class_definition', 'function_definition',
+                         'class_declaration', 'method_definition',
+                         'function_declaration', 'interface_declaration',
+                         'method_declaration', 'constructor_declaration',
+                         'class_specifier', 'struct_specifier',
+                         'class', 'method', 'singleton_method', 'module',
+                         'type_declaration', 'method_declaration',
+                         'interface_declaration', 'struct_item', 'impl_item',
+                         'fn_item', 'trait_item', 'trait_declaration',
+                         'property_declaration', 'object_definition',
+                         'trait_definition', 'def_definition',
+                         'function_definition', 'class_definition',
+                         'variable_declaration'] or 'children' in child_result
+                 ):
+                     children.append(child_result)
+
+             if children:
+                 result['children'] = children
+             return result
+
+         return process_node(root_node)
+
+     except Exception as e:
+         return {
+             'error': f'Error analyzing file: {str(e)}'
+         }
+
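The value _analyze_file returns is a nested dict of 'type', 'start_line', 'end_line', optional 'name'/'parameters', and 'children' entries. A self-contained sketch of calling it on a throwaway file (illustrative only; the temporary file is not part of the package):

    import pprint
    import tempfile

    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
        tmp.write("class Greeter:\n    def hello(self, name):\n        return name\n")

    # Prints a nested structure covering the module, the Greeter class, and its hello() method
    pprint.pprint(_analyze_file(tmp.name))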
+ async def handle_codebase_mapper(arguments: dict):
+     """Handle building a structural map of source code."""
+     from mcp.types import TextContent
+
+     path = arguments.get("path", ".")
+
+     # Validate and get full path
+     full_path = os.path.abspath(os.path.join(state.allowed_directory, path))
+     if not full_path.startswith(state.allowed_directory):
+         return [TextContent(
+             type="text",
+             text=json.dumps({'error': 'Access denied: Path must be within allowed directory'})
+         )]
+     if not os.path.exists(full_path):
+         return [TextContent(
+             type="text",
+             text=json.dumps({'error': f'Path does not exist: {path}'})
+         )]
+     if not os.path.isdir(full_path):
+         return [TextContent(type="text", text=json.dumps({'error': f'Path is not a directory: {path}'}))]
+
+     analyzed_files = []
+
+     # First try using git ls-files
+     try:
+         result = subprocess.run(
+             ['git', 'ls-files'],
+             cwd=full_path,
+             capture_output=True,
+             text=True,
+             check=True,
+         )
+         if result.returncode == 0:
+             files = [
+                 os.path.join(full_path, f.strip())
+                 for f in result.stdout.splitlines()
+                 if f.strip()
+             ]
+             analyzed_files.extend(files)
+
+     except (subprocess.SubprocessError, FileNotFoundError):
+         pass
+
+     # If git didn't work or found no files, use regular directory walk
+     if not analyzed_files:
+         skip_dirs = {'.git', '.svn', 'node_modules', '__pycache__', 'build', 'dist'}
+         for root, _, filenames in os.walk(full_path):
+
+             # Get the directory name
+             dir_name = os.path.basename(root)
+
+             # Skip hidden and build directories
+             if dir_name.startswith('.') or dir_name in skip_dirs:
+                 continue
+
+             for filename in filenames:
+                 # Skip hidden files
+                 if filename.startswith('.'):
+                     continue
+
+                 file_path = os.path.join(root, filename)
+                 language = _detect_language(file_path)
+                 if language != 'unknown':
+                     analyzed_files.append(file_path)
+
+     if not analyzed_files:
+         return [TextContent(
+             type="text",
+             text=json.dumps({
+                 'error': 'No source code files found to analyze',
+                 'path': full_path
+             }, indent=2)
+         )]
+
+     # Analyze each file
+     analysis_results = []
+     errors = []
+     for file_path in sorted(analyzed_files):
+         rel_path = os.path.relpath(file_path, full_path)
+         try:
+             result = _analyze_file(file_path)
+
+             if result and isinstance(result, dict) and 'error' not in result:
+                 # Successfully analyzed file
+                 analysis_results.append({
+                     'path': rel_path,
+                     'language': _detect_language(rel_path),
+                     'structure': result
+                 })
+             elif result and isinstance(result, dict) and 'error' in result:
+                 errors.append({
+                     'path': rel_path,
+                     'error': result['error']
+                 })
+         except Exception as e:
+             errors.append({
+                 'path': rel_path,
+                 'error': str(e)
+             })
+
+     if not analysis_results:
+         return [TextContent(
+             type="text",
+             text=json.dumps({
+                 'error': 'Analysis completed but no valid results',
+                 'path': full_path,
+                 'attempted': len(analyzed_files),
+                 'files_found': len(analyzed_files),
+                 'errors': errors
+             }, indent=2)
+         )]
+
+     def count_nodes(structure: Dict[str, Any], node_types: set[str]) -> int:
+         """Recursively count nodes of specific types in the tree structure."""
+         count = 0
+
+         # Count current node if it matches
+         if structure.get('type') in node_types:
+             count += 1
+
+         # Recursively count in children
+         for child in structure.get('children', []):
+             count += count_nodes(child, node_types)
+
+         return count
+
+     # Define node types for different categories
+     class_types = {
+         'class_definition', 'class_declaration', 'class_specifier',
+         'struct_specifier', 'struct_item', 'interface_declaration',
+         'object_declaration'  # Kotlin object declarations
+     }
+
+     function_types = {
+         'function_definition', 'function_declaration', 'method_definition',
+         'method_declaration', 'constructor_declaration', 'fn_item',
+         'method', 'singleton_method',
+         'primary_constructor'  # Kotlin primary constructors
+     }
+
+     def generate_text_map(analysis_results: List[Dict[str, Any]]) -> str:
+         """Generate a compact text representation of the code structure analysis."""
+
+         def format_node(node: Dict[str, Any], prefix: str = "", is_last: bool = True) -> List[str]:
+             lines = []
+
+             node_type = node.get('type', '')
+             node_name = node.get('name', '')
+
+             # Handle decorated functions - extract the actual function definition
+             if node_type == 'decorated_definition' and 'children' in node:
+                 for child in node.get('children', []):
+                     if child.get('type') in {
+                         'function_definition', 'method_definition', 'member_function_definition'
+                     }:
+                         return format_node(child, prefix, is_last)
+
+             # Handle class body, block nodes, and wrapper functions
+             if not node_name and node_type in {'class_body', 'block', 'declaration_list', 'body'}:
+                 return process_children(node.get('children', []), prefix, is_last)
+             elif not node_name:
+                 return lines
+
+             branch = "└── " if is_last else "├── "
+
+             # Format node information based on type
+             if node_type in {
+                 'class_definition', 'class_declaration', 'class_specifier',
+                 'class', 'interface_declaration', 'struct_specifier',
+                 'struct_item', 'trait_item', 'trait_declaration',
+                 'module', 'type_declaration'
+             }:
+                 node_info = f"class {node_name}"
+             elif node_type in {
+                 'function_definition', 'function_declaration', 'method_definition',
+                 'method_declaration', 'fn_item', 'method', 'singleton_method',
+                 'constructor_declaration', 'member_function_definition',
+                 'constructor', 'destructor', 'public_method_definition',
+                 'private_method_definition', 'protected_method_definition'
+             }:
+                 # Handle parameters
+                 params = []
+                 if 'parameters' in node and node['parameters']:
+                     params = node['parameters']
+                 elif 'children' in node:
+                     # Try to extract parameters from children for languages that structure them differently
+                     for child in node['children']:
+                         if child.get('type') in {'parameter_list', 'parameters', 'formal_parameters', 'argument_list'}:
+                             for param in child.get('children', []):
+                                 if param.get('type') in {'identifier', 'parameter'}:
+                                     param_name = param.get('name', '')
+                                     if param_name:
+                                         params.append(param_name)
+
+                 params_str = ', '.join(params) if params else ''
+                 node_info = f"{node_name}({params_str})"
+             else:
+                 node_info = node_name
+
+             lines.append(f"{prefix}{branch}{node_info}")
+
+             # Process children
+             if 'children' in node:
+                 new_prefix = prefix + ("    " if is_last else "│   ")
+                 child_lines = process_children(node['children'], new_prefix, is_last)
+                 if child_lines:  # Only add child lines if there are any
+                     lines.extend(child_lines)
+
+             return lines
+
+         def process_children(children: List[Dict], prefix: str, is_last: bool) -> List[str]:
+             if not children:
+                 return []
+
+             lines = []
+             significant_children = [
+                 child for child in children
+                 if child.get('type') in {
+                     'decorated_definition',
+                     # Class-related nodes
+                     'class_definition', 'class_declaration', 'class_specifier',
+                     'class', 'interface_declaration', 'struct_specifier',
+                     'struct_item', 'trait_item', 'trait_declaration',
+                     'module', 'type_declaration',
+                     'impl_item',  # Rust implementations
+                     # Method-related nodes
+                     'function_definition', 'function_declaration', 'method_definition',
+                     'method_declaration', 'fn_item', 'method', 'singleton_method',
+                     'constructor_declaration', 'member_function_definition',
+                     'constructor', 'destructor', 'public_method_definition',
+                     'private_method_definition', 'protected_method_definition',
+                     # Container nodes that might have methods
+                     'class_body', 'block', 'declaration_list', 'body',
+                     'impl_block',  # Rust implementation blocks
+                     # Property and field nodes
+                     'property_declaration', 'field_declaration',
+                     'variable_declaration', 'const_declaration'
+                 }
+             ]
+
+             for i, child in enumerate(significant_children):
+                 is_last_child = (i == len(significant_children) - 1)
+                 child_lines = format_node(child, prefix, is_last_child)
+                 if child_lines:  # Only add child lines if there are any
+                     lines.extend(child_lines)
+
+             return lines
+
+         # Process each file
+         output_lines = []
+
+         # Sort analysis results by path
+         sorted_results = sorted(analysis_results, key=lambda x: x['path'])
+
+         for result in sorted_results:
+             # Skip files with no significant structure
+             if not result.get('structure') or not result.get('structure', {}).get('children'):
+                 continue
+
+             # Add file header
+             output_lines.append(f"\n{result['path']}")
+
+             # Format the structure
+             structure = result['structure']
+             if 'children' in structure:
+                 significant_nodes = [
+                     child for child in structure['children']
+                     if child.get('type') in {
+                         'decorated_definition',
+                         # Class-related nodes
+                         'class_definition', 'class_declaration', 'class_specifier',
+                         'class', 'interface_declaration', 'struct_specifier',
+                         'struct_item', 'trait_item', 'trait_declaration',
+                         'module', 'type_declaration',
+                         'impl_item',  # Rust implementations
+                         # Method-related nodes
+                         'function_definition', 'function_declaration', 'method_definition',
+                         'method_declaration', 'fn_item', 'method', 'singleton_method',
+                         'constructor_declaration', 'member_function_definition',
+                         'constructor', 'destructor', 'public_method_definition',
+                         'private_method_definition', 'protected_method_definition',
+                         # Property and field nodes
+                         'property_declaration', 'field_declaration',
+                         'variable_declaration', 'const_declaration'
+                     }
+                 ]
+
+                 for i, node in enumerate(significant_nodes):
+                     is_last = (i == len(significant_nodes) - 1)
+                     node_lines = format_node(node, "", is_last)
+                     if node_lines:  # Only add node lines if there are any
+                         output_lines.extend(node_lines)
+
+         # Return the formatted text
+         return '\n'.join(output_lines) if output_lines else "No significant code structure found."
+
+     def format_analysis_results(analysis_results: List[Dict[str, Any]], analyzed_files: List[str], errors: List[Dict[str, str]]) -> str:
+         """Format the analysis results into a clear text format."""
+
+         # Count statistics
+         total_files = len(analyzed_files)
+         classes = sum(count_nodes(f['structure'], class_types) for f in analysis_results)
+         functions = sum(count_nodes(f['structure'], function_types) for f in analysis_results)
+         decorated_functions = sum(count_nodes(f['structure'], {'decorated_definition'}) for f in analysis_results)
+         error_count = len(errors)
+
+         # Build output sections
+         sections = []
+
+         # Add statistics section
+         sections.append("\n===ANALYSIS STATISTICS===\n")
+         sections.append(f"Total files analyzed: {total_files}")
+         sections.append(f"Total errors: {error_count}")
+         sections.append(f"Total classes found: {classes}")
+         sections.append(f"Total functions found: {functions}")
+         sections.append(f"Total decorated functions: {decorated_functions}")
+
+         # Add errors section if any
+         if errors:
+             sections.append("\n===ERRORS===")
+             for error in errors:
+                 error_first_line = error['error'].split('\n')[0]
+                 sections.append(f"{error['path']}: {error_first_line}")
+
+         # Add repository map
+         sections.append("\n===REPOSITORY STRUCTURE===")
+         sections.append(generate_text_map(analysis_results))
+
+         # Join all sections with newlines
+         return "\n".join(sections)
+
+     return [TextContent(
+         type="text",
+         text=format_analysis_results(analysis_results, analyzed_files, errors)
+     )]
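Taken together, the handler discovers files with git ls-files (falling back to os.walk), parses each one with the cached tree-sitter parser, and renders statistics plus a tree under ===REPOSITORY STRUCTURE===. A hypothetical end-to-end invocation, assuming state.allowed_directory can be pointed at a local checkout before the call:

    import asyncio

    state.allowed_directory = "/path/to/a/local/checkout"  # assumption: the shared state object allows this assignment
    response = asyncio.run(handle_codebase_mapper({"path": "."}))
    print(response[0].text)  # ===ANALYSIS STATISTICS===, optional ===ERRORS===, then the repository tree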