agentcrew-ai 0.8.12__py3-none-any.whl → 0.8.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- AgentCrew/__init__.py +1 -1
- AgentCrew/main.py +55 -3
- AgentCrew/modules/agents/local_agent.py +25 -0
- AgentCrew/modules/code_analysis/__init__.py +8 -0
- AgentCrew/modules/code_analysis/parsers/__init__.py +67 -0
- AgentCrew/modules/code_analysis/parsers/base.py +93 -0
- AgentCrew/modules/code_analysis/parsers/cpp_parser.py +127 -0
- AgentCrew/modules/code_analysis/parsers/csharp_parser.py +162 -0
- AgentCrew/modules/code_analysis/parsers/generic_parser.py +63 -0
- AgentCrew/modules/code_analysis/parsers/go_parser.py +154 -0
- AgentCrew/modules/code_analysis/parsers/java_parser.py +103 -0
- AgentCrew/modules/code_analysis/parsers/javascript_parser.py +268 -0
- AgentCrew/modules/code_analysis/parsers/kotlin_parser.py +84 -0
- AgentCrew/modules/code_analysis/parsers/php_parser.py +107 -0
- AgentCrew/modules/code_analysis/parsers/python_parser.py +60 -0
- AgentCrew/modules/code_analysis/parsers/ruby_parser.py +46 -0
- AgentCrew/modules/code_analysis/parsers/rust_parser.py +72 -0
- AgentCrew/modules/code_analysis/service.py +231 -897
- AgentCrew/modules/command_execution/constants.py +2 -2
- AgentCrew/modules/console/confirmation_handler.py +4 -4
- AgentCrew/modules/console/console_ui.py +20 -1
- AgentCrew/modules/console/conversation_browser.py +557 -0
- AgentCrew/modules/console/diff_display.py +22 -51
- AgentCrew/modules/console/display_handlers.py +22 -22
- AgentCrew/modules/console/tool_display.py +4 -6
- AgentCrew/modules/file_editing/service.py +8 -8
- AgentCrew/modules/file_editing/tool.py +65 -67
- AgentCrew/modules/gui/components/tool_handlers.py +0 -2
- AgentCrew/modules/gui/widgets/diff_widget.py +30 -61
- AgentCrew/modules/llm/constants.py +5 -5
- AgentCrew/modules/memory/context_persistent.py +1 -0
- AgentCrew/modules/memory/tool.py +1 -1
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/METADATA +1 -1
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/RECORD +38 -24
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/WHEEL +1 -1
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/entry_points.txt +0 -0
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/licenses/LICENSE +0 -0
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.8.13.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Go language parser for code analysis.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional
|
|
6
|
+
|
|
7
|
+
from .base import BaseLanguageParser
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class GoParser(BaseLanguageParser):
    """Tree-sitter driven parser that summarizes Go source code.

    Each ``process_node`` call turns one syntax-tree node into a plain
    dictionary (name/type/children) consumed by the code-analysis service.
    """

    @property
    def language_name(self) -> str:
        """Identifier of the language this parser handles."""
        return "go"

    def process_node(
        self, node, source_code: bytes, process_children_callback
    ) -> Optional[Dict[str, Any]]:
        """Summarize *node* as a dict, recursing via *process_children_callback*.

        Declaration nodes (types, functions/methods, vars/consts, fields) get
        dedicated handling; any other node aggregates its significant children.
        """
        summary = self._create_base_result(node)
        kind = node.type

        if kind == "type_declaration":
            return self._handle_type_declaration(
                node, source_code, summary, process_children_callback
            )

        if kind in ("function_declaration", "method_declaration"):
            # Functions carry an `identifier`; methods a `field_identifier`.
            for part in node.children:
                if part.type in ("identifier", "field_identifier"):
                    summary["name"] = self.extract_node_text(part, source_code)
                    header = self.extract_node_text(node, source_code).split("\n")[0]
                    summary["first_line"] = header.strip("{")
                    return summary
            return summary

        if kind == "interface_declaration":
            for part in node.children:
                if part.type == "identifier":
                    summary["name"] = self.extract_node_text(part, source_code)
                    return summary
            return summary

        if kind in ("var_declaration", "const_declaration"):
            return self._handle_var_declaration(node, source_code, summary)

        if kind == "field_declaration":
            return self._handle_field_declaration(node, source_code, summary)

        # Generic node: keep only children the base class deems significant.
        significant = [
            child_summary
            for child_summary in map(process_children_callback, node.children)
            if child_summary and self._is_significant_node(child_summary)
        ]
        if significant:
            summary["children"] = significant
        return summary

    def _handle_type_declaration(
        self,
        node,
        source_code: bytes,
        result: Dict[str, Any],
        process_children_callback,
    ) -> Dict[str, Any]:
        """Classify a ``type`` declaration as plain type, struct, or interface."""
        for spec in node.children:
            if spec.type != "type_spec":
                continue
            for detail in spec.children:
                if detail.type == "type_identifier":
                    result["name"] = self.extract_node_text(detail, source_code)
                    result["type"] = "type_declaration"
                elif detail.type == "struct_type":
                    result["type"] = "struct_declaration"
                    for body in detail.children:
                        if body.type != "field_declaration_list":
                            continue
                        fields = [
                            field_summary
                            for field_summary in map(
                                process_children_callback, body.children
                            )
                            if field_summary
                            and self._is_significant_node(field_summary)
                        ]
                        if fields:
                            result["children"] = fields
                elif detail.type == "interface_type":
                    result["type"] = "interface_declaration"

        return result

    def _handle_var_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Extract the first identifier and (last seen) type of a var/const."""
        name_text = None
        type_text = None

        for spec in node.children:
            if spec.type not in ("var_spec", "const_spec"):
                continue
            for piece in spec.children:
                if piece.type == "identifier" and name_text is None:
                    name_text = self.extract_node_text(piece, source_code)
                elif piece.type in (
                    "type_identifier",
                    "pointer_type",
                    "array_type",
                    "slice_type",
                    "map_type",
                    "channel_type",
                    "qualified_type",
                ):
                    type_text = self.extract_node_text(piece, source_code)

        if name_text:
            result["type"] = "variable_declaration"
            result["name"] = f"{name_text} {type_text}" if type_text else name_text

        return result

    def _handle_field_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Render a struct field as ``"name type"`` when both are present."""
        name_text = None
        type_text = None

        for piece in node.children:
            if piece.type == "field_identifier":
                name_text = self.extract_node_text(piece, source_code)
            elif piece.type in (
                "type_identifier",
                "pointer_type",
                "array_type",
                "slice_type",
                "map_type",
                "channel_type",
                "qualified_type",
                "struct_type",
                "interface_type",
            ):
                type_text = self.extract_node_text(piece, source_code)

        if name_text:
            result["type"] = "field_declaration"
            result["name"] = f"{name_text} {type_text}" if type_text else name_text

        return result
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Java language parser for code analysis.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional
|
|
6
|
+
|
|
7
|
+
from .base import BaseLanguageParser
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class JavaParser(BaseLanguageParser):
    """Parser for Java source code.

    Builds summary dictionaries for classes, interfaces, methods, fields,
    annotations and lambdas from a tree-sitter syntax tree.
    """

    @property
    def language_name(self) -> str:
        """Identifier of the language this parser handles."""
        return "java"

    def process_node(
        self, node, source_code: bytes, process_children_callback
    ) -> Optional[Dict[str, Any]]:
        """Summarize *node* as a dict, recursing via *process_children_callback*.

        Consistent with the sibling language parsers, child summaries are
        filtered with ``_is_significant_node`` (which also drops ``None``
        entries), and class/interface nodes return early so the generic child
        pass below cannot re-process the declaration's keyword/identifier
        nodes and overwrite the "children" list built from the class body.
        """
        result = self._create_base_result(node)

        if node.type in ["class_declaration", "interface_declaration"]:
            for child in node.children:
                if child.type == "identifier":
                    result["name"] = self.extract_node_text(child, source_code)
                elif child.type in ["class_body", "interface_body"]:
                    members = []
                    for body_child in child.children:
                        member = process_children_callback(body_child)
                        if member and self._is_significant_node(member):
                            members.append(member)
                    if members:
                        result["children"] = members
            return result

        elif node.type == "method_declaration":
            self._handle_method_declaration(node, source_code, result)

        elif node.type == "field_declaration":
            self._handle_field_declaration(node, source_code, result)

        elif node.type == "annotation":
            result["name"] = self.extract_node_text(node, source_code)
            result["type"] = "annotation"

        elif node.type == "lambda_expression":
            result["type"] = "lambda_expression"

        children = []
        for child in node.children:
            child_result = process_children_callback(child)
            if child_result and self._is_significant_node(child_result):
                children.append(child_result)
        if children:
            result["children"] = children

        return result

    def _handle_method_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> None:
        """Fill in a method's name, ``type name`` parameter list and return type.

        Guards against ``child_by_field_name`` returning ``None`` for a
        malformed parameter instead of crashing inside ``extract_node_text``.
        """
        parameters = []
        for child in node.children:
            if child.type == "identifier":
                result["name"] = self.extract_node_text(child, source_code)
            elif child.type == "formal_parameters":
                for param in child.children:
                    if param.type == "parameter":
                        name_node = param.child_by_field_name("name")
                        type_node = param.child_by_field_name("type")
                        if name_node is None or type_node is None:
                            continue  # incomplete parameter node; skip it
                        param_name = self.extract_node_text(name_node, source_code)
                        param_type = self.extract_node_text(type_node, source_code)
                        parameters.append(f"{param_type} {param_name}")
                result["parameters"] = parameters
            elif child.type == "type":
                result["return_type"] = self.extract_node_text(child, source_code)

    def _handle_field_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> None:
        """Render a field as ``"type name"`` when the type is recognized."""
        field_type = None
        field_name = None

        for child in node.children:
            if child.type in [
                "type_identifier",
                "generic_type",
                "array_type",
                "integral_type",
                "floating_point_type",
                "boolean_type",
            ]:
                field_type = self.extract_node_text(child, source_code)
            elif child.type == "variable_declarator":
                name_node = child.child_by_field_name("name")
                if name_node:
                    field_name = self.extract_node_text(name_node, source_code)

        if field_name:
            result["type"] = "field_declaration"
            if field_type:
                result["name"] = f"{field_type} {field_name}"
            else:
                result["name"] = field_name
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"""
|
|
2
|
+
JavaScript/TypeScript language parser for code analysis.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional
|
|
6
|
+
|
|
7
|
+
from .base import BaseLanguageParser
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class JavaScriptParser(BaseLanguageParser):
    """Parser for JavaScript and TypeScript source code.

    Maps tree-sitter declaration nodes (classes, functions, arrow functions,
    variables, properties, exports) onto summary dictionaries.
    """

    @property
    def language_name(self) -> str:
        """Identifier of the language this parser handles."""
        return "javascript"

    def process_node(
        self, node, source_code: bytes, process_children_callback
    ) -> Optional[Dict[str, Any]]:
        """Summarize *node*; each declaration form is delegated to a helper."""
        result = self._create_base_result(node)
        kind = node.type

        if kind == "export_statement":
            return self._handle_export_statement(
                node, source_code, process_children_callback
            )
        if kind == "arrow_function":
            return self._handle_arrow_function(
                node, source_code, result, process_children_callback
            )
        if kind == "lexical_declaration":
            return self._handle_lexical_declaration(
                node, source_code, result, process_children_callback
            )
        if kind in ("property_declaration", "public_field_definition"):
            return self._handle_property_declaration(node, source_code, result)
        if kind in ("variable_statement", "variable_declaration"):
            return self._handle_variable_statement(
                node, source_code, result, process_children_callback
            )
        if kind in (
            "class_declaration",
            "method_definition",
            "class",
            "method_declaration",
            "function_declaration",
            "interface_declaration",
        ):
            self._handle_regular_declaration(node, source_code, result)

        # Fall through: aggregate significant child summaries.
        significant = [
            child_summary
            for child_summary in map(process_children_callback, node.children)
            if child_summary and self._is_significant_node(child_summary)
        ]
        if significant:
            result["children"] = significant
        return result

    def _handle_export_statement(
        self, node, source_code: bytes, process_children_callback
    ) -> Optional[Dict[str, Any]]:
        """Unwrap the first exported declaration and tag it as ``exported``."""
        exportable = (
            "class_declaration",
            "function_declaration",
            "interface_declaration",
            "variable_statement",
            "lexical_declaration",
            "method_definition",
        )
        for inner in node.children:
            if inner.type not in exportable:
                continue
            inner_summary = process_children_callback(inner)
            if inner_summary:
                inner_summary["exported"] = True
                return inner_summary
        return None

    def _handle_arrow_function(
        self,
        node,
        source_code: bytes,
        result: Dict[str, Any],
        process_children_callback,
    ) -> Dict[str, Any]:
        """Name an arrow function after the variable it is assigned to."""
        holder = node.parent
        if holder and holder.type == "variable_declarator":
            for sibling in holder.children:
                if sibling.type == "identifier":
                    result["type"] = "arrow_function"
                    result["name"] = self.extract_node_text(sibling, source_code)

        for part in node.children:
            if part.type == "formal_parameters":
                extracted = self._extract_parameters(part, source_code)
                if extracted:
                    result["parameters"] = extracted

        return result

    def _handle_lexical_declaration(
        self,
        node,
        source_code: bytes,
        result: Dict[str, Any],
        process_children_callback,
    ) -> Dict[str, Any]:
        """Classify a ``const``/``let`` declaration: arrow function vs variable."""
        for declarator in node.children:
            if declarator.type != "variable_declarator":
                continue

            declared_name = None
            arrow_seen = False
            for piece in declarator.children:
                if piece.type == "identifier":
                    declared_name = self.extract_node_text(piece, source_code)
                elif piece.type == "arrow_function":
                    arrow_seen = True

            if declared_name and arrow_seen:
                result["type"] = "arrow_function"
                result["name"] = declared_name
                # Pull the parameter list out of the arrow function body.
                for piece in declarator.children:
                    if piece.type == "arrow_function":
                        arrow_summary = process_children_callback(piece)
                        if arrow_summary and "parameters" in arrow_summary:
                            result["parameters"] = arrow_summary["parameters"]
            else:
                result["type"] = "variable_declaration"
                result["name"] = declared_name
                result["first_line"] = (
                    self.extract_node_text(node, source_code)
                    .split("\n")[0]
                    .strip("{")
                )

        return result

    def _handle_regular_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> None:
        """Pick up the declaration name and, for functions/methods, parameters."""
        takes_params = node.type in (
            "function_declaration",
            "method_declaration",
            "method_definition",
        )
        for part in node.children:
            if part.type in ("identifier", "type_identifier", "property_identifier"):
                result["name"] = self.extract_node_text(part, source_code)
            elif part.type == "formal_parameters" and takes_params:
                extracted = self._extract_parameters_with_types(part, source_code)
                if extracted:
                    result["parameters"] = extracted

    def _handle_variable_statement(
        self,
        node,
        source_code: bytes,
        result: Dict[str, Any],
        process_children_callback,
    ) -> Dict[str, Any]:
        """Summarize a ``var`` statement, detecting arrow-function assignments."""
        for part in node.children:
            if part.type == "variable_declaration_list":
                for declarator in part.children:
                    if declarator.type != "variable_declarator":
                        continue

                    declared_name = None
                    arrow_seen = False
                    for piece in declarator.children:
                        if piece.type == "identifier":
                            declared_name = self.extract_node_text(
                                piece, source_code
                            )
                        elif piece.type == "arrow_function":
                            arrow_seen = True

                    if declared_name:
                        if arrow_seen:
                            result["type"] = "arrow_function"
                            result["name"] = declared_name
                            for piece in declarator.children:
                                if piece.type == "arrow_function":
                                    arrow_summary = process_children_callback(piece)
                                    if (
                                        arrow_summary
                                        and "parameters" in arrow_summary
                                    ):
                                        result["parameters"] = arrow_summary[
                                            "parameters"
                                        ]
                        else:
                            result["type"] = "variable_declaration"
                            result["name"] = declared_name
                        # First named declarator determines the summary.
                        return result

            elif part.type == "identifier":
                result["type"] = "variable_declaration"
                result["name"] = self.extract_node_text(part, source_code)
                return result

        return result

    def _handle_property_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Render a class property as ``name: type`` when a type is annotated."""
        declared_name = None
        annotated_type = None

        for part in node.children:
            if part.type in ("property_identifier", "identifier"):
                declared_name = self.extract_node_text(part, source_code)
            elif part.type == "type_annotation":
                for annotation_part in part.children:
                    if annotation_part.type != ":":
                        annotated_type = self.extract_node_text(
                            annotation_part, source_code
                        )

        if declared_name:
            result["type"] = "property_declaration"
            result["name"] = (
                f"{declared_name}: {annotated_type}"
                if annotated_type
                else declared_name
            )

        return result

    def _extract_parameters(self, params_node, source_code: bytes) -> list:
        """Return raw parameter texts from a ``formal_parameters`` node."""
        wanted = ("required_parameter", "optional_parameter", "identifier")
        return [
            self.extract_node_text(entry, source_code)
            for entry in params_node.children
            if entry.type in wanted
        ]

    def _extract_parameters_with_types(self, params_node, source_code: bytes) -> list:
        """Return parameter texts, formatted as ``name: type`` where annotated."""
        collected = []
        wanted = ("required_parameter", "optional_parameter", "identifier")
        for entry in params_node.children:
            if entry.type not in wanted:
                continue

            # A bare identifier has no annotation to look for.
            if entry.type == "identifier":
                collected.append(self.extract_node_text(entry, source_code))
                continue

            entry_name = None
            entry_type = None
            for detail in entry.children:
                if detail.type in ("identifier", "object_pattern"):
                    entry_name = self.extract_node_text(detail, source_code)
                elif detail.type == "type_annotation":
                    for annotation_part in detail.children:
                        if annotation_part.type != ":":
                            entry_type = self.extract_node_text(
                                annotation_part, source_code
                            )

            if entry_name:
                collected.append(
                    f"{entry_name}: {entry_type}" if entry_type else entry_name
                )

        return collected
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Kotlin language parser for code analysis.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional
|
|
6
|
+
|
|
7
|
+
from .base import BaseLanguageParser
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class KotlinParser(BaseLanguageParser):
    """Parser for Kotlin source code.

    Summarizes classes, functions and property declarations from a
    tree-sitter syntax tree.
    """

    @property
    def language_name(self) -> str:
        """Identifier of the language this parser handles."""
        return "kotlin"

    def process_node(
        self, node, source_code: bytes, process_children_callback
    ) -> Optional[Dict[str, Any]]:
        """Summarize classes, functions and properties; aggregate the rest."""
        summary = self._create_base_result(node)
        kind = node.type

        if kind == "class_declaration":
            for part in node.children:
                if part.type in ("simple_identifier", "type_identifier"):
                    summary["name"] = self.extract_node_text(part, source_code)
                elif part.type == "class_body":
                    members = [
                        member
                        for member in map(process_children_callback, part.children)
                        if member and self._is_significant_node(member)
                    ]
                    if members:
                        summary["children"] = members
            return summary

        if kind == "function_declaration":
            for part in node.children:
                if part.type == "simple_identifier":
                    summary["name"] = self.extract_node_text(part, source_code)
                    return summary
            return summary

        if kind in ("property_declaration", "variable_declaration"):
            return self._handle_property_declaration(node, source_code, summary)

        # Generic node: keep only significant child summaries.
        significant = [
            child_summary
            for child_summary in map(process_children_callback, node.children)
            if child_summary and self._is_significant_node(child_summary)
        ]
        if significant:
            summary["children"] = significant
        return summary

    def _handle_property_declaration(
        self, node, source_code: bytes, result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Render a property as ``name: type`` when a type is present.

        The name may appear on a nested ``variable_declaration`` node or
        directly on the property node; the first identifier found wins.
        """
        declared_name = None
        declared_type = None

        for part in node.children:
            if part.type == "variable_declaration":
                for piece in part.children:
                    if piece.type == "simple_identifier" and declared_name is None:
                        declared_name = self.extract_node_text(piece, source_code)
                    elif piece.type in (
                        "user_type",
                        "nullable_type",
                        "type_identifier",
                    ):
                        declared_type = self.extract_node_text(piece, source_code)
            elif part.type == "simple_identifier" and declared_name is None:
                declared_name = self.extract_node_text(part, source_code)
            elif part.type == "user_type":
                declared_type = self.extract_node_text(part, source_code)

        if declared_name:
            result["type"] = "property_declaration"
            result["name"] = (
                f"{declared_name}: {declared_type}"
                if declared_type
                else declared_name
            )

        return result
|