agentcrew-ai 0.8.12__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- AgentCrew/__init__.py +1 -1
- AgentCrew/app.py +34 -633
- AgentCrew/main.py +55 -3
- AgentCrew/main_docker.py +1 -30
- AgentCrew/modules/agents/local_agent.py +26 -1
- AgentCrew/modules/chat/message/command_processor.py +33 -8
- AgentCrew/modules/chat/message/handler.py +5 -1
- AgentCrew/modules/code_analysis/__init__.py +8 -0
- AgentCrew/modules/code_analysis/parsers/__init__.py +67 -0
- AgentCrew/modules/code_analysis/parsers/base.py +93 -0
- AgentCrew/modules/code_analysis/parsers/cpp_parser.py +127 -0
- AgentCrew/modules/code_analysis/parsers/csharp_parser.py +162 -0
- AgentCrew/modules/code_analysis/parsers/generic_parser.py +63 -0
- AgentCrew/modules/code_analysis/parsers/go_parser.py +154 -0
- AgentCrew/modules/code_analysis/parsers/java_parser.py +103 -0
- AgentCrew/modules/code_analysis/parsers/javascript_parser.py +268 -0
- AgentCrew/modules/code_analysis/parsers/kotlin_parser.py +84 -0
- AgentCrew/modules/code_analysis/parsers/php_parser.py +107 -0
- AgentCrew/modules/code_analysis/parsers/python_parser.py +60 -0
- AgentCrew/modules/code_analysis/parsers/ruby_parser.py +46 -0
- AgentCrew/modules/code_analysis/parsers/rust_parser.py +72 -0
- AgentCrew/modules/code_analysis/service.py +231 -897
- AgentCrew/modules/command_execution/constants.py +2 -2
- AgentCrew/modules/console/completers.py +1 -1
- AgentCrew/modules/console/confirmation_handler.py +4 -4
- AgentCrew/modules/console/console_ui.py +17 -3
- AgentCrew/modules/console/conversation_browser/__init__.py +9 -0
- AgentCrew/modules/console/conversation_browser/browser.py +84 -0
- AgentCrew/modules/console/conversation_browser/browser_input_handler.py +279 -0
- AgentCrew/modules/console/conversation_browser/browser_ui.py +643 -0
- AgentCrew/modules/console/conversation_handler.py +34 -1
- AgentCrew/modules/console/diff_display.py +22 -51
- AgentCrew/modules/console/display_handlers.py +142 -26
- AgentCrew/modules/console/tool_display.py +4 -6
- AgentCrew/modules/file_editing/service.py +8 -8
- AgentCrew/modules/file_editing/tool.py +65 -67
- AgentCrew/modules/gui/components/command_handler.py +137 -29
- AgentCrew/modules/gui/components/tool_handlers.py +0 -2
- AgentCrew/modules/gui/themes/README.md +30 -14
- AgentCrew/modules/gui/themes/__init__.py +2 -1
- AgentCrew/modules/gui/themes/atom_light.yaml +1287 -0
- AgentCrew/modules/gui/themes/catppuccin.yaml +1276 -0
- AgentCrew/modules/gui/themes/dracula.yaml +1262 -0
- AgentCrew/modules/gui/themes/nord.yaml +1267 -0
- AgentCrew/modules/gui/themes/saigontech.yaml +1268 -0
- AgentCrew/modules/gui/themes/style_provider.py +76 -264
- AgentCrew/modules/gui/themes/theme_loader.py +379 -0
- AgentCrew/modules/gui/themes/unicorn.yaml +1276 -0
- AgentCrew/modules/gui/widgets/configs/global_settings.py +3 -4
- AgentCrew/modules/gui/widgets/diff_widget.py +30 -61
- AgentCrew/modules/llm/constants.py +18 -9
- AgentCrew/modules/memory/context_persistent.py +1 -0
- AgentCrew/modules/memory/tool.py +1 -1
- AgentCrew/setup.py +470 -0
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/METADATA +1 -1
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/RECORD +60 -41
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/WHEEL +1 -1
- AgentCrew/modules/gui/themes/atom_light.py +0 -1365
- AgentCrew/modules/gui/themes/catppuccin.py +0 -1404
- AgentCrew/modules/gui/themes/dracula.py +0 -1372
- AgentCrew/modules/gui/themes/nord.py +0 -1365
- AgentCrew/modules/gui/themes/saigontech.py +0 -1359
- AgentCrew/modules/gui/themes/unicorn.py +0 -1372
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/entry_points.txt +0 -0
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/licenses/LICENSE +0 -0
- {agentcrew_ai-0.8.12.dist-info → agentcrew_ai-0.9.0.dist-info}/top_level.txt +0 -0
AgentCrew/modules/code_analysis/parsers/csharp_parser.py
@@ -0,0 +1,162 @@
+"""
+C# language parser for code analysis.
+"""
+
+from typing import Any, Dict, Optional
+
+from .base import BaseLanguageParser
+
+
+class CSharpParser(BaseLanguageParser):
+    """Parser for C# source code."""
+
+    @property
+    def language_name(self) -> str:
+        return "c-sharp"
+
+    def process_node(
+        self, node, source_code: bytes, process_children_callback
+    ) -> Optional[Dict[str, Any]]:
+        result = self._create_base_result(node)
+
+        if node.type == "class_declaration":
+            self._handle_class_declaration(node, source_code, result)
+
+        elif node.type == "method_declaration":
+            self._handle_method_declaration(node, source_code, result)
+
+        elif node.type == "property_declaration":
+            self._handle_property_declaration(node, source_code, result)
+
+        elif node.type == "field_declaration":
+            self._handle_field_declaration(node, source_code, result)
+
+        children = []
+        for child in node.children:
+            child_result = process_children_callback(child)
+            if child_result and self._is_significant_node(child_result):
+                children.append(child_result)
+
+        if children:
+            result["children"] = children
+
+        return result
+
+    def _handle_class_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        for child in node.children:
+            if child.type == "identifier":
+                result["name"] = self.extract_node_text(child, source_code)
+            elif child.type == "base_list":
+                if len(child.children) > 1:
+                    result["base_class"] = self.extract_node_text(
+                        child.children[1], source_code
+                    )
+
+    def _handle_method_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        method_name = None
+        parameters = []
+        access_modifiers = []
+
+        for child in node.children:
+            if child.type == "identifier":
+                method_name = self.extract_node_text(child, source_code)
+                result["name"] = method_name
+            elif child.type == "parameter_list":
+                for param in child.children:
+                    if param.type == "parameter":
+                        param_type = ""
+                        param_name = None
+
+                        type_node = param.child_by_field_name("type")
+                        name_node = param.child_by_field_name("name")
+
+                        if type_node:
+                            param_type = self.extract_node_text(type_node, source_code)
+                        if name_node:
+                            param_name = self.extract_node_text(name_node, source_code)
+
+                        if param_name:
+                            parameters.append(param_type + " " + param_name)
+
+                if parameters:
+                    result["parameters"] = parameters
+            elif child.type == "modifier":
+                modifier = self.extract_node_text(child, source_code)
+                access_modifiers.append(modifier)
+
+        if access_modifiers:
+            result["modifiers"] = access_modifiers
+
+    def _handle_property_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        property_name = None
+        property_type = None
+        modifiers = []
+
+        for child in node.children:
+            if child.type == "modifier":
+                modifiers.append(self.extract_node_text(child, source_code))
+            elif child.type in [
+                "predefined_type",
+                "nullable_type",
+                "generic_name",
+                "array_type",
+            ]:
+                property_type = self.extract_node_text(child, source_code)
+            elif child.type == "identifier":
+                if property_type is None:
+                    property_type = self.extract_node_text(child, source_code)
+                else:
+                    property_name = self.extract_node_text(child, source_code)
+
+        if property_name:
+            result["type"] = "property_declaration"
+            if property_type:
+                result["name"] = f"{property_type} {property_name}"
+            else:
+                result["name"] = property_name
+            if modifiers:
+                result["modifiers"] = modifiers
+
+    def _handle_field_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        field_name = None
+        field_type = None
+        modifiers = []
+
+        for child in node.children:
+            if child.type == "modifier":
+                modifiers.append(self.extract_node_text(child, source_code))
+            elif child.type == "variable_declaration":
+                for subchild in child.children:
+                    if subchild.type in [
+                        "predefined_type",
+                        "nullable_type",
+                        "generic_name",
+                        "array_type",
+                    ]:
+                        field_type = self.extract_node_text(subchild, source_code)
+                    elif subchild.type == "identifier" and field_type is None:
+                        field_type = self.extract_node_text(subchild, source_code)
+                    elif subchild.type == "variable_declarator":
+                        for var_child in subchild.children:
+                            if var_child.type == "identifier":
+                                field_name = self.extract_node_text(
+                                    var_child, source_code
+                                )
+                                break
+
+        if field_name:
+            result["type"] = "field_declaration"
+            if field_type:
+                result["name"] = f"{field_type} {field_name}"
+            else:
+                result["name"] = field_name
+            if modifiers:
+                result["modifiers"] = modifiers
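
A note on the shared entry point: each of the new parsers implements process_node(node, source_code, process_children_callback) and leaves recursion to the caller-supplied callback. A minimal sketch of that contract, assuming the driver simply re-enters the walk for every child (the actual driver is the reworked AgentCrew/modules/code_analysis/service.py, which this diff changes but does not reproduce here):

# Hedged sketch of the recursion contract, not AgentCrew's own driver code.
def walk(parser, node, source_code: bytes):
    # The parser decides what to record for this node; the lambda lets it
    # descend into children using the same walk.
    return parser.process_node(
        node, source_code, lambda child: walk(parser, child, source_code)
    )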

AgentCrew/modules/code_analysis/parsers/generic_parser.py
@@ -0,0 +1,63 @@
+"""
+Generic language parser for code analysis (fallback for unsupported languages).
+"""
+
+from typing import Any, Dict, Optional
+
+from .base import BaseLanguageParser
+
+
+class GenericParser(BaseLanguageParser):
+    """Generic parser for languages without specific implementation."""
+
+    def __init__(self, language: str = "unknown"):
+        self._language = language
+
+    @property
+    def language_name(self) -> str:
+        return self._language
+
+    def process_node(
+        self, node, source_code: bytes, process_children_callback
+    ) -> Optional[Dict[str, Any]]:
+        result = self._create_base_result(node)
+
+        if node.type in [
+            "type_declaration",
+            "function_declaration",
+            "method_declaration",
+            "interface_declaration",
+        ]:
+            for child in node.children:
+                if child.type in ["identifier", "field_identifier"]:
+                    result["name"] = self.extract_node_text(child, source_code)
+                    result["first_line"] = (
+                        self.extract_node_text(node, source_code)
+                        .split("\n")[0]
+                        .strip("{")
+                    )
+                    return result
+            return result
+
+        elif node.type in ["var_declaration", "const_declaration"]:
+            for child in node.children:
+                if child.type in ["var_spec", "const_spec"]:
+                    for subchild in child.children:
+                        if subchild.type == "identifier":
+                            result["type"] = "variable_declaration"
+                            result["name"] = self.extract_node_text(
+                                subchild, source_code
+                            )
+                            return result
+            return result
+
+        children = []
+        for child in node.children:
+            child_result = process_children_callback(child)
+            if child_result and self._is_significant_node(child_result):
+                children.append(child_result)
+
+        if children:
+            result["children"] = children
+
+        return result
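
GenericParser is documented as the fallback for languages without a dedicated implementation. The dispatch that picks a concrete parser presumably lives in the new parsers/__init__.py (+67 lines, not reproduced in this diff); a hypothetical extension-based version for illustration, assuming the concrete parsers take no constructor arguments:

# Hypothetical dispatch by file extension; the real registry is in
# parsers/__init__.py, which this diff adds but does not show here.
from AgentCrew.modules.code_analysis.parsers.csharp_parser import CSharpParser
from AgentCrew.modules.code_analysis.parsers.generic_parser import GenericParser
from AgentCrew.modules.code_analysis.parsers.go_parser import GoParser
from AgentCrew.modules.code_analysis.parsers.java_parser import JavaParser

_BY_EXTENSION = {".cs": CSharpParser, ".go": GoParser, ".java": JavaParser}

def parser_for(path: str):
    for ext, cls in _BY_EXTENSION.items():
        if path.endswith(ext):
            return cls()  # assumes the concrete parsers need no constructor args
    return GenericParser(language="unknown")  # documented fallback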

AgentCrew/modules/code_analysis/parsers/go_parser.py
@@ -0,0 +1,154 @@
+"""
+Go language parser for code analysis.
+"""
+
+from typing import Any, Dict, Optional
+
+from .base import BaseLanguageParser
+
+
+class GoParser(BaseLanguageParser):
+    """Parser for Go source code."""
+
+    @property
+    def language_name(self) -> str:
+        return "go"
+
+    def process_node(
+        self, node, source_code: bytes, process_children_callback
+    ) -> Optional[Dict[str, Any]]:
+        result = self._create_base_result(node)
+
+        if node.type == "type_declaration":
+            return self._handle_type_declaration(
+                node, source_code, result, process_children_callback
+            )
+
+        elif node.type in ["function_declaration", "method_declaration"]:
+            for child in node.children:
+                if child.type in ["identifier", "field_identifier"]:
+                    result["name"] = self.extract_node_text(child, source_code)
+                    result["first_line"] = (
+                        self.extract_node_text(node, source_code)
+                        .split("\n")[0]
+                        .strip("{")
+                    )
+                    return result
+            return result
+
+        elif node.type == "interface_declaration":
+            for child in node.children:
+                if child.type == "identifier":
+                    result["name"] = self.extract_node_text(child, source_code)
+                    return result
+            return result
+
+        elif node.type in ["var_declaration", "const_declaration"]:
+            return self._handle_var_declaration(node, source_code, result)
+
+        elif node.type == "field_declaration":
+            return self._handle_field_declaration(node, source_code, result)
+
+        children = []
+        for child in node.children:
+            child_result = process_children_callback(child)
+            if child_result and self._is_significant_node(child_result):
+                children.append(child_result)
+
+        if children:
+            result["children"] = children
+
+        return result
+
+    def _handle_type_declaration(
+        self,
+        node,
+        source_code: bytes,
+        result: Dict[str, Any],
+        process_children_callback,
+    ) -> Dict[str, Any]:
+        for child in node.children:
+            if child.type == "type_spec":
+                for spec_child in child.children:
+                    if spec_child.type == "type_identifier":
+                        result["name"] = self.extract_node_text(spec_child, source_code)
+                        result["type"] = "type_declaration"
+                    elif spec_child.type == "struct_type":
+                        result["type"] = "struct_declaration"
+                        for struct_child in spec_child.children:
+                            if struct_child.type == "field_declaration_list":
+                                children = []
+                                for field in struct_child.children:
+                                    field_result = process_children_callback(field)
+                                    if field_result and self._is_significant_node(
+                                        field_result
+                                    ):
+                                        children.append(field_result)
+                                if children:
+                                    result["children"] = children
+                    elif spec_child.type == "interface_type":
+                        result["type"] = "interface_declaration"
+
+        return result
+
+    def _handle_var_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        var_name = None
+        var_type = None
+
+        for child in node.children:
+            if child.type in ["var_spec", "const_spec"]:
+                for subchild in child.children:
+                    if subchild.type == "identifier" and var_name is None:
+                        var_name = self.extract_node_text(subchild, source_code)
+                    elif subchild.type in [
+                        "type_identifier",
+                        "pointer_type",
+                        "array_type",
+                        "slice_type",
+                        "map_type",
+                        "channel_type",
+                        "qualified_type",
+                    ]:
+                        var_type = self.extract_node_text(subchild, source_code)
+
+        if var_name:
+            result["type"] = "variable_declaration"
+            if var_type:
+                result["name"] = f"{var_name} {var_type}"
+            else:
+                result["name"] = var_name
+
+        return result
+
+    def _handle_field_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        field_name = None
+        field_type = None
+
+        for child in node.children:
+            if child.type == "field_identifier":
+                field_name = self.extract_node_text(child, source_code)
+            elif child.type in [
+                "type_identifier",
+                "pointer_type",
+                "array_type",
+                "slice_type",
+                "map_type",
+                "channel_type",
+                "qualified_type",
+                "struct_type",
+                "interface_type",
+            ]:
+                field_type = self.extract_node_text(child, source_code)
+
+        if field_name:
+            result["type"] = "field_declaration"
+            if field_type:
+                result["name"] = f"{field_name} {field_type}"
+            else:
+                result["name"] = field_name
+
+        return result

AgentCrew/modules/code_analysis/parsers/java_parser.py
@@ -0,0 +1,103 @@
+"""
+Java language parser for code analysis.
+"""
+
+from typing import Any, Dict, Optional
+
+from .base import BaseLanguageParser
+
+
+class JavaParser(BaseLanguageParser):
+    """Parser for Java source code."""
+
+    @property
+    def language_name(self) -> str:
+        return "java"
+
+    def process_node(
+        self, node, source_code: bytes, process_children_callback
+    ) -> Optional[Dict[str, Any]]:
+        result = self._create_base_result(node)
+
+        if node.type in ["class_declaration", "interface_declaration"]:
+            for child in node.children:
+                if child.type == "identifier":
+                    result["name"] = self.extract_node_text(child, source_code)
+                elif child.type in ["class_body", "interface_body"]:
+                    result["children"] = [
+                        process_children_callback(c) for c in child.children
+                    ]
+
+        elif node.type == "method_declaration":
+            self._handle_method_declaration(node, source_code, result)
+
+        elif node.type == "field_declaration":
+            self._handle_field_declaration(node, source_code, result)
+
+        elif node.type == "annotation":
+            annotation_name = self.extract_node_text(node, source_code)
+            result["name"] = annotation_name
+            result["type"] = "annotation"
+
+        elif node.type == "lambda_expression":
+            result["type"] = "lambda_expression"
+
+        children = [process_children_callback(child) for child in node.children]
+        if children:
+            result["children"] = children
+
+        return result
+
+    def _handle_method_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        method_name = None
+        parameters = []
+        return_type = None
+
+        for child in node.children:
+            if child.type == "identifier":
+                method_name = self.extract_node_text(child, source_code)
+                result["name"] = method_name
+            elif child.type == "formal_parameters":
+                for param in child.children:
+                    if param.type == "parameter":
+                        param_name = self.extract_node_text(
+                            param.child_by_field_name("name"), source_code
+                        )
+                        param_type = self.extract_node_text(
+                            param.child_by_field_name("type"), source_code
+                        )
+                        parameters.append(f"{param_type} {param_name}")
+                result["parameters"] = parameters
+            elif child.type == "type":
+                return_type = self.extract_node_text(child, source_code)
+                result["return_type"] = return_type
+
+    def _handle_field_declaration(
+        self, node, source_code: bytes, result: Dict[str, Any]
+    ) -> None:
+        field_type = None
+        field_name = None
+
+        for child in node.children:
+            if child.type in [
+                "type_identifier",
+                "generic_type",
+                "array_type",
+                "integral_type",
+                "floating_point_type",
+                "boolean_type",
+            ]:
+                field_type = self.extract_node_text(child, source_code)
+            elif child.type == "variable_declarator":
+                name_node = child.child_by_field_name("name")
+                if name_node:
+                    field_name = self.extract_node_text(name_node, source_code)
+
+        if field_name:
+            result["type"] = "field_declaration"
+            if field_type:
+                result["name"] = f"{field_type} {field_name}"
+            else:
+                result["name"] = field_name