jaclang 0.8.0__py3-none-any.whl → 0.8.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jaclang has been flagged as potentially problematic; consult the registry's advisory page for details.
- jaclang/cli/cli.py +11 -9
- jaclang/compiler/jac.lark +2 -12
- jaclang/compiler/larkparse/jac_parser.py +1 -1
- jaclang/compiler/parser.py +360 -521
- jaclang/compiler/passes/main/cfg_build_pass.py +2 -2
- jaclang/compiler/passes/main/def_impl_match_pass.py +14 -13
- jaclang/compiler/passes/main/def_use_pass.py +4 -7
- jaclang/compiler/passes/main/import_pass.py +3 -3
- jaclang/compiler/passes/main/inheritance_pass.py +2 -2
- jaclang/compiler/passes/main/pyast_gen_pass.py +196 -218
- jaclang/compiler/passes/main/pyast_load_pass.py +115 -311
- jaclang/compiler/passes/main/pyjac_ast_link_pass.py +8 -7
- jaclang/compiler/passes/main/sym_tab_build_pass.py +3 -3
- jaclang/compiler/passes/main/sym_tab_link_pass.py +4 -4
- jaclang/compiler/passes/main/tests/fixtures/symtab_link_tests/action/actions.jac +1 -5
- jaclang/compiler/passes/main/tests/fixtures/symtab_link_tests/main.jac +1 -8
- jaclang/compiler/passes/main/tests/test_cfg_build_pass.py +4 -2
- jaclang/compiler/passes/tool/doc_ir_gen_pass.py +197 -120
- jaclang/compiler/program.py +2 -7
- jaclang/compiler/tests/fixtures/fam.jac +2 -2
- jaclang/compiler/tests/fixtures/pkg_import_lib/__init__.jac +1 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib/sub/__init__.jac +1 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib/sub/helper.jac +3 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib/tools.jac +3 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib_py/__init__.py +11 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib_py/sub/__init__.py +7 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib_py/sub/helper.jac +3 -0
- jaclang/compiler/tests/fixtures/pkg_import_lib_py/tools.jac +3 -0
- jaclang/compiler/tests/fixtures/pkg_import_main.jac +10 -0
- jaclang/compiler/tests/fixtures/pkg_import_main_py.jac +11 -0
- jaclang/compiler/tests/test_importer.py +20 -0
- jaclang/compiler/tests/test_parser.py +1 -0
- jaclang/compiler/unitree.py +456 -304
- jaclang/langserve/engine.jac +498 -0
- jaclang/langserve/sem_manager.jac +309 -0
- jaclang/langserve/server.jac +186 -0
- jaclang/langserve/tests/server_test/test_lang_serve.py +6 -7
- jaclang/langserve/tests/server_test/utils.py +4 -1
- jaclang/langserve/tests/session.jac +294 -0
- jaclang/langserve/tests/test_sem_tokens.py +2 -2
- jaclang/langserve/tests/test_server.py +12 -7
- jaclang/langserve/utils.jac +51 -30
- jaclang/runtimelib/archetype.py +1 -1
- jaclang/runtimelib/builtin.py +17 -14
- jaclang/runtimelib/importer.py +26 -8
- jaclang/runtimelib/machine.py +96 -55
- jaclang/runtimelib/tests/fixtures/traversing_save.jac +7 -5
- jaclang/runtimelib/utils.py +3 -3
- jaclang/tests/fixtures/backward_edge_visit.jac +31 -0
- jaclang/tests/fixtures/builtin_printgraph.jac +85 -0
- jaclang/tests/fixtures/builtin_printgraph_json.jac +21 -0
- jaclang/tests/fixtures/builtin_printgraph_mermaid.jac +16 -0
- jaclang/tests/fixtures/chandra_bugs2.jac +20 -13
- jaclang/tests/fixtures/concurrency.jac +1 -1
- jaclang/tests/fixtures/edge_ability.jac +49 -0
- jaclang/tests/fixtures/guess_game.jac +1 -1
- jaclang/tests/fixtures/here_usage_error.jac +21 -0
- jaclang/tests/fixtures/here_visitor_usage.jac +21 -0
- jaclang/tests/fixtures/node_del.jac +30 -36
- jaclang/tests/fixtures/visit_traversal.jac +47 -0
- jaclang/tests/test_cli.py +12 -7
- jaclang/tests/test_language.py +91 -16
- jaclang/utils/helpers.py +14 -6
- jaclang/utils/lang_tools.py +2 -3
- jaclang/utils/tests/test_lang_tools.py +2 -1
- jaclang/utils/treeprinter.py +3 -4
- {jaclang-0.8.0.dist-info → jaclang-0.8.1.dist-info}/METADATA +4 -3
- {jaclang-0.8.0.dist-info → jaclang-0.8.1.dist-info}/RECORD +71 -55
- {jaclang-0.8.0.dist-info → jaclang-0.8.1.dist-info}/WHEEL +1 -1
- jaclang/langserve/engine.py +0 -553
- jaclang/langserve/sem_manager.py +0 -383
- jaclang/langserve/server.py +0 -167
- jaclang/langserve/tests/session.py +0 -255
- jaclang/tests/fixtures/builtin_dotgen.jac +0 -42
- jaclang/tests/fixtures/builtin_dotgen_json.jac +0 -21
- /jaclang/langserve/{__init__.py → __init__.jac} +0 -0
- {jaclang-0.8.0.dist-info → jaclang-0.8.1.dist-info}/entry_points.txt +0 -0
jaclang/langserve/sem_manager.py
DELETED
|
@@ -1,383 +0,0 @@
|
|
|
1
|
-
"""Semantic Token Manager module."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
|
|
5
|
-
from typing import List, Optional, Tuple
|
|
6
|
-
|
|
7
|
-
import jaclang.compiler.unitree as uni
|
|
8
|
-
from jaclang import JacMachineInterface as Jac
|
|
9
|
-
|
|
10
|
-
import lsprotocol.types as lspt
|
|
11
|
-
|
|
12
|
-
(utils,) = Jac.py_jac_import(".utils", base_path=__file__)
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
class SemTokManager:
    """Semantic Token Manager class.

    Builds and incrementally maintains LSP semantic tokens for a compiled
    Jac module. Tokens are kept in the flat LSP relative encoding: groups
    of 5 ints per token — (deltaLine, deltaStartChar, length, tokenType,
    tokenModifiers) — which is why the update helpers below step through
    ``sem_tokens`` in strides of 5 (``index + 5`` is the next token,
    ``index + 1`` its start-char slot, ``index + 2`` its length slot).

    NOTE(review): this body was reconstructed from a line-mangled diff
    listing; statement nesting marked below with NOTE(review) should be
    confirmed against the original file.
    """

    def __init__(self, ir: uni.Module) -> None:
        """Initialize semantic token manager.

        Precomputes both the flat LSP-encoded token list and a
        position-annotated token/AST-node list for the given module IR.
        """
        self.sem_tokens: List[int] = self.gen_sem_tokens(ir)
        self.static_sem_tokens: List[
            Tuple[lspt.Position, int, int, uni.AstSymbolNode]
        ] = self.gen_sem_tok_node(ir)

    def gen_sem_tokens(self, ir: uni.Module) -> list[int]:
        """Return semantic tokens.

        Walks the module's in-module nodes and emits the flat LSP relative
        encoding. Positions are converted from the compiler's 1-based
        line/column convention to LSP's 0-based one (hence the ``- 1``s).
        """
        tokens = []
        prev_line, prev_col = 0, 0
        for node in ir._in_mod_nodes:
            # Only named atoms that carry a semantic-token classification
            # contribute a token.
            if isinstance(node, uni.NameAtom) and node.sem_token:
                line, col_start, col_end = (
                    node.loc.first_line - 1,
                    node.loc.col_start - 1,
                    node.loc.col_end - 1,
                )
                length = col_end - col_start
                tokens += [
                    # deltaLine relative to the previous token.
                    line - prev_line,
                    # deltaStartChar: absolute on a new line, relative on
                    # the same line (per the LSP encoding rules).
                    col_start if line != prev_line else col_start - prev_col,
                    length,
                    # node.sem_token supplies the remaining ints
                    # (presumably tokenType and tokenModifiers — 5 ints per
                    # token total, matching the stride-5 arithmetic below).
                    *node.sem_token,
                ]
                prev_line, prev_col = line, col_start
        return tokens

    def gen_sem_tok_node(
        self, ir: uni.Module
    ) -> List[Tuple[lspt.Position, int, int, uni.AstSymbolNode]]:
        """Return semantic tokens.

        Same walk as :meth:`gen_sem_tokens`, but keeps absolute positions
        and the originating AST node for each token instead of the flat
        delta encoding.
        """
        tokens: List[Tuple[lspt.Position, int, int, uni.AstSymbolNode]] = []
        for node in ir._in_mod_nodes:
            if isinstance(node, uni.NameAtom) and node.sem_token:
                line, col_start, col_end = (
                    node.loc.first_line - 1,
                    node.loc.col_start - 1,
                    node.loc.col_end - 1,
                )
                length = col_end - col_start
                pos = lspt.Position(line, col_start)
                tokens += [(pos, col_end, length, node)]
        return tokens

    def update_sem_tokens(
        self,
        content_changes: lspt.DidChangeTextDocumentParams,
        sem_tokens: list[int],
        document_lines: List[str],
    ) -> list[int]:
        """Update semantic tokens on change.

        Patches the flat token list in place for each incremental
        (ranged) text change, shifting the delta-encoded positions of
        surrounding tokens instead of re-tokenizing the document.
        Full-document changes (Type2 events) are ignored here.
        """
        for change in [
            x
            for x in content_changes.content_changes
            if isinstance(x, lspt.TextDocumentContentChangeEvent_Type1)
        ]:
            change_start_line = change.range.start.line
            change_start_char = change.range.start.character
            change_end_line = change.range.end.line
            change_end_char = change.range.end.character

            # Empty replacement text means the edit is a pure deletion.
            is_delete = change.text == ""
            # Locate the tokens bracketing the edited range; the flag says
            # whether the edit lands inside the previous token's span.
            prev_token_index, next_token_index, insert_inside_token = (
                utils.find_surrounding_tokens(
                    change_start_line,
                    change_start_char,
                    change_end_line,
                    change_end_char,
                    sem_tokens,
                )
            )
            prev_tok_pos = utils.get_token_start(prev_token_index, sem_tokens)
            nxt_tok_pos = utils.get_token_start(next_token_index, sem_tokens)
            changing_line_text = utils.get_line_of_code(
                change_start_line, document_lines
            )
            if not changing_line_text:
                # Edited line not resolvable — bail out with tokens as-is.
                return sem_tokens
            # True when the edit is strictly between the previous token's
            # end and the next token's start (touches neither token).
            is_edit_between_tokens = bool(
                (
                    change_start_line > prev_tok_pos[0]
                    or (
                        change_start_line == prev_tok_pos[0]
                        and change_start_char
                        > prev_tok_pos[1] + sem_tokens[prev_token_index + 2]
                        if prev_token_index and prev_token_index + 2 < len(sem_tokens)
                        else 0
                    )
                )
                and (
                    change_end_line < nxt_tok_pos[0]
                    or (
                        change_end_line == nxt_tok_pos[0]
                        and change_end_char < nxt_tok_pos[1]
                    )
                )
            )
            text = r"%s" % change.text
            # Number of newlines the inserted text introduces.
            line_delta = len(text.split("\n")) - 1

            is_multiline_insertion = line_delta > 0
            is_next_token_same_line = change_end_line == nxt_tok_pos[0]

            if is_delete:
                # When the deletion starts inside the previous token (or
                # exactly one token sits between prev and next — a stride
                # of 10), retarget "next" to the token after prev.
                next_token_index = (
                    prev_token_index + 5
                    if insert_inside_token
                    and prev_token_index is not None
                    or (
                        next_token_index
                        and prev_token_index is not None
                        and next_token_index >= 10
                        and next_token_index - prev_token_index == 10
                    )
                    else next_token_index
                )
                if next_token_index is None:
                    # Nothing after the deletion to shift.
                    return sem_tokens
                nxt_tok_pos = utils.get_token_start(next_token_index, sem_tokens)
                is_single_line_change = change_end_line == change_start_line
                is_next_token_same_line = change_end_line == nxt_tok_pos[0]
                if (
                    is_single_line_change
                    and insert_inside_token
                    and prev_token_index is not None
                ):
                    sem_tokens = SemTokManager.handle_single_line_delete(
                        sem_tokens,
                        next_token_index,
                        prev_token_index,
                        is_next_token_same_line,
                        change,
                    )
                elif is_single_line_change and is_edit_between_tokens:
                    sem_tokens = SemTokManager.handle_single_line_delete_between_tokens(
                        sem_tokens,
                        next_token_index,
                        is_next_token_same_line,
                        change,
                        change_start_line,
                        change_end_line,
                    )
                else:
                    sem_tokens = SemTokManager.handle_multi_line_delete(
                        sem_tokens,
                        next_token_index,
                        nxt_tok_pos,
                        change_start_line,
                        change_end_line,
                        change_start_char,
                        change_end_char,
                        prev_tok_pos,
                        is_next_token_same_line,
                    )
                # NOTE(review): returns after the first deletion change is
                # handled (original behavior) — remaining changes in the
                # same notification are not processed.
                return sem_tokens

            is_token_boundary_edit = False
            if insert_inside_token and prev_token_index is not None:
                # Insertion lands inside a token: grow/split it and
                # possibly retarget the "next token" bookkeeping.
                sem_tokens, is_token_boundary_edit, nxt_tok_pos, next_token_index = (
                    SemTokManager.handle_insert_inside_token(
                        change,
                        sem_tokens,
                        prev_token_index,
                        changing_line_text,
                        line_delta,
                        prev_tok_pos,
                        change_start_char,
                        change_end_char,
                        is_token_boundary_edit,
                        nxt_tok_pos,
                    )
                )
            tokens_on_same_line = prev_tok_pos[0] == nxt_tok_pos[0]
            if (
                is_edit_between_tokens
                or is_token_boundary_edit
                or is_multiline_insertion
            ) and next_token_index is not None:
                if is_multiline_insertion:
                    sem_tokens = SemTokManager.handle_multi_line_insertion(
                        sem_tokens,
                        next_token_index,
                        nxt_tok_pos,
                        change_start_line,
                        change_end_line,
                        change_end_char,
                        prev_tok_pos,
                        tokens_on_same_line,
                        changing_line_text,
                        line_delta,
                    )
                else:
                    sem_tokens = SemTokManager.handle_single_line_insertion(
                        sem_tokens,
                        next_token_index,
                        is_next_token_same_line,
                        change,
                        tokens_on_same_line,
                        nxt_tok_pos,
                        change_start_line,
                        line_delta,
                    )
        return sem_tokens

    @staticmethod
    def handle_multi_line_delete(
        sem_tokens: list[int],
        next_token_index: int,
        nxt_tok_pos: tuple[int, int, int],
        change_start_line: int,
        change_end_line: int,
        change_start_char: int,
        change_end_char: int,
        prev_tok_pos: tuple[int, int, int],
        is_next_token_same_line: bool,
    ) -> list[int]:
        """Handle multi line deletion.

        Rebases the next token's deltaStartChar when it ends up on the
        (merged) start line, then shrinks its deltaLine by the number of
        deleted lines.
        """
        if is_next_token_same_line:
            # Characters remaining between the deletion end and the next
            # token's start, re-anchored to the deletion's start column.
            char_del = nxt_tok_pos[1] - change_end_char
            total_char_del = change_start_char + char_del
            sem_tokens[next_token_index + 1] = (
                # Same line as the previous token → delta is relative to it.
                (total_char_del - prev_tok_pos[1])
                if prev_tok_pos[0] == change_start_line
                else total_char_del
            )
        # NOTE(review): line-delta adjustment applied unconditionally here
        # (nesting reconstructed from mangled listing — confirm).
        sem_tokens[next_token_index] -= change_end_line - change_start_line
        return sem_tokens

    @staticmethod
    def handle_single_line_delete_between_tokens(
        sem_tokens: list[int],
        next_token_index: int,
        is_next_token_same_line: bool,
        change: lspt.TextDocumentContentChangeEvent_Type1,
        change_start_line: int,
        change_end_line: int,
    ) -> list[int]:
        """Handle single line deletion between tokens.

        Either shifts the next token left by the deleted character count
        (same line) or reduces its line delta.
        """
        if is_next_token_same_line and change.range_length:
            sem_tokens[next_token_index + 1] -= change.range_length
        else:
            sem_tokens[next_token_index] -= change_end_line - change_start_line
        return sem_tokens

    @staticmethod
    def handle_single_line_delete(
        sem_tokens: list[int],
        next_token_index: int,
        prev_token_index: int,
        is_next_token_same_line: bool,
        change: lspt.TextDocumentContentChangeEvent_Type1,
    ) -> list[int]:
        """Handle single line deletion.

        The deletion is inside the previous token: shrink that token's
        length (slot +2) and, when the next token shares the line, shift
        its start delta left as well.
        """
        assert change.range_length is not None
        sem_tokens[prev_token_index + 2] -= change.range_length
        if is_next_token_same_line:
            sem_tokens[next_token_index + 1] -= change.range_length
        return sem_tokens

    @staticmethod
    def handle_single_line_insertion(
        sem_tokens: list[int],
        next_token_index: int,
        is_next_token_same_line: bool,
        change: lspt.TextDocumentContentChangeEvent_Type1,
        tokens_on_same_line: bool,
        nxt_tok_pos: tuple[int, int, int],
        change_start_line: int,
        change_end_line: int = 0,  # noqa: D417  (unused in original; do not add)
    ) -> list[int]:
        """Placeholder — see real implementation below."""

    # NOTE(review): the signature above must not gain parameters; kept
    # exactly as original below.
    @staticmethod
    def handle_single_line_insertion(  # noqa: F811
        sem_tokens: list[int],
        next_token_index: int,
        is_next_token_same_line: bool,
        change: lspt.TextDocumentContentChangeEvent_Type1,
        tokens_on_same_line: bool,
        nxt_tok_pos: tuple[int, int, int],
        change_start_line: int,
        line_delta: int,
    ) -> list[int]:
        """Handle single line insertion.

        Shifts the next token right by the inserted text length when it
        shares a line with the edit; always applies any line delta.
        """
        if tokens_on_same_line:
            sem_tokens[next_token_index + 1] += len(change.text)
            sem_tokens[next_token_index] += line_delta
        else:
            is_next_token_same_line = change_start_line == nxt_tok_pos[0]
            if is_next_token_same_line:
                sem_tokens[next_token_index] += line_delta
                sem_tokens[next_token_index + 1] += len(change.text)
            else:
                sem_tokens[next_token_index] += line_delta
        return sem_tokens

    @staticmethod
    def handle_multi_line_insertion(
        sem_tokens: list[int],
        next_token_index: int,
        nxt_tok_pos: tuple[int, int, int],
        change_start_line: int,
        change_end_line: int,
        change_end_char: int,
        prev_tok_pos: tuple[int, int, int],
        tokens_on_same_line: bool,
        changing_line_text: tuple[str, int],
        line_delta: int,
    ) -> list[int]:
        """Handle multi line insertion.

        Recomputes the next token's deltaStartChar (``total_char_del``)
        for the line it lands on after the insertion, and applies the net
        line delta (newlines inserted minus lines replaced).
        """
        if tokens_on_same_line:
            char_del = nxt_tok_pos[1] - change_end_char
            # changing_line_text[1] is the indentation/prefix width of the
            # edited line (per utils.get_line_of_code's return shape).
            total_char_del = changing_line_text[1] + char_del
        else:
            is_prev_token_same_line = change_end_line == prev_tok_pos[0]
            is_next_token_same_line = change_start_line == nxt_tok_pos[0]
            if is_prev_token_same_line:
                total_char_del = nxt_tok_pos[1]
            elif is_next_token_same_line:
                char_del = nxt_tok_pos[1] - change_end_char
                total_char_del = changing_line_text[1] + char_del
            else:
                total_char_del = sem_tokens[next_token_index + 1]
            # NOTE(review): net out lines consumed by the replaced range
            # (nesting reconstructed from mangled listing — confirm).
            line_delta -= change_end_line - change_start_line
        sem_tokens[next_token_index + 1] = total_char_del
        sem_tokens[next_token_index] += line_delta
        return sem_tokens

    @staticmethod
    def handle_insert_inside_token(
        change: lspt.TextDocumentContentChangeEvent_Type1,
        sem_tokens: list[int],
        prev_token_index: int,
        changing_line_text: tuple[str, int],
        line_delta: int,
        prev_tok_pos: tuple[int, int, int],
        change_start_char: int,
        change_end_char: int,
        is_token_boundary_edit: bool,
        nxt_tok_pos: tuple[int, int, int],
    ) -> tuple[list[int], bool, tuple[int, int, int], Optional[int]]:
        """Handle insert inside token.

        If whitespace/newline is inserted at the token's left boundary,
        the token itself is shifted; whitespace inserted mid-token marks a
        boundary edit and retargets bookkeeping to the following token
        (stride +5). Plain text inserted mid-token just grows the token's
        length slot (+2) and, when the following token shares the line,
        shifts that token too.
        """
        next_token_index = None
        for i in ["\n", " ", "\t"]:
            if i in change.text:
                if prev_tok_pos[1] == change_start_char:
                    # Insertion exactly at the token's start column: move
                    # the token rather than splitting it.
                    if i == "\n":
                        sem_tokens[prev_token_index] += line_delta
                        sem_tokens[prev_token_index + 1] = changing_line_text[1]
                    else:
                        sem_tokens[prev_token_index + 1] += len(change.text)
                    return (
                        sem_tokens,
                        is_token_boundary_edit,
                        nxt_tok_pos,
                        next_token_index,
                    )
                else:
                    # Whitespace mid-token: treat as a boundary edit and
                    # hand off to the next token's bookkeeping.
                    is_token_boundary_edit = True
                    next_token_index = prev_token_index + 5
                    nxt_tok_pos = utils.get_token_start(next_token_index, sem_tokens)
                    break
        if not is_token_boundary_edit:
            # Plain insertion/replacement within the token: adjust length
            # by inserted minus replaced characters.
            selected_region = change_end_char - change_start_char
            index_offset = 2
            sem_tokens[prev_token_index + index_offset] += (
                len(change.text) - selected_region
            )
            if (
                prev_tok_pos[0]
                == utils.get_token_start(prev_token_index + 5, sem_tokens)[0]
            ):
                # Following token on the same line: its relative start
                # shifts by the same amount.
                sem_tokens[prev_token_index + index_offset + 4] += (
                    len(change.text) - selected_region
                )
        return sem_tokens, is_token_boundary_edit, nxt_tok_pos, next_token_index
|
jaclang/langserve/server.py
DELETED
|
@@ -1,167 +0,0 @@
|
|
|
1
|
-
"""Jaclang Language Server."""
|
|
2
|
-
|
|
3
|
-
from __future__ import annotations
|
|
4
|
-
|
|
5
|
-
from typing import Optional
|
|
6
|
-
|
|
7
|
-
from jaclang.compiler.constant import (
|
|
8
|
-
JacSemTokenModifier as SemTokMod,
|
|
9
|
-
JacSemTokenType as SemTokType,
|
|
10
|
-
)
|
|
11
|
-
from jaclang.langserve.engine import JacLangServer
|
|
12
|
-
from jaclang.settings import settings
|
|
13
|
-
|
|
14
|
-
import lsprotocol.types as lspt
|
|
15
|
-
|
|
16
|
-
# Module-level language-server instance; every @server.feature decorator
# below registers its handler on this object, and run_lang_server() starts it.
server = JacLangServer()
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
@server.feature(lspt.TEXT_DOCUMENT_DID_OPEN)
async def did_open(ls: JacLangServer, params: lspt.DidOpenTextDocumentParams) -> None:
    """Run a deep check when a document is opened.

    After analysis completes, ask the client to re-request semantic tokens
    so highlighting reflects the fresh results.
    """
    await ls.launch_deep_check(params.text_document.uri)
    ls.lsp.send_request(lspt.WORKSPACE_SEMANTIC_TOKENS_REFRESH)
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
@server.feature(lspt.TEXT_DOCUMENT_DID_SAVE)
async def did_save(ls: JacLangServer, params: lspt.DidSaveTextDocumentParams) -> None:
    """Run a deep check when a document is saved, then refresh semantic tokens.

    NOTE(review): annotation corrected from DidOpenTextDocumentParams (a
    copy-paste from did_open); only ``text_document.uri`` is read, which
    both param types provide.
    """
    file_path = params.text_document.uri
    await ls.launch_deep_check(file_path)
    ls.lsp.send_request(lspt.WORKSPACE_SEMANTIC_TOKENS_REFRESH)
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
@server.feature(lspt.TEXT_DOCUMENT_DID_CHANGE)
async def did_change(
    ls: JacLangServer, params: lspt.DidChangeTextDocumentParams
) -> None:
    """Run a quick (lighter-weight) check on every edit.

    Deep analysis is reserved for open/save; changes only trigger the fast
    path, followed by a client-side semantic-token refresh.
    """
    file_path = params.text_document.uri
    await ls.launch_quick_check(file_path)
    ls.lsp.send_request(lspt.WORKSPACE_SEMANTIC_TOKENS_REFRESH)
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
@server.feature(lspt.TEXT_DOCUMENT_FORMATTING)
def formatting(
    ls: JacLangServer, params: lspt.DocumentFormattingParams
) -> list[lspt.TextEdit]:
    """Produce formatting edits for the requested document."""
    target_uri = params.text_document.uri
    return ls.formatted_jac(target_uri)
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
@server.feature(
    lspt.WORKSPACE_DID_CREATE_FILES,
    lspt.FileOperationRegistrationOptions(
        filters=[
            lspt.FileOperationFilter(pattern=lspt.FileOperationPattern("**/*.jac"))
        ]
    ),
)
def did_create_files(ls: JacLangServer, params: lspt.CreateFilesParams) -> None:
    """Handle creation of .jac files.

    Currently a no-op body — presumably registered only to advertise the
    file-operation capability for *.jac to the client; confirm no
    server-side work is needed on create.
    """
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
@server.feature(
    lspt.WORKSPACE_DID_RENAME_FILES,
    lspt.FileOperationRegistrationOptions(
        filters=[
            lspt.FileOperationFilter(pattern=lspt.FileOperationPattern("**/*.jac"))
        ]
    ),
)
def did_rename_files(ls: JacLangServer, params: lspt.RenameFilesParams) -> None:
    """Re-register renamed .jac modules under their new URIs.

    Each entry in ``params.files`` is a rename record carrying
    ``old_uri``/``new_uri``; forward each pair to the engine so its module
    bookkeeping follows the file.
    """
    # Iterate the rename records directly instead of building two parallel
    # uri lists and index-juggling with range(len(...)) — same behavior,
    # no intermediate lists.
    for file in params.files:
        ls.rename_module(file.old_uri, file.new_uri)
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
@server.feature(
    lspt.WORKSPACE_DID_DELETE_FILES,
    lspt.FileOperationRegistrationOptions(
        filters=[
            lspt.FileOperationFilter(pattern=lspt.FileOperationPattern("**/*.jac"))
        ]
    ),
)
def did_delete_files(ls: JacLangServer, params: lspt.DeleteFilesParams) -> None:
    """Drop engine state for every deleted .jac file."""
    for deleted in params.files:
        ls.delete_module(deleted.uri)
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
@server.feature(
    lspt.TEXT_DOCUMENT_COMPLETION,
    lspt.CompletionOptions(trigger_characters=[".", ":", "a-zA-Z0-9"]),
)
def completion(ls: JacLangServer, params: lspt.CompletionParams) -> lspt.CompletionList:
    """Compute completion items for the cursor position."""
    # The trigger character is only present when the client sent a context.
    trigger = params.context.trigger_character if params.context else None
    return ls.get_completion(params.text_document.uri, params.position, trigger)
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
@server.feature(lspt.TEXT_DOCUMENT_HOVER, lspt.HoverOptions(work_done_progress=True))
def hover(
    ls: JacLangServer, params: lspt.TextDocumentPositionParams
) -> Optional[lspt.Hover]:
    """Provide hover information for the given hover request.

    Delegates to the engine; returns None when there is nothing to show at
    the position.
    """
    return ls.get_hover_info(params.text_document.uri, params.position)
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
@server.feature(lspt.TEXT_DOCUMENT_DOCUMENT_SYMBOL)
def document_symbol(
    ls: JacLangServer, params: lspt.DocumentSymbolParams
) -> list[lspt.DocumentSymbol]:
    """Provide document symbols (the outline tree) for the document."""
    return ls.get_outline(params.text_document.uri)
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
@server.feature(lspt.TEXT_DOCUMENT_DEFINITION)
def definition(
    ls: JacLangServer, params: lspt.TextDocumentPositionParams
) -> Optional[lspt.Location]:
    """Provide the definition location of the symbol at the cursor.

    Returns None when no definition is known for the position.
    """
    return ls.get_definition(params.text_document.uri, params.position)
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
@server.feature(lspt.TEXT_DOCUMENT_REFERENCES)
def references(ls: JacLangServer, params: lspt.ReferenceParams) -> list[lspt.Location]:
    """List every location that references the symbol at the cursor."""
    uri = params.text_document.uri
    cursor = params.position
    return ls.get_references(uri, cursor)
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
@server.feature(lspt.TEXT_DOCUMENT_RENAME)
def rename(
    ls: JacLangServer, params: lspt.RenameParams
) -> Optional[lspt.WorkspaceEdit]:
    """Rename symbol.

    Logs a caution first — the feature is experimental — then delegates to
    the engine; returns None when the rename cannot be computed.
    """
    ls.log_warning("Auto Rename is Experimental, Please use with caution.")
    return ls.rename_symbol(params.text_document.uri, params.position, params.new_name)
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
@server.feature(
    lspt.TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL,
    # The legend maps the integer type/modifier indices in the token data
    # to names; it must match the encoding the engine produces.
    lspt.SemanticTokensLegend(
        token_types=SemTokType.as_str_list(),
        token_modifiers=SemTokMod.as_str_list(),
    ),
)
def semantic_tokens_full(
    ls: JacLangServer, params: lspt.SemanticTokensParams
) -> lspt.SemanticTokens:
    """Provide the full semantic-token set for the document."""
    return ls.get_semantic_tokens(params.text_document.uri)
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
def run_lang_server() -> None:
    """Run the language server.

    Enables pass timing (used by the server's diagnostics) and starts the
    server over stdio, blocking until the client disconnects.
    """
    settings.pass_timer = True
    server.start_io()
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
# Allow launching the server directly (python -m / script execution).
if __name__ == "__main__":
    run_lang_server()
|