jaclang 0.7.13__py3-none-any.whl → 0.7.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of jaclang might be problematic. Click here for more details.

Files changed (49)
  1. jaclang/cli/cli.py +11 -8
  2. jaclang/cli/cmdreg.py +9 -12
  3. jaclang/compiler/__init__.py +0 -2
  4. jaclang/compiler/absyntree.py +12 -12
  5. jaclang/compiler/parser.py +4 -0
  6. jaclang/compiler/passes/ir_pass.py +3 -12
  7. jaclang/compiler/passes/main/fuse_typeinfo_pass.py +4 -5
  8. jaclang/compiler/passes/main/import_pass.py +5 -2
  9. jaclang/compiler/passes/main/pyast_gen_pass.py +33 -39
  10. jaclang/compiler/passes/main/pyast_load_pass.py +1 -0
  11. jaclang/compiler/passes/main/registry_pass.py +1 -1
  12. jaclang/compiler/passes/main/tests/test_import_pass.py +5 -1
  13. jaclang/compiler/passes/tool/fuse_comments_pass.py +14 -2
  14. jaclang/compiler/passes/tool/jac_formatter_pass.py +23 -28
  15. jaclang/compiler/passes/transform.py +4 -0
  16. jaclang/compiler/semtable.py +5 -3
  17. jaclang/compiler/tests/test_importer.py +3 -0
  18. jaclang/langserve/engine.py +227 -94
  19. jaclang/langserve/server.py +6 -10
  20. jaclang/langserve/tests/fixtures/base_module_structure.jac +1 -1
  21. jaclang/langserve/tests/fixtures/import_include_statements.jac +1 -1
  22. jaclang/langserve/tests/test_sem_tokens.py +277 -0
  23. jaclang/langserve/tests/test_server.py +28 -4
  24. jaclang/langserve/utils.py +128 -95
  25. jaclang/plugin/builtin.py +1 -1
  26. jaclang/plugin/default.py +25 -16
  27. jaclang/plugin/feature.py +4 -5
  28. jaclang/plugin/spec.py +2 -2
  29. jaclang/{core → runtimelib}/architype.py +1 -1
  30. jaclang/{core → runtimelib}/context.py +4 -1
  31. jaclang/runtimelib/importer.py +414 -0
  32. jaclang/runtimelib/machine.py +19 -0
  33. jaclang/{core → runtimelib}/utils.py +1 -1
  34. jaclang/tests/fixtures/blankwithentry.jac +3 -0
  35. jaclang/tests/fixtures/deep/one_lev.jac +3 -3
  36. jaclang/tests/fixtures/deep/one_lev_dup.jac +2 -3
  37. jaclang/tests/test_cli.py +1 -1
  38. jaclang/tests/test_language.py +16 -0
  39. jaclang/tests/test_man_code.py +17 -0
  40. jaclang/utils/treeprinter.py +0 -4
  41. {jaclang-0.7.13.dist-info → jaclang-0.7.15.dist-info}/METADATA +1 -1
  42. {jaclang-0.7.13.dist-info → jaclang-0.7.15.dist-info}/RECORD +48 -45
  43. jaclang/core/importer.py +0 -344
  44. /jaclang/{core → runtimelib}/__init__.py +0 -0
  45. /jaclang/{core → runtimelib}/constructs.py +0 -0
  46. /jaclang/{core → runtimelib}/memory.py +0 -0
  47. /jaclang/{core → runtimelib}/test.py +0 -0
  48. {jaclang-0.7.13.dist-info → jaclang-0.7.15.dist-info}/WHEEL +0 -0
  49. {jaclang-0.7.13.dist-info → jaclang-0.7.15.dist-info}/entry_points.txt +0 -0
@@ -5,6 +5,7 @@ import sys
5
5
 
6
6
  from jaclang import jac_import
7
7
  from jaclang.cli import cli
8
+ from jaclang.plugin.feature import JacFeature as Jac
8
9
  from jaclang.utils.test import TestCase
9
10
 
10
11
 
@@ -17,11 +18,13 @@ class TestLoader(TestCase):
17
18
 
18
19
  def test_import_basic_python(self) -> None:
19
20
  """Test basic self loading."""
21
+ Jac.context().init_memory(base_path=self.fixture_abs_path(__file__))
20
22
  (h,) = jac_import("fixtures.hello_world", base_path=__file__)
21
23
  self.assertEqual(h.hello(), "Hello World!") # type: ignore
22
24
 
23
25
  def test_modules_correct(self) -> None:
24
26
  """Test basic self loading."""
27
+ Jac.context().init_memory(base_path=self.fixture_abs_path(__file__))
25
28
  jac_import("fixtures.hello_world", base_path=__file__)
26
29
  self.assertIn("module 'hello_world'", str(sys.modules))
27
30
  self.assertIn("/tests/fixtures/hello_world.jac", str(sys.modules))
@@ -5,8 +5,7 @@ from __future__ import annotations
5
5
  import asyncio
6
6
  import logging
7
7
  from concurrent.futures import ThreadPoolExecutor
8
- from typing import Callable, Optional
9
-
8
+ from typing import Callable, List, Optional, Tuple
10
9
 
11
10
  import jaclang.compiler.absyntree as ast
12
11
  from jaclang.compiler.compile import jac_str_to_pass
@@ -14,16 +13,18 @@ from jaclang.compiler.parser import JacParser
14
13
  from jaclang.compiler.passes import Pass
15
14
  from jaclang.compiler.passes.main.schedules import py_code_gen_typed
16
15
  from jaclang.compiler.passes.tool import FuseCommentsPass, JacFormatPass
17
- from jaclang.compiler.passes.transform import Alert
18
16
  from jaclang.langserve.utils import (
19
17
  collect_all_symbols_in_scope,
20
- collect_symbols,
21
18
  create_range,
22
- find_deepest_symbol_node_at_pos,
19
+ find_index,
20
+ find_node_by_position,
21
+ find_surrounding_tokens,
23
22
  gen_diagnostics,
24
23
  get_item_path,
24
+ get_line_of_code,
25
25
  get_mod_path,
26
- locate_affected_token,
26
+ get_symbols_for_outline,
27
+ get_token_start,
27
28
  parse_symbol_path,
28
29
  resolve_completion_symbol_table,
29
30
  )
@@ -39,14 +40,15 @@ class ModuleInfo:
39
40
  def __init__(
40
41
  self,
41
42
  ir: ast.Module,
42
- errors: list[Alert],
43
- warnings: list[Alert],
44
- parent: Optional[ModuleInfo] = None,
43
+ impl_parent: Optional[ModuleInfo] = None,
45
44
  ) -> None:
46
45
  """Initialize module info."""
47
46
  self.ir = ir
48
- self.parent: Optional[ModuleInfo] = parent
47
+ self.impl_parent: Optional[ModuleInfo] = impl_parent
49
48
  self.sem_tokens: list[int] = self.gen_sem_tokens()
49
+ self.static_sem_tokens: List[
50
+ Tuple[lspt.Position, int, int, ast.AstSymbolNode]
51
+ ] = self.gen_sem_tok_node()
50
52
 
51
53
  @property
52
54
  def uri(self) -> str:
@@ -74,8 +76,28 @@ class ModuleInfo:
74
76
  prev_line, prev_col = line, col_start
75
77
  return tokens
76
78
 
79
+ def gen_sem_tok_node(
80
+ self,
81
+ ) -> List[Tuple[lspt.Position, int, int, ast.AstSymbolNode]]:
82
+ """Return semantic tokens."""
83
+ tokens: List[Tuple[lspt.Position, int, int, ast.AstSymbolNode]] = []
84
+ for node in self.ir._in_mod_nodes:
85
+ if isinstance(node, ast.NameAtom) and node.sem_token:
86
+ line, col_start, col_end = (
87
+ node.loc.first_line - 1,
88
+ node.loc.col_start - 1,
89
+ node.loc.col_end - 1,
90
+ )
91
+ length = col_end - col_start
92
+ pos = lspt.Position(line, col_start)
93
+ tokens += [(pos, col_end, length, node)]
94
+ return tokens
95
+
77
96
  def update_sem_tokens(
78
- self, content_changes: lspt.DidChangeTextDocumentParams
97
+ self,
98
+ content_changes: lspt.DidChangeTextDocumentParams,
99
+ sem_tokens: list[int],
100
+ document_lines: List[str],
79
101
  ) -> list[int]:
80
102
  """Update semantic tokens on change."""
81
103
  for change in [
@@ -88,61 +110,161 @@ class ModuleInfo:
88
110
  change_end_line = change.range.end.line
89
111
  change_end_char = change.range.end.character
90
112
 
91
- line_delta = change.text.count("\n") - (change_end_line - change_start_line)
92
- if line_delta == 0:
93
- char_delta = len(change.text) - (change_end_char - change_start_char)
94
- else:
95
- last_newline_index = change.text.rfind("\n")
96
- char_delta = (
97
- len(change.text)
98
- - last_newline_index
99
- - 1
100
- - change_end_char
101
- + change_start_char
113
+ is_delete = change.text == ""
114
+ prev_token_index, next_token_index, insert_inside_token = (
115
+ find_surrounding_tokens(
116
+ change_start_line,
117
+ change_start_char,
118
+ change_end_line,
119
+ change_end_char,
120
+ sem_tokens,
102
121
  )
103
-
104
- changed_token_index = locate_affected_token(
105
- self.sem_tokens,
106
- change_start_line,
107
- change_start_char,
108
- change_end_line,
109
- change_end_char,
110
122
  )
111
- if changed_token_index:
112
- self.sem_tokens[changed_token_index + 2] = max(
113
- 1, self.sem_tokens[changed_token_index + 2] + char_delta
123
+ prev_tok_pos = get_token_start(prev_token_index, sem_tokens)
124
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
125
+ changing_line_text = get_line_of_code(change_start_line, document_lines)
126
+ if not changing_line_text:
127
+ return sem_tokens
128
+ is_edit_between_tokens = bool(
129
+ (
130
+ change_start_line > prev_tok_pos[0]
131
+ or (
132
+ change_start_line == prev_tok_pos[0]
133
+ and change_start_char
134
+ > prev_tok_pos[1] + sem_tokens[prev_token_index + 2]
135
+ if prev_token_index and prev_token_index + 2 < len(sem_tokens)
136
+ else 0
137
+ )
114
138
  )
139
+ and (
140
+ change_end_line < nxt_tok_pos[0]
141
+ or (
142
+ change_end_line == nxt_tok_pos[0]
143
+ and change_end_char < nxt_tok_pos[1]
144
+ )
145
+ )
146
+ )
147
+ text = r"%s" % change.text
148
+ line_delta = len(text.split("\n")) - 1
149
+ is_multiline_insertion = line_delta > 0
150
+ # logging.info(f"chnge text: {change}")
151
+ # logging.info(
152
+ # f"""\n\nprev_token_index: {prev_token_index}, next_token_index:{next_token_index}
153
+ # ,\n insert_inside_token: {insert_inside_token}, insert_between_tokens:
154
+ # {is_edit_between_tokens},\n multi_line_insertion: {is_multiline_insertion}\n\n"""
155
+ # )
156
+ if is_delete:
157
+ next_token_index = (
158
+ prev_token_index + 5
159
+ if insert_inside_token
160
+ and prev_token_index is not None
161
+ or (
162
+ next_token_index
163
+ and prev_token_index is not None
164
+ and next_token_index >= 10
165
+ and next_token_index - prev_token_index == 10
166
+ )
167
+ else next_token_index
168
+ )
169
+ if next_token_index is None:
170
+ return sem_tokens
171
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
172
+ is_single_line_change = change_end_line == change_start_line
173
+ is_next_token_same_line = change_end_line == nxt_tok_pos[0]
115
174
  if (
116
- len(self.sem_tokens) > changed_token_index + 5
117
- and self.sem_tokens[changed_token_index + 5] == 0
175
+ is_single_line_change
176
+ and insert_inside_token
177
+ and prev_token_index is not None
118
178
  ):
119
- next_token_index = changed_token_index + 5
120
- self.sem_tokens[next_token_index + 1] = max(
121
- 0, self.sem_tokens[next_token_index + 1] + char_delta
179
+ sem_tokens[prev_token_index + 2] -= change.range_length
180
+ if is_next_token_same_line:
181
+ sem_tokens[next_token_index + 1] -= change.range_length
182
+ elif is_single_line_change and is_edit_between_tokens:
183
+ if is_next_token_same_line:
184
+ sem_tokens[next_token_index + 1] -= change.range_length
185
+
186
+ else:
187
+ sem_tokens[next_token_index] -= (
188
+ change_end_line - change_start_line
189
+ )
190
+ else:
191
+ if is_next_token_same_line:
192
+ char_del = nxt_tok_pos[1] - change_end_char
193
+ total_char_del = change_start_char + char_del
194
+ sem_tokens[next_token_index + 1] = (
195
+ (total_char_del - prev_tok_pos[1])
196
+ if prev_tok_pos[0] == change_start_line
197
+ else total_char_del
198
+ )
199
+ sem_tokens[next_token_index] -= change_end_line - change_start_line
200
+ return sem_tokens
201
+
202
+ is_token_boundary_edit = False
203
+ if insert_inside_token and prev_token_index is not None:
204
+ for i in ["\n", " ", "\t"]:
205
+ if i in change.text:
206
+ if prev_tok_pos[1] == change_start_char:
207
+ if i == "\n":
208
+ sem_tokens[prev_token_index] += line_delta
209
+ sem_tokens[prev_token_index + 1] = changing_line_text[1]
210
+ else:
211
+ sem_tokens[prev_token_index + 1] += len(change.text)
212
+ return sem_tokens
213
+ else:
214
+ is_token_boundary_edit = True
215
+ next_token_index = prev_token_index + 5
216
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
217
+ break
218
+ if not is_token_boundary_edit:
219
+ selected_region = change_end_char - change_start_char
220
+ index_offset = 2
221
+ sem_tokens[prev_token_index + index_offset] += (
222
+ len(change.text) - selected_region
122
223
  )
123
- return self.sem_tokens
124
-
125
- current_token_index = 0
126
- line_offset = 0
127
- while current_token_index < len(self.sem_tokens):
128
- token_line_number = self.sem_tokens[current_token_index] + line_offset
129
- token_start_pos = self.sem_tokens[current_token_index + 1]
130
-
131
- if token_line_number > change_start_line or (
132
- token_line_number == change_start_line
133
- and token_start_pos >= change_start_char
134
- ):
135
- self.sem_tokens[current_token_index] += line_delta
136
- if token_line_number == change_start_line:
137
- self.sem_tokens[current_token_index + 1] += char_delta
138
- if token_line_number > change_end_line or (
139
- token_line_number == change_end_line
140
- and token_start_pos >= change_end_char
224
+ if (
225
+ prev_tok_pos[0]
226
+ == get_token_start(prev_token_index + 5, sem_tokens)[0]
141
227
  ):
142
- break
143
- line_offset += self.sem_tokens[current_token_index]
144
- current_token_index += 5
145
- return self.sem_tokens
228
+ sem_tokens[prev_token_index + index_offset + 4] += (
229
+ len(change.text) - selected_region
230
+ )
231
+
232
+ tokens_on_same_line = prev_tok_pos[0] == nxt_tok_pos[0]
233
+ if (
234
+ is_edit_between_tokens
235
+ or is_token_boundary_edit
236
+ or is_multiline_insertion
237
+ ) and next_token_index is not None:
238
+ if is_multiline_insertion:
239
+ if tokens_on_same_line:
240
+ char_del = nxt_tok_pos[1] - change_end_char
241
+ total_char_del = changing_line_text[1] + char_del
242
+
243
+ else:
244
+ is_prev_token_same_line = change_end_line == prev_tok_pos[0]
245
+ is_next_token_same_line = change_start_line == nxt_tok_pos[0]
246
+ if is_prev_token_same_line:
247
+ total_char_del = nxt_tok_pos[1]
248
+ elif is_next_token_same_line:
249
+ char_del = nxt_tok_pos[1] - change_end_char
250
+ total_char_del = changing_line_text[1] + char_del
251
+ else:
252
+ total_char_del = sem_tokens[next_token_index + 1]
253
+ line_delta -= change_end_line - change_start_line
254
+ sem_tokens[next_token_index + 1] = total_char_del
255
+ sem_tokens[next_token_index] += line_delta
256
+ else:
257
+ if tokens_on_same_line:
258
+ sem_tokens[next_token_index + 1] += len(change.text)
259
+ sem_tokens[next_token_index] += line_delta
260
+ else:
261
+ is_next_token_same_line = change_start_line == nxt_tok_pos[0]
262
+ if is_next_token_same_line:
263
+ sem_tokens[next_token_index] += line_delta
264
+ sem_tokens[next_token_index + 1] += len(change.text)
265
+ else:
266
+ sem_tokens[next_token_index] += line_delta
267
+ return sem_tokens
146
268
 
147
269
 
148
270
  class JacLangServer(LanguageServer):
@@ -162,29 +284,17 @@ class JacLangServer(LanguageServer):
162
284
  if not isinstance(build.ir, ast.Module):
163
285
  self.log_error("Error with module build.")
164
286
  return
165
- self.modules[file_path] = ModuleInfo(
166
- ir=build.ir,
167
- errors=[
168
- i
169
- for i in build.errors_had
170
- if i.loc.mod_path == uris.to_fs_path(file_path)
171
- ],
172
- warnings=[
173
- i
174
- for i in build.warnings_had
175
- if i.loc.mod_path == uris.to_fs_path(file_path)
176
- ],
287
+ keep_parent = (
288
+ self.modules[file_path].impl_parent if file_path in self.modules else None
177
289
  )
290
+ self.modules[file_path] = ModuleInfo(ir=build.ir, impl_parent=keep_parent)
178
291
  for p in build.ir.mod_deps.keys():
179
292
  uri = uris.from_fs_path(p)
180
- self.modules[uri] = ModuleInfo(
181
- ir=build.ir.mod_deps[p],
182
- errors=[i for i in build.errors_had if i.loc.mod_path == p],
183
- warnings=[i for i in build.warnings_had if i.loc.mod_path == p],
184
- )
185
- self.modules[uri].parent = (
186
- self.modules[file_path] if file_path != uri else None
187
- )
293
+ if file_path != uri:
294
+ self.modules[uri] = ModuleInfo(
295
+ ir=build.ir.mod_deps[p],
296
+ impl_parent=self.modules[file_path],
297
+ )
188
298
 
189
299
  def quick_check(self, file_path: str) -> bool:
190
300
  """Rebuild a file."""
@@ -206,6 +316,12 @@ class JacLangServer(LanguageServer):
206
316
  """Rebuild a file and its dependencies."""
207
317
  try:
208
318
  document = self.workspace.get_text_document(file_path)
319
+ if file_path in self.modules and (
320
+ parent := self.modules[file_path].impl_parent
321
+ ):
322
+ return self.deep_check(
323
+ uris.from_fs_path(parent.ir.loc.mod_path), annex_view=file_path
324
+ )
209
325
  build = jac_str_to_pass(
210
326
  jac_str=document.source,
211
327
  file_path=document.path,
@@ -216,6 +332,7 @@ class JacLangServer(LanguageServer):
216
332
  return self.deep_check(
217
333
  uris.from_fs_path(discover), annex_view=file_path
218
334
  )
335
+
219
336
  self.publish_diagnostics(
220
337
  file_path,
221
338
  gen_diagnostics(
@@ -264,8 +381,8 @@ class JacLangServer(LanguageServer):
264
381
  current_line = document.lines[position.line]
265
382
  current_pos = position.character
266
383
  current_symbol_path = parse_symbol_path(current_line, current_pos)
267
- node_selected = find_deepest_symbol_node_at_pos(
268
- self.modules[file_path].ir,
384
+ node_selected = find_node_by_position(
385
+ self.modules[file_path].static_sem_tokens,
269
386
  position.line,
270
387
  position.character - 2,
271
388
  )
@@ -335,9 +452,14 @@ class JacLangServer(LanguageServer):
335
452
  """Return hover information for a file."""
336
453
  if file_path not in self.modules:
337
454
  return None
338
- node_selected = find_deepest_symbol_node_at_pos(
339
- self.modules[file_path].ir, position.line, position.character
455
+ token_index = find_index(
456
+ self.modules[file_path].sem_tokens,
457
+ position.line,
458
+ position.character,
340
459
  )
460
+ if token_index is None:
461
+ return None
462
+ node_selected = self.modules[file_path].static_sem_tokens[token_index][3]
341
463
  value = self.get_node_info(node_selected) if node_selected else None
342
464
  if value:
343
465
  return lspt.Hover(
@@ -369,12 +491,12 @@ class JacLangServer(LanguageServer):
369
491
  self.log_warning(f"Attribute error when accessing node attributes: {e}")
370
492
  return node_info.strip()
371
493
 
372
- def get_document_symbols(self, file_path: str) -> list[lspt.DocumentSymbol]:
494
+ def get_outline(self, file_path: str) -> list[lspt.DocumentSymbol]:
373
495
  """Return document symbols for a file."""
374
496
  if file_path in self.modules and (
375
497
  root_node := self.modules[file_path].ir._sym_tab
376
498
  ):
377
- return collect_symbols(root_node)
499
+ return get_symbols_for_outline(root_node)
378
500
  return []
379
501
 
380
502
  def get_definition(
@@ -383,9 +505,14 @@ class JacLangServer(LanguageServer):
383
505
  """Return definition location for a file."""
384
506
  if file_path not in self.modules:
385
507
  return None
386
- node_selected: Optional[ast.AstSymbolNode] = find_deepest_symbol_node_at_pos(
387
- self.modules[file_path].ir, position.line, position.character
508
+ token_index = find_index(
509
+ self.modules[file_path].sem_tokens,
510
+ position.line,
511
+ position.character,
388
512
  )
513
+ if token_index is None:
514
+ return None
515
+ node_selected = self.modules[file_path].static_sem_tokens[token_index][3]
389
516
  if node_selected:
390
517
  if (
391
518
  isinstance(node_selected, ast.Name)
@@ -408,13 +535,13 @@ class JacLangServer(LanguageServer):
408
535
  ):
409
536
  path_range = get_item_path(node_selected.parent)
410
537
  if path_range:
411
- path, range = path_range
412
- if path and range:
538
+ path, loc_range = path_range
539
+ if path and loc_range:
413
540
  return lspt.Location(
414
541
  uri=uris.from_fs_path(path),
415
542
  range=lspt.Range(
416
- start=lspt.Position(line=range[0], character=0),
417
- end=lspt.Position(line=range[1], character=5),
543
+ start=lspt.Position(line=loc_range[0], character=0),
544
+ end=lspt.Position(line=loc_range[1], character=5),
418
545
  ),
419
546
  )
420
547
  else:
@@ -432,7 +559,6 @@ class JacLangServer(LanguageServer):
432
559
  else node_selected
433
560
  )
434
561
  )
435
- self.log_py(f"{node_selected}, {decl_node}")
436
562
  decl_uri = uris.from_fs_path(decl_node.loc.mod_path)
437
563
  try:
438
564
  decl_range = create_range(decl_node.loc)
@@ -451,9 +577,16 @@ class JacLangServer(LanguageServer):
451
577
  self, file_path: str, position: lspt.Position
452
578
  ) -> list[lspt.Location]:
453
579
  """Return references for a file."""
454
- node_selected = find_deepest_symbol_node_at_pos(
455
- self.modules[file_path].ir, position.line, position.character
580
+ if file_path not in self.modules:
581
+ return []
582
+ index1 = find_index(
583
+ self.modules[file_path].sem_tokens,
584
+ position.line,
585
+ position.character,
456
586
  )
587
+ if index1 is None:
588
+ return []
589
+ node_selected = self.modules[file_path].static_sem_tokens[index1][3]
457
590
  if node_selected and node_selected.sym:
458
591
  list_of_references: list[lspt.Location] = [
459
592
  lspt.Location(
@@ -30,7 +30,11 @@ async def did_change(
30
30
  """Check syntax on change."""
31
31
  await ls.launch_quick_check(file_path := params.text_document.uri)
32
32
  if file_path in ls.modules:
33
- ls.modules[file_path].update_sem_tokens(params)
33
+ document = ls.workspace.get_text_document(file_path)
34
+ lines = document.source.splitlines()
35
+ ls.modules[file_path].update_sem_tokens(
36
+ params, ls.modules[file_path].sem_tokens, lines
37
+ )
34
38
  ls.lsp.send_request(lspt.WORKSPACE_SEMANTIC_TOKENS_REFRESH)
35
39
 
36
40
 
@@ -110,7 +114,7 @@ def document_symbol(
110
114
  ls: JacLangServer, params: lspt.DocumentSymbolParams
111
115
  ) -> list[lspt.DocumentSymbol]:
112
116
  """Provide document symbols."""
113
- return ls.get_document_symbols(params.text_document.uri)
117
+ return ls.get_outline(params.text_document.uri)
114
118
 
115
119
 
116
120
  @server.feature(lspt.TEXT_DOCUMENT_DEFINITION)
@@ -138,14 +142,6 @@ def semantic_tokens_full(
138
142
  ls: JacLangServer, params: lspt.SemanticTokensParams
139
143
  ) -> lspt.SemanticTokens:
140
144
  """Provide semantic tokens."""
141
- # import logging
142
-
143
- # logging.info("\nGetting semantic tokens\n")
144
- # # logging.info(ls.get_semantic_tokens(params.text_document.uri))
145
- # i = 0
146
- # while i < len(ls.get_semantic_tokens(params.text_document.uri).data):
147
- # logging.info(ls.get_semantic_tokens(params.text_document.uri).data[i : i + 5])
148
- # i += 5
149
145
  return ls.get_semantic_tokens(params.text_document.uri)
150
146
 
151
147
 
@@ -35,7 +35,7 @@ obj Colour1 {
35
35
  }
36
36
 
37
37
  :obj:Colour1:can:get_color1 -> Colorenum {
38
- return self.color;
38
+ return self.color1;
39
39
  }
40
40
 
41
41
  obj red :Colour1: {
@@ -1,6 +1,6 @@
1
1
  import:py os;
2
2
  import:py from math, sqrt as square_root;
3
3
  import:py datetime as dt;
4
- import:jac from base_module_structure, add_numbers as adsd, subtract,x,Colorenum as clr;
4
+ import:jac from base_module_structure, add as add_numbers , subtract,x,Colorenum as clr;
5
5
  import:jac base_module_structure as base_module_structure;
6
6
  import:py from py_import,add1 as ss, sub1 as subtract1,apple,Orange1;