jaclang 0.7.14__py3-none-any.whl → 0.7.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of jaclang might be problematic. See the package's registry page for more details.

Files changed (43)
  1. jaclang/cli/cli.py +11 -8
  2. jaclang/cli/cmdreg.py +9 -12
  3. jaclang/compiler/__init__.py +0 -2
  4. jaclang/compiler/absyntree.py +3 -8
  5. jaclang/compiler/passes/ir_pass.py +3 -12
  6. jaclang/compiler/passes/main/fuse_typeinfo_pass.py +4 -5
  7. jaclang/compiler/passes/main/import_pass.py +4 -2
  8. jaclang/compiler/passes/main/pyast_gen_pass.py +32 -31
  9. jaclang/compiler/passes/main/registry_pass.py +1 -1
  10. jaclang/compiler/passes/tool/jac_formatter_pass.py +3 -20
  11. jaclang/compiler/passes/transform.py +4 -0
  12. jaclang/compiler/semtable.py +5 -3
  13. jaclang/compiler/tests/test_importer.py +3 -0
  14. jaclang/langserve/engine.py +210 -69
  15. jaclang/langserve/server.py +6 -10
  16. jaclang/langserve/tests/fixtures/base_module_structure.jac +1 -1
  17. jaclang/langserve/tests/fixtures/import_include_statements.jac +1 -1
  18. jaclang/langserve/tests/test_sem_tokens.py +277 -0
  19. jaclang/langserve/tests/test_server.py +4 -4
  20. jaclang/langserve/utils.py +128 -95
  21. jaclang/plugin/builtin.py +1 -1
  22. jaclang/plugin/default.py +23 -16
  23. jaclang/plugin/feature.py +4 -5
  24. jaclang/plugin/spec.py +2 -2
  25. jaclang/{core → runtimelib}/architype.py +1 -1
  26. jaclang/{core → runtimelib}/context.py +4 -1
  27. jaclang/runtimelib/importer.py +414 -0
  28. jaclang/runtimelib/machine.py +19 -0
  29. jaclang/{core → runtimelib}/utils.py +1 -1
  30. jaclang/tests/fixtures/deep/one_lev.jac +3 -3
  31. jaclang/tests/fixtures/deep/one_lev_dup.jac +2 -3
  32. jaclang/tests/test_cli.py +1 -1
  33. jaclang/tests/test_language.py +7 -0
  34. jaclang/utils/treeprinter.py +0 -4
  35. {jaclang-0.7.14.dist-info → jaclang-0.7.15.dist-info}/METADATA +1 -1
  36. {jaclang-0.7.14.dist-info → jaclang-0.7.15.dist-info}/RECORD +42 -40
  37. jaclang/core/importer.py +0 -344
  38. /jaclang/{core → runtimelib}/__init__.py +0 -0
  39. /jaclang/{core → runtimelib}/constructs.py +0 -0
  40. /jaclang/{core → runtimelib}/memory.py +0 -0
  41. /jaclang/{core → runtimelib}/test.py +0 -0
  42. {jaclang-0.7.14.dist-info → jaclang-0.7.15.dist-info}/WHEEL +0 -0
  43. {jaclang-0.7.14.dist-info → jaclang-0.7.15.dist-info}/entry_points.txt +0 -0
@@ -5,8 +5,7 @@ from __future__ import annotations
5
5
  import asyncio
6
6
  import logging
7
7
  from concurrent.futures import ThreadPoolExecutor
8
- from typing import Callable, Optional
9
-
8
+ from typing import Callable, List, Optional, Tuple
10
9
 
11
10
  import jaclang.compiler.absyntree as ast
12
11
  from jaclang.compiler.compile import jac_str_to_pass
@@ -16,13 +15,16 @@ from jaclang.compiler.passes.main.schedules import py_code_gen_typed
16
15
  from jaclang.compiler.passes.tool import FuseCommentsPass, JacFormatPass
17
16
  from jaclang.langserve.utils import (
18
17
  collect_all_symbols_in_scope,
19
- collect_symbols,
20
18
  create_range,
21
- find_deepest_symbol_node_at_pos,
19
+ find_index,
20
+ find_node_by_position,
21
+ find_surrounding_tokens,
22
22
  gen_diagnostics,
23
23
  get_item_path,
24
+ get_line_of_code,
24
25
  get_mod_path,
25
- locate_affected_token,
26
+ get_symbols_for_outline,
27
+ get_token_start,
26
28
  parse_symbol_path,
27
29
  resolve_completion_symbol_table,
28
30
  )
@@ -44,6 +46,9 @@ class ModuleInfo:
44
46
  self.ir = ir
45
47
  self.impl_parent: Optional[ModuleInfo] = impl_parent
46
48
  self.sem_tokens: list[int] = self.gen_sem_tokens()
49
+ self.static_sem_tokens: List[
50
+ Tuple[lspt.Position, int, int, ast.AstSymbolNode]
51
+ ] = self.gen_sem_tok_node()
47
52
 
48
53
  @property
49
54
  def uri(self) -> str:
@@ -71,8 +76,28 @@ class ModuleInfo:
71
76
  prev_line, prev_col = line, col_start
72
77
  return tokens
73
78
 
79
+ def gen_sem_tok_node(
80
+ self,
81
+ ) -> List[Tuple[lspt.Position, int, int, ast.AstSymbolNode]]:
82
+ """Return semantic tokens."""
83
+ tokens: List[Tuple[lspt.Position, int, int, ast.AstSymbolNode]] = []
84
+ for node in self.ir._in_mod_nodes:
85
+ if isinstance(node, ast.NameAtom) and node.sem_token:
86
+ line, col_start, col_end = (
87
+ node.loc.first_line - 1,
88
+ node.loc.col_start - 1,
89
+ node.loc.col_end - 1,
90
+ )
91
+ length = col_end - col_start
92
+ pos = lspt.Position(line, col_start)
93
+ tokens += [(pos, col_end, length, node)]
94
+ return tokens
95
+
74
96
  def update_sem_tokens(
75
- self, content_changes: lspt.DidChangeTextDocumentParams
97
+ self,
98
+ content_changes: lspt.DidChangeTextDocumentParams,
99
+ sem_tokens: list[int],
100
+ document_lines: List[str],
76
101
  ) -> list[int]:
77
102
  """Update semantic tokens on change."""
78
103
  for change in [
@@ -85,61 +110,161 @@ class ModuleInfo:
85
110
  change_end_line = change.range.end.line
86
111
  change_end_char = change.range.end.character
87
112
 
88
- line_delta = change.text.count("\n") - (change_end_line - change_start_line)
89
- if line_delta == 0:
90
- char_delta = len(change.text) - (change_end_char - change_start_char)
91
- else:
92
- last_newline_index = change.text.rfind("\n")
93
- char_delta = (
94
- len(change.text)
95
- - last_newline_index
96
- - 1
97
- - change_end_char
98
- + change_start_char
113
+ is_delete = change.text == ""
114
+ prev_token_index, next_token_index, insert_inside_token = (
115
+ find_surrounding_tokens(
116
+ change_start_line,
117
+ change_start_char,
118
+ change_end_line,
119
+ change_end_char,
120
+ sem_tokens,
99
121
  )
100
-
101
- changed_token_index = locate_affected_token(
102
- self.sem_tokens,
103
- change_start_line,
104
- change_start_char,
105
- change_end_line,
106
- change_end_char,
107
122
  )
108
- if changed_token_index:
109
- self.sem_tokens[changed_token_index + 2] = max(
110
- 1, self.sem_tokens[changed_token_index + 2] + char_delta
123
+ prev_tok_pos = get_token_start(prev_token_index, sem_tokens)
124
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
125
+ changing_line_text = get_line_of_code(change_start_line, document_lines)
126
+ if not changing_line_text:
127
+ return sem_tokens
128
+ is_edit_between_tokens = bool(
129
+ (
130
+ change_start_line > prev_tok_pos[0]
131
+ or (
132
+ change_start_line == prev_tok_pos[0]
133
+ and change_start_char
134
+ > prev_tok_pos[1] + sem_tokens[prev_token_index + 2]
135
+ if prev_token_index and prev_token_index + 2 < len(sem_tokens)
136
+ else 0
137
+ )
138
+ )
139
+ and (
140
+ change_end_line < nxt_tok_pos[0]
141
+ or (
142
+ change_end_line == nxt_tok_pos[0]
143
+ and change_end_char < nxt_tok_pos[1]
144
+ )
111
145
  )
146
+ )
147
+ text = r"%s" % change.text
148
+ line_delta = len(text.split("\n")) - 1
149
+ is_multiline_insertion = line_delta > 0
150
+ # logging.info(f"chnge text: {change}")
151
+ # logging.info(
152
+ # f"""\n\nprev_token_index: {prev_token_index}, next_token_index:{next_token_index}
153
+ # ,\n insert_inside_token: {insert_inside_token}, insert_between_tokens:
154
+ # {is_edit_between_tokens},\n multi_line_insertion: {is_multiline_insertion}\n\n"""
155
+ # )
156
+ if is_delete:
157
+ next_token_index = (
158
+ prev_token_index + 5
159
+ if insert_inside_token
160
+ and prev_token_index is not None
161
+ or (
162
+ next_token_index
163
+ and prev_token_index is not None
164
+ and next_token_index >= 10
165
+ and next_token_index - prev_token_index == 10
166
+ )
167
+ else next_token_index
168
+ )
169
+ if next_token_index is None:
170
+ return sem_tokens
171
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
172
+ is_single_line_change = change_end_line == change_start_line
173
+ is_next_token_same_line = change_end_line == nxt_tok_pos[0]
112
174
  if (
113
- len(self.sem_tokens) > changed_token_index + 5
114
- and self.sem_tokens[changed_token_index + 5] == 0
175
+ is_single_line_change
176
+ and insert_inside_token
177
+ and prev_token_index is not None
115
178
  ):
116
- next_token_index = changed_token_index + 5
117
- self.sem_tokens[next_token_index + 1] = max(
118
- 0, self.sem_tokens[next_token_index + 1] + char_delta
179
+ sem_tokens[prev_token_index + 2] -= change.range_length
180
+ if is_next_token_same_line:
181
+ sem_tokens[next_token_index + 1] -= change.range_length
182
+ elif is_single_line_change and is_edit_between_tokens:
183
+ if is_next_token_same_line:
184
+ sem_tokens[next_token_index + 1] -= change.range_length
185
+
186
+ else:
187
+ sem_tokens[next_token_index] -= (
188
+ change_end_line - change_start_line
189
+ )
190
+ else:
191
+ if is_next_token_same_line:
192
+ char_del = nxt_tok_pos[1] - change_end_char
193
+ total_char_del = change_start_char + char_del
194
+ sem_tokens[next_token_index + 1] = (
195
+ (total_char_del - prev_tok_pos[1])
196
+ if prev_tok_pos[0] == change_start_line
197
+ else total_char_del
198
+ )
199
+ sem_tokens[next_token_index] -= change_end_line - change_start_line
200
+ return sem_tokens
201
+
202
+ is_token_boundary_edit = False
203
+ if insert_inside_token and prev_token_index is not None:
204
+ for i in ["\n", " ", "\t"]:
205
+ if i in change.text:
206
+ if prev_tok_pos[1] == change_start_char:
207
+ if i == "\n":
208
+ sem_tokens[prev_token_index] += line_delta
209
+ sem_tokens[prev_token_index + 1] = changing_line_text[1]
210
+ else:
211
+ sem_tokens[prev_token_index + 1] += len(change.text)
212
+ return sem_tokens
213
+ else:
214
+ is_token_boundary_edit = True
215
+ next_token_index = prev_token_index + 5
216
+ nxt_tok_pos = get_token_start(next_token_index, sem_tokens)
217
+ break
218
+ if not is_token_boundary_edit:
219
+ selected_region = change_end_char - change_start_char
220
+ index_offset = 2
221
+ sem_tokens[prev_token_index + index_offset] += (
222
+ len(change.text) - selected_region
119
223
  )
120
- return self.sem_tokens
121
-
122
- current_token_index = 0
123
- line_offset = 0
124
- while current_token_index < len(self.sem_tokens):
125
- token_line_number = self.sem_tokens[current_token_index] + line_offset
126
- token_start_pos = self.sem_tokens[current_token_index + 1]
127
-
128
- if token_line_number > change_start_line or (
129
- token_line_number == change_start_line
130
- and token_start_pos >= change_start_char
131
- ):
132
- self.sem_tokens[current_token_index] += line_delta
133
- if token_line_number == change_start_line:
134
- self.sem_tokens[current_token_index + 1] += char_delta
135
- if token_line_number > change_end_line or (
136
- token_line_number == change_end_line
137
- and token_start_pos >= change_end_char
224
+ if (
225
+ prev_tok_pos[0]
226
+ == get_token_start(prev_token_index + 5, sem_tokens)[0]
138
227
  ):
139
- break
140
- line_offset += self.sem_tokens[current_token_index]
141
- current_token_index += 5
142
- return self.sem_tokens
228
+ sem_tokens[prev_token_index + index_offset + 4] += (
229
+ len(change.text) - selected_region
230
+ )
231
+
232
+ tokens_on_same_line = prev_tok_pos[0] == nxt_tok_pos[0]
233
+ if (
234
+ is_edit_between_tokens
235
+ or is_token_boundary_edit
236
+ or is_multiline_insertion
237
+ ) and next_token_index is not None:
238
+ if is_multiline_insertion:
239
+ if tokens_on_same_line:
240
+ char_del = nxt_tok_pos[1] - change_end_char
241
+ total_char_del = changing_line_text[1] + char_del
242
+
243
+ else:
244
+ is_prev_token_same_line = change_end_line == prev_tok_pos[0]
245
+ is_next_token_same_line = change_start_line == nxt_tok_pos[0]
246
+ if is_prev_token_same_line:
247
+ total_char_del = nxt_tok_pos[1]
248
+ elif is_next_token_same_line:
249
+ char_del = nxt_tok_pos[1] - change_end_char
250
+ total_char_del = changing_line_text[1] + char_del
251
+ else:
252
+ total_char_del = sem_tokens[next_token_index + 1]
253
+ line_delta -= change_end_line - change_start_line
254
+ sem_tokens[next_token_index + 1] = total_char_del
255
+ sem_tokens[next_token_index] += line_delta
256
+ else:
257
+ if tokens_on_same_line:
258
+ sem_tokens[next_token_index + 1] += len(change.text)
259
+ sem_tokens[next_token_index] += line_delta
260
+ else:
261
+ is_next_token_same_line = change_start_line == nxt_tok_pos[0]
262
+ if is_next_token_same_line:
263
+ sem_tokens[next_token_index] += line_delta
264
+ sem_tokens[next_token_index + 1] += len(change.text)
265
+ else:
266
+ sem_tokens[next_token_index] += line_delta
267
+ return sem_tokens
143
268
 
144
269
 
145
270
  class JacLangServer(LanguageServer):
@@ -256,8 +381,8 @@ class JacLangServer(LanguageServer):
256
381
  current_line = document.lines[position.line]
257
382
  current_pos = position.character
258
383
  current_symbol_path = parse_symbol_path(current_line, current_pos)
259
- node_selected = find_deepest_symbol_node_at_pos(
260
- self.modules[file_path].ir,
384
+ node_selected = find_node_by_position(
385
+ self.modules[file_path].static_sem_tokens,
261
386
  position.line,
262
387
  position.character - 2,
263
388
  )
@@ -327,9 +452,14 @@ class JacLangServer(LanguageServer):
327
452
  """Return hover information for a file."""
328
453
  if file_path not in self.modules:
329
454
  return None
330
- node_selected = find_deepest_symbol_node_at_pos(
331
- self.modules[file_path].ir, position.line, position.character
455
+ token_index = find_index(
456
+ self.modules[file_path].sem_tokens,
457
+ position.line,
458
+ position.character,
332
459
  )
460
+ if token_index is None:
461
+ return None
462
+ node_selected = self.modules[file_path].static_sem_tokens[token_index][3]
333
463
  value = self.get_node_info(node_selected) if node_selected else None
334
464
  if value:
335
465
  return lspt.Hover(
@@ -361,12 +491,12 @@ class JacLangServer(LanguageServer):
361
491
  self.log_warning(f"Attribute error when accessing node attributes: {e}")
362
492
  return node_info.strip()
363
493
 
364
- def get_document_symbols(self, file_path: str) -> list[lspt.DocumentSymbol]:
494
+ def get_outline(self, file_path: str) -> list[lspt.DocumentSymbol]:
365
495
  """Return document symbols for a file."""
366
496
  if file_path in self.modules and (
367
497
  root_node := self.modules[file_path].ir._sym_tab
368
498
  ):
369
- return collect_symbols(root_node)
499
+ return get_symbols_for_outline(root_node)
370
500
  return []
371
501
 
372
502
  def get_definition(
@@ -375,9 +505,14 @@ class JacLangServer(LanguageServer):
375
505
  """Return definition location for a file."""
376
506
  if file_path not in self.modules:
377
507
  return None
378
- node_selected: Optional[ast.AstSymbolNode] = find_deepest_symbol_node_at_pos(
379
- self.modules[file_path].ir, position.line, position.character
508
+ token_index = find_index(
509
+ self.modules[file_path].sem_tokens,
510
+ position.line,
511
+ position.character,
380
512
  )
513
+ if token_index is None:
514
+ return None
515
+ node_selected = self.modules[file_path].static_sem_tokens[token_index][3]
381
516
  if node_selected:
382
517
  if (
383
518
  isinstance(node_selected, ast.Name)
@@ -400,13 +535,13 @@ class JacLangServer(LanguageServer):
400
535
  ):
401
536
  path_range = get_item_path(node_selected.parent)
402
537
  if path_range:
403
- path, range = path_range
404
- if path and range:
538
+ path, loc_range = path_range
539
+ if path and loc_range:
405
540
  return lspt.Location(
406
541
  uri=uris.from_fs_path(path),
407
542
  range=lspt.Range(
408
- start=lspt.Position(line=range[0], character=0),
409
- end=lspt.Position(line=range[1], character=5),
543
+ start=lspt.Position(line=loc_range[0], character=0),
544
+ end=lspt.Position(line=loc_range[1], character=5),
410
545
  ),
411
546
  )
412
547
  else:
@@ -424,7 +559,6 @@ class JacLangServer(LanguageServer):
424
559
  else node_selected
425
560
  )
426
561
  )
427
- self.log_py(f"{node_selected}, {decl_node}")
428
562
  decl_uri = uris.from_fs_path(decl_node.loc.mod_path)
429
563
  try:
430
564
  decl_range = create_range(decl_node.loc)
@@ -443,9 +577,16 @@ class JacLangServer(LanguageServer):
443
577
  self, file_path: str, position: lspt.Position
444
578
  ) -> list[lspt.Location]:
445
579
  """Return references for a file."""
446
- node_selected = find_deepest_symbol_node_at_pos(
447
- self.modules[file_path].ir, position.line, position.character
580
+ if file_path not in self.modules:
581
+ return []
582
+ index1 = find_index(
583
+ self.modules[file_path].sem_tokens,
584
+ position.line,
585
+ position.character,
448
586
  )
587
+ if index1 is None:
588
+ return []
589
+ node_selected = self.modules[file_path].static_sem_tokens[index1][3]
449
590
  if node_selected and node_selected.sym:
450
591
  list_of_references: list[lspt.Location] = [
451
592
  lspt.Location(
@@ -30,7 +30,11 @@ async def did_change(
30
30
  """Check syntax on change."""
31
31
  await ls.launch_quick_check(file_path := params.text_document.uri)
32
32
  if file_path in ls.modules:
33
- ls.modules[file_path].update_sem_tokens(params)
33
+ document = ls.workspace.get_text_document(file_path)
34
+ lines = document.source.splitlines()
35
+ ls.modules[file_path].update_sem_tokens(
36
+ params, ls.modules[file_path].sem_tokens, lines
37
+ )
34
38
  ls.lsp.send_request(lspt.WORKSPACE_SEMANTIC_TOKENS_REFRESH)
35
39
 
36
40
 
@@ -110,7 +114,7 @@ def document_symbol(
110
114
  ls: JacLangServer, params: lspt.DocumentSymbolParams
111
115
  ) -> list[lspt.DocumentSymbol]:
112
116
  """Provide document symbols."""
113
- return ls.get_document_symbols(params.text_document.uri)
117
+ return ls.get_outline(params.text_document.uri)
114
118
 
115
119
 
116
120
  @server.feature(lspt.TEXT_DOCUMENT_DEFINITION)
@@ -138,14 +142,6 @@ def semantic_tokens_full(
138
142
  ls: JacLangServer, params: lspt.SemanticTokensParams
139
143
  ) -> lspt.SemanticTokens:
140
144
  """Provide semantic tokens."""
141
- # import logging
142
-
143
- # logging.info("\nGetting semantic tokens\n")
144
- # # logging.info(ls.get_semantic_tokens(params.text_document.uri))
145
- # i = 0
146
- # while i < len(ls.get_semantic_tokens(params.text_document.uri).data):
147
- # logging.info(ls.get_semantic_tokens(params.text_document.uri).data[i : i + 5])
148
- # i += 5
149
145
  return ls.get_semantic_tokens(params.text_document.uri)
150
146
 
151
147
 
@@ -35,7 +35,7 @@ obj Colour1 {
35
35
  }
36
36
 
37
37
  :obj:Colour1:can:get_color1 -> Colorenum {
38
- return self.color;
38
+ return self.color1;
39
39
  }
40
40
 
41
41
  obj red :Colour1: {
@@ -1,6 +1,6 @@
1
1
  import:py os;
2
2
  import:py from math, sqrt as square_root;
3
3
  import:py datetime as dt;
4
- import:jac from base_module_structure, add_numbers as adsd, subtract,x,Colorenum as clr;
4
+ import:jac from base_module_structure, add as add_numbers , subtract,x,Colorenum as clr;
5
5
  import:jac base_module_structure as base_module_structure;
6
6
  import:py from py_import,add1 as ss, sub1 as subtract1,apple,Orange1;