Typhon-Language 0.1.3-py3-none-any.whl → 0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. Typhon/Driver/configs.py +14 -0
  2. Typhon/Driver/debugging.py +148 -5
  3. Typhon/Driver/diagnostic.py +4 -3
  4. Typhon/Driver/language_server.py +25 -0
  5. Typhon/Driver/run.py +1 -1
  6. Typhon/Driver/translate.py +14 -10
  7. Typhon/Driver/utils.py +39 -1
  8. Typhon/Grammar/_typhon_parser.py +2738 -2525
  9. Typhon/Grammar/parser.py +80 -53
  10. Typhon/Grammar/parser_helper.py +68 -87
  11. Typhon/Grammar/syntax_errors.py +31 -21
  12. Typhon/Grammar/token_factory_custom.py +541 -485
  13. Typhon/Grammar/tokenizer_custom.py +52 -0
  14. Typhon/Grammar/typhon_ast.py +372 -44
  15. Typhon/Grammar/typhon_ast_error.py +438 -0
  16. Typhon/LanguageServer/__init__.py +3 -0
  17. Typhon/LanguageServer/client/__init__.py +42 -0
  18. Typhon/LanguageServer/client/pyrefly.py +115 -0
  19. Typhon/LanguageServer/client/pyright.py +173 -0
  20. Typhon/LanguageServer/semantic_tokens.py +446 -0
  21. Typhon/LanguageServer/server.py +376 -0
  22. Typhon/LanguageServer/utils.py +65 -0
  23. Typhon/SourceMap/ast_match_based_map.py +199 -152
  24. Typhon/SourceMap/ast_matching.py +102 -87
  25. Typhon/SourceMap/datatype.py +27 -16
  26. Typhon/SourceMap/defined_name_retrieve.py +145 -0
  27. Typhon/Transform/comprehension_to_function.py +2 -5
  28. Typhon/Transform/const_member_to_final.py +12 -7
  29. Typhon/Transform/forbidden_statements.py +1 -0
  30. Typhon/Transform/optional_operators_to_checked.py +14 -6
  31. Typhon/Transform/scope_check_rename.py +44 -18
  32. Typhon/Transform/type_abbrev_desugar.py +11 -15
  33. Typhon/Transform/type_annotation_check_expand.py +2 -2
  34. Typhon/Transform/utils/imports.py +39 -4
  35. Typhon/Transform/utils/make_class.py +18 -23
  36. Typhon/Transform/visitor.py +25 -0
  37. Typhon/Typing/pyrefly.py +145 -0
  38. Typhon/Typing/pyright.py +2 -4
  39. Typhon/__main__.py +15 -1
  40. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/METADATA +7 -5
  41. typhon_language-0.1.4.dist-info/RECORD +65 -0
  42. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
  43. typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
  44. typhon_language-0.1.3.dist-info/RECORD +0 -53
  45. typhon_language-0.1.3.dist-info/licenses/LICENSE +0 -21
  46. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
  47. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0
Typhon/Grammar/parser.py CHANGED
@@ -1,53 +1,80 @@
- import ast
- import sys
- import tokenize
- import os
- import io
-
- from typing import (
-     Literal,
-     Union,
-     Optional,
- )
-
- from .tokenizer_custom import TokenizerCustom
- from .token_factory_custom import token_stream_factory
- from ._typhon_parser import parse
-
-
- def parse_file(
-     file_path: str,
-     py_version: Optional[tuple[int, int]] = None,
-     verbose: bool = False,
- ) -> ast.Module:
-     """Parse a file."""
-     with open(file_path) as f:
-         tok_stream = token_stream_factory(f.readline)
-         tokenizer = TokenizerCustom(tok_stream, verbose=verbose, path=file_path)
-         parsed = parse(
-             filename=os.path.basename(file_path),
-             tokenizer=tokenizer,
-             mode="file",
-             py_version=py_version,
-             verbose=verbose,
-         )
-         assert isinstance(parsed, ast.Module)
-         return parsed
-
-
- def parse_string(
-     source: str,
-     mode: Union[Literal["eval"], Literal["exec"]] = "exec",
-     py_version: Optional[tuple[int, int]] = None,
-     verbose: bool = False,
- ) -> ast.AST | None:
-     """Parse a string."""
-     tok_stream = token_stream_factory(io.StringIO(source).readline)
-     tokenizer = TokenizerCustom(tok_stream, verbose=verbose)
-     return parse(
-         filename="<string>",
-         tokenizer=tokenizer,
-         mode=mode if mode == "eval" else "file",
-         py_version=py_version,
-         verbose=verbose,
-     )
+ import ast
+ from pathlib import Path
+ import sys
+ import tokenize
+ import os
+ import io
+
+ from typing import (
+     Literal,
+     Union,
+     Optional,
+ )
+
+ from ..Driver.debugging import is_debug_verbose
+
+ from .tokenizer_custom import TokenizerCustom, show_token
+ from .token_factory_custom import token_stream_factory
+ from ._typhon_parser import parse
+ from .typhon_ast_error import gather_errors
+
+
+ def parse_file(
+     file_path: str,
+     py_version: Optional[tuple[int, int]] = None,
+     verbose: bool = False,
+ ) -> ast.Module:
+     """Parse a file."""
+     with open(file_path) as f:
+         if is_debug_verbose():
+             show_token(Path(file_path).read_text())
+         tok_stream = token_stream_factory(f.readline)
+         tokenizer = TokenizerCustom(tok_stream, verbose=verbose, path=file_path)
+         parsed = parse_tokenizer(
+             tokenizer,
+             py_version=py_version,
+             verbose=verbose,
+         )
+         assert isinstance(parsed, ast.Module), f"Parsing failed: {parsed}"
+         gather_errors(parsed)
+         return parsed
+
+
+ def parse_tokenizer(
+     tokenizer: TokenizerCustom,
+     py_version: Optional[tuple[int, int]] = None,
+     verbose: bool = False,
+ ) -> ast.AST:
+     """Parse using a tokenizer."""
+     parsed = parse(
+         filename="<tokenizer>",
+         tokenizer=tokenizer,
+         mode="file",
+         py_version=py_version,
+         verbose=verbose,
+     )
+     # Must be successful parse
+     assert isinstance(parsed, ast.AST), f"Parsing failed: {parsed}"
+     gather_errors(parsed)
+     return parsed
+
+
+ def parse_string(
+     source: str,
+     mode: Union[Literal["eval"], Literal["exec"]] = "exec",
+     py_version: Optional[tuple[int, int]] = None,
+     verbose: bool = False,
+ ) -> ast.AST | None:
+     """Parse a string."""
+     tok_stream = token_stream_factory(io.StringIO(source).readline)
+     tokenizer = TokenizerCustom(tok_stream, verbose=verbose)
+     parsed = parse(
+         filename="<string>",
+         tokenizer=tokenizer,
+         mode=mode if mode == "eval" else "file",
+         py_version=py_version,
+         verbose=verbose,
+     )
+     if parsed:
+         gather_errors(parsed)
+     return parsed
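For orientation, a rough sketch of how the reworked entry points might be driven. The import paths follow the package layout above, but whether gather_errors feeds get_syntax_error_in_module (defined in syntax_errors.py further down) is an assumption, not something this diff shows:

import ast

# Hypothetical usage sketch; the error plumbing between gather_errors and
# get_syntax_error_in_module is assumed, not shown in this diff.
from Typhon.Grammar.parser import parse_string
from Typhon.Grammar.syntax_errors import get_syntax_error_in_module

tree = parse_string("x = 1\n", mode="exec")
if isinstance(tree, ast.Module):
    # With call_invalid_rules enabled, recoverable errors may be attached to
    # the tree instead of aborting the parse.
    for err in get_syntax_error_in_module(tree) or []:
        print(f"{err.filename}:{err.lineno}: {err.msg}")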
Typhon/Grammar/parser_helper.py CHANGED
@@ -6,28 +6,33 @@ import sys
  import tokenize
 
  import enum
- import io
- import itertools
- import os
- import token
  from typing import (
      Any,
      Callable,
      Iterator,
      List,
      Literal,
-     NoReturn,
      Sequence,
+     Unpack,
      Tuple,
      TypeVar,
      Union,
      Optional,
      Protocol,
      cast,
+     Type,
  )
 
  from pegen.tokenizer import Tokenizer
  from pegen.parser import P, memoize, memoize_left_rec, logger, Parser as PegenParser
+ from ..Driver.debugging import is_debug_first_error
+ from .typhon_ast import (
+     set_anonymous_name_id,
+     PosAttributes,
+     get_pos_attributes,
+     get_anonymous_base_name,
+ )
+ from .syntax_errors import SkipTokensError, ExpectedTokenError
 
  EXPR_NAME_MAPPING: dict[type, str] = {
      ast.Attribute: "attribute",
@@ -57,6 +62,7 @@ EXPR_NAME_MAPPING: dict[type, str] = {
      ast.NamedExpr: "named expression",
  }
 
+
  # Singleton ast nodes, created once for efficiency
  Load = ast.Load()
  Store = ast.Store()
@@ -83,6 +89,7 @@ class PositionedNode(Protocol):
  class Parser(PegenParser):
      #: Name of the source file, used in error reports
      filename: str
+     _anonymous_id = 0
 
      def __init__(
          self,
@@ -97,8 +104,10 @@ class Parser(PegenParser):
          self.py_version = (
              min(py_version, sys.version_info) if py_version else sys.version_info
          )
+         # Note "invalid_*" rules returns None. Cannot use for error recovery.
+         self.call_invalid_rules = True
 
-     def parse(self, rule: str, call_invalid_rules: bool = False) -> Optional[ast.AST]:
+     def parse(self, rule: str, call_invalid_rules: bool = True) -> Optional[ast.AST]:
          old = self.call_invalid_rules
          self.call_invalid_rules = call_invalid_rules
          res = getattr(self, rule)()
@@ -135,6 +144,15 @@ class Parser(PegenParser):
              f"{error_msg} is only supported in Python {min_version} and above."
          )
 
+     def make_anonymous_name(
+         self, ctx: ast.expr_context, **kwargs: Unpack[PosAttributes]
+     ) -> tuple[ast.Name, int]:
+         anon_id = self._anonymous_id
+         self._anonymous_id += 1
+         name = ast.Name(f"{get_anonymous_base_name()}_{anon_id}", ctx, **kwargs)
+         set_anonymous_name_id(name, anon_id)
+         return name, anon_id
+
      def raise_indentation_error(self, msg: str) -> None:
          """Raise an indentation error."""
          last_token = self._tokenizer.diagnose()
@@ -359,89 +377,52 @@ class Parser(PegenParser):
          arg.type_comment = type_comment
          return arg
 
-     def make_arguments(
-         self,
-         pos_only: Optional[List[Tuple[ast.arg, None]]],
-         pos_only_with_default: List[Tuple[ast.arg, Any]],
-         param_no_default: Optional[List[ast.arg]],
-         param_default: Optional[List[Tuple[ast.arg, Any]]],
-         after_star: Optional[
-             Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]
-         ],
-     ) -> ast.arguments:
-         """Build a function definition arguments."""
-         defaults = (
-             [d for _, d in pos_only_with_default if d is not None]
-             if pos_only_with_default
-             else []
-         )
-         defaults += (
-             [d for _, d in param_default if d is not None] if param_default else []
-         )
-
-         pos_only = pos_only or pos_only_with_default
-
-         # Because we need to combine pos only with and without default even
-         # the version with no default is a tuple
-         pos_only_args = [p for p, _ in pos_only]
-         params = (param_no_default or []) + (
-             [p for p, _ in param_default] if param_default else []
-         )
-
-         # If after_star is None, make a default tuple
-         after_star = after_star or (None, [], None)
-
-         return ast.arguments(
-             posonlyargs=pos_only_args,
-             args=params,
-             defaults=defaults,
-             vararg=after_star[0],
-             kwonlyargs=[p for p, _ in after_star[1]],
-             kw_defaults=[d for _, d in after_star[1]],
-             kwarg=after_star[2],
-         )
-
-     def _build_syntax_error(
+     def build_syntax_error[T: SyntaxError](
          self,
          message: str,
-         start: Optional[Tuple[int, int]] = None,
-         end: Optional[Tuple[int, int]] = None,
-     ) -> SyntaxError:
-         line_from_token = start is None and end is None
-         if start is None or end is None:
-             tok = self._tokenizer.diagnose()
-             start = start or tok.start
-             end = end or tok.end
-
-         if line_from_token:
-             line = tok.line
-         else:
-             # End is used only to get the proper text
-             line = "\n".join(
-                 self._tokenizer.get_lines(list(range(start[0], end[0] + 1)))
-             )
+         start: Tuple[int, int],
+         end: Tuple[int, int],
+         error_type: Type[T] = SyntaxError,
+     ) -> T:
+         # End is used only to get the proper text
+         line = "\n".join(self._tokenizer.get_lines(list(range(start[0], end[0] + 1))))
 
          # tokenize.py index column offset from 0 while Cpython index column
          # offset at 1 when reporting SyntaxError, so we need to increment
          # the column offset when reporting the error.
-         args = (self.filename, start[0], start[1] + 1, line)
+         # But in Typhon, first we take all the ranges as 0-based.
+         args = (self.filename, start[0], start[1], line)
          if sys.version_info >= (3, 10):
-             args += (end[0], end[1] + 1)
+             args += (end[0], end[1])
 
-         return SyntaxError(message, args)
+         result = error_type(message, args)
+         if is_debug_first_error():
+             raise result
+         return result
 
-     def raise_raw_syntax_error(
+     def build_expected_error(
+         self, message: str, start: Tuple[int, int], end: Tuple[int, int]
+     ) -> SyntaxError:
+         error = self.build_syntax_error(
+             f"expected {message}", start, end, ExpectedTokenError
+         )
+         error.expected = message
+         return error
+
+     def build_skip_tokens_error(
          self,
-         message: str,
-         start: Optional[Tuple[int, int]] = None,
-         end: Optional[Tuple[int, int]] = None,
-     ) -> NoReturn:
-         raise self._build_syntax_error(message, start, end)
+         tokens: list[tokenize.TokenInfo],
+         start: Tuple[int, int],
+         end: Tuple[int, int],
+     ) -> SyntaxError:
+         error = self.build_syntax_error("unknown tokens", start, end, SkipTokensError)
+         error.tokens = tokens
+         return error
 
-     def make_syntax_error(
-         self, message: str, filename: str = "<unknown>"
+     def raise_raw_syntax_error(
+         self, message: str, start: Tuple[int, int], end: Tuple[int, int]
      ) -> SyntaxError:
-         return self._build_syntax_error(message)
+         return self.build_syntax_error(message, start, end)
 
      def expect_forced(self, res: Any, expectation: str) -> Optional[tokenize.TokenInfo]:
          if res is None:
@@ -456,10 +437,10 @@ class Parser(PegenParser):
          )
          return res
 
-     def raise_syntax_error(self, message: str) -> NoReturn:
+     def raise_syntax_error(self, message: str) -> SyntaxError:
          """Raise a syntax error."""
          tok = self._tokenizer.diagnose()
-         raise self._build_syntax_error(
+         return self.build_syntax_error(
              message,
              tok.start,
              tok.end if sys.version_info >= (3, 12) or tok.type != 4 else tok.start,
@@ -467,7 +448,7 @@ class Parser(PegenParser):
 
      def raise_syntax_error_known_location(
          self, message: str, node: PositionedNode | tokenize.TokenInfo
-     ) -> NoReturn:
+     ) -> SyntaxError:
          """Raise a syntax error that occured at a given AST node."""
          if isinstance(node, tokenize.TokenInfo):
              start = node.start
@@ -476,14 +457,14 @@ class Parser(PegenParser):
              start = node.lineno, node.col_offset
              end = node.end_lineno, node.end_col_offset
 
-         raise self._build_syntax_error(message, start, end)
+         return self.build_syntax_error(message, start, end)
 
      def raise_syntax_error_known_range(
          self,
          message: str,
          start_node: Union[PositionedNode, tokenize.TokenInfo],
          end_node: Union[PositionedNode, tokenize.TokenInfo],
-     ) -> NoReturn:
+     ) -> SyntaxError:
          if isinstance(start_node, tokenize.TokenInfo):
              start = start_node.start
          else:
@@ -494,11 +475,11 @@ class Parser(PegenParser):
          else:
              end = end_node.end_lineno, end_node.end_col_offset
 
-         raise self._build_syntax_error(message, start, end)
+         return self.build_syntax_error(message, start, end)
 
      def raise_syntax_error_starting_from(
          self, message: str, start_node: Union[PositionedNode, tokenize.TokenInfo]
-     ) -> NoReturn:
+     ) -> SyntaxError:
          if isinstance(start_node, tokenize.TokenInfo):
              start = start_node.start
          else:
@@ -506,7 +487,7 @@ class Parser(PegenParser):
 
          last_token = self._tokenizer.diagnose()
 
-         raise self._build_syntax_error(message, start, last_token.start)
+         return self.build_syntax_error(message, start, last_token.start)
 
      def raise_syntax_error_invalid_target(
          self, target: Target, node: Optional[ast.AST]
@@ -527,6 +508,6 @@ class Parser(PegenParser):
              msg, cast(PositionedNode, invalid_target)
          )
 
-     def raise_syntax_error_on_next_token(self, message: str) -> NoReturn:
+     def raise_syntax_error_on_next_token(self, message: str) -> SyntaxError:
          next_token = self._tokenizer.peek()
-         raise self._build_syntax_error(message, next_token.start, next_token.end)
+         return self.build_syntax_error(message, next_token.start, next_token.end)
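The net effect of the Parser changes above is that the raise_syntax_error_* helpers now build and return a SyntaxError (or subclass) rather than raising it, raising eagerly only when the debug-first-error switch is on. A minimal sketch of the resulting pattern; the recording helpers and the "_recovered_errors" accumulator are invented for illustration and are not part of the package:

from tokenize import TokenInfo

# Hypothetical wrappers around the builders shown above.
def record_expected(parser, expected: str,
                    start: tuple[int, int], end: tuple[int, int]) -> None:
    err = parser.build_expected_error(expected, start, end)  # returns an ExpectedTokenError
    parser.__dict__.setdefault("_recovered_errors", []).append(err)

def record_skipped(parser, tokens: list[TokenInfo],
                   start: tuple[int, int], end: tuple[int, int]) -> None:
    err = parser.build_skip_tokens_error(tokens, start, end)  # returns a SkipTokensError
    # err.tokens carries the TokenInfo objects that recovery skipped over.
    parser.__dict__.setdefault("_recovered_errors", []).append(err)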
Typhon/Grammar/syntax_errors.py CHANGED
@@ -1,5 +1,6 @@
  import ast
  from pathlib import Path
+ from tokenize import TokenInfo
  from .typhon_ast import PosAttributes
  from typing import Unpack, Callable
  from ..Driver.diagnostic import diag_error_file_position, positioned_source_code
@@ -9,7 +10,16 @@ from ..Driver.debugging import debug_print, is_debug_first_error
  _SYNTAX_ERROR_IN_MODULE = "_typh_syntax_error_in_module"
 
 
- class TyphonSyntaxError(Exception):
+ # Error recovery skips unrecognized tokens
+ class SkipTokensError(SyntaxError):
+     tokens: list[TokenInfo]
+
+
+ class ExpectedTokenError(SyntaxError):
+     expected: str
+
+
+ class TyphonTransformSyntaxError(SyntaxError):
      message: str
      pos: PosAttributes
 
@@ -22,20 +32,18 @@ class TyphonSyntaxError(Exception):
          return f"{self.message} at {self.pos}"
 
 
- def set_syntax_error_in_module(
-     module: ast.Module, error_details: list[TyphonSyntaxError]
- ):
-     setattr(module, _SYNTAX_ERROR_IN_MODULE, error_details)
+ def set_syntax_error(node: ast.AST, error_details: list[SyntaxError]):
+     setattr(node, _SYNTAX_ERROR_IN_MODULE, error_details)
 
 
- def add_syntax_error_in_module(module: ast.Module, error_detail: TyphonSyntaxError):
+ def add_syntax_error_in_module(module: ast.Module, error_detail: SyntaxError):
      if not hasattr(module, _SYNTAX_ERROR_IN_MODULE):
          setattr(module, _SYNTAX_ERROR_IN_MODULE, [])
      error_list = getattr(module, _SYNTAX_ERROR_IN_MODULE)
      error_list.append(error_detail)
 
 
- def get_syntax_error_in_module(module: ast.Module) -> list[TyphonSyntaxError] | None:
+ def get_syntax_error_in_module(module: ast.Module) -> list[SyntaxError] | None:
      return getattr(module, _SYNTAX_ERROR_IN_MODULE, None)
 
 
@@ -49,9 +57,9 @@ def clear_syntax_error_in_module(module: ast.Module) -> None:
 
 
  class TyphonSyntaxErrorList(Exception):
-     errors: list[TyphonSyntaxError]
+     errors: list[SyntaxError]
 
-     def __init__(self, errors: list[TyphonSyntaxError]):
+     def __init__(self, errors: list[SyntaxError]):
          self.errors = errors
          super().__init__(self.errors)
 
@@ -65,7 +73,9 @@ def raise_from_module_syntax_errors(module: ast.Module):
          raise TyphonSyntaxErrorList(errors)
 
 
- def handle_syntax_error(module: ast.Module, syntax_error: TyphonSyntaxError) -> None:
+ def handle_syntax_error(
+     module: ast.Module, syntax_error: TyphonTransformSyntaxError
+ ) -> None:
      if is_debug_first_error():
          debug_print(f"Raising syntax error: {syntax_error}")
          raise syntax_error
@@ -78,12 +88,12 @@ def try_handle_syntax_error_or[T](
  ) -> T:
      try:
          return maybe_syntax_error()
-     except TyphonSyntaxError as syntax_error:
+     except TyphonTransformSyntaxError as syntax_error:
          handle_syntax_error(module, syntax_error)
          return orelse
 
 
- class ScopeError(TyphonSyntaxError):
+ class ScopeError(TyphonTransformSyntaxError):
      pass
 
 
@@ -94,7 +104,7 @@ def raise_scope_error(
      raise ScopeError(message, **pos)
 
 
- class ForbiddenStatementError(TyphonSyntaxError):
+ class ForbiddenStatementError(TyphonTransformSyntaxError):
      pass
 
 
@@ -105,7 +115,7 @@ def raise_forbidden_statement_error(
      raise ForbiddenStatementError(message, **pos)
 
 
- class TypeAnnotationError(TyphonSyntaxError):
+ class TypeAnnotationError(TyphonTransformSyntaxError):
      pass
 
 
@@ -116,7 +126,7 @@ def raise_type_annotation_error(
      raise TypeAnnotationError(message, **pos)
 
 
- class LetMissingElseError(TyphonSyntaxError):
+ class LetMissingElseError(TyphonTransformSyntaxError):
      pass
 
 
@@ -128,18 +138,18 @@ def raise_let_missing_else_error(
 
 
  def _get_range_of_error(
-     syntax_error: SyntaxError | TyphonSyntaxError,
+     syntax_error: SyntaxError | TyphonTransformSyntaxError,
  ) -> Range:
-     if isinstance(syntax_error, TyphonSyntaxError):
+     if isinstance(syntax_error, TyphonTransformSyntaxError):
          return Range.from_pos_attr_may_not_end(syntax_error.pos)
      else:
          return Range.from_syntax_error(syntax_error)
 
 
  def _get_message_of_error(
-     syntax_error: SyntaxError | TyphonSyntaxError,
+     syntax_error: SyntaxError | TyphonTransformSyntaxError,
  ) -> str:
-     if isinstance(syntax_error, TyphonSyntaxError):
+     if isinstance(syntax_error, TyphonTransformSyntaxError):
          return syntax_error.message
      else:
          return syntax_error.msg
@@ -152,7 +162,7 @@ def raise_must_have_resolved(msg: str):
 
 
  def diag_error(
-     syntax_error: SyntaxError | TyphonSyntaxError,
+     syntax_error: SyntaxError | TyphonTransformSyntaxError,
      source: Path,
      source_code: str,
  ) -> str:
@@ -173,7 +183,7 @@
 
 
  def diag_errors(
-     syntax_error: SyntaxError | TyphonSyntaxError | TyphonSyntaxErrorList,
+     syntax_error: SyntaxError | TyphonTransformSyntaxError | TyphonSyntaxErrorList,
      source: Path,
      source_code: str,
  ) -> str:
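Taken together, these hooks let a transform pass accumulate recoverable errors on a module and raise them as one batch. A short illustrative sketch; the "no global statements" rule is a stand-in rather than a real Typhon rule, and only the imported names come from this file:

import ast

# Hypothetical pass built on the functions shown in the diff above.
from Typhon.Grammar.syntax_errors import (
    add_syntax_error_in_module,
    raise_from_module_syntax_errors,
    TyphonSyntaxErrorList,
)

def check_module(module: ast.Module) -> None:
    for node in ast.walk(module):
        if isinstance(node, ast.Global):  # stand-in check for illustration
            add_syntax_error_in_module(module, SyntaxError("global statement not allowed"))
    try:
        # Raises TyphonSyntaxErrorList if any errors were recorded on the module.
        raise_from_module_syntax_errors(module)
    except TyphonSyntaxErrorList as exc:
        for err in exc.errors:
            print(err)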