Typhon-Language 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. Typhon/Driver/configs.py +14 -0
  2. Typhon/Driver/debugging.py +148 -5
  3. Typhon/Driver/diagnostic.py +4 -3
  4. Typhon/Driver/language_server.py +25 -0
  5. Typhon/Driver/run.py +1 -1
  6. Typhon/Driver/translate.py +14 -10
  7. Typhon/Driver/utils.py +39 -1
  8. Typhon/Grammar/_typhon_parser.py +2738 -2525
  9. Typhon/Grammar/parser.py +80 -53
  10. Typhon/Grammar/parser_helper.py +68 -87
  11. Typhon/Grammar/syntax_errors.py +31 -21
  12. Typhon/Grammar/token_factory_custom.py +541 -485
  13. Typhon/Grammar/tokenizer_custom.py +52 -0
  14. Typhon/Grammar/typhon_ast.py +372 -44
  15. Typhon/Grammar/typhon_ast_error.py +438 -0
  16. Typhon/LanguageServer/__init__.py +3 -0
  17. Typhon/LanguageServer/client/__init__.py +42 -0
  18. Typhon/LanguageServer/client/pyrefly.py +115 -0
  19. Typhon/LanguageServer/client/pyright.py +173 -0
  20. Typhon/LanguageServer/semantic_tokens.py +446 -0
  21. Typhon/LanguageServer/server.py +376 -0
  22. Typhon/LanguageServer/utils.py +65 -0
  23. Typhon/SourceMap/ast_match_based_map.py +199 -152
  24. Typhon/SourceMap/ast_matching.py +102 -87
  25. Typhon/SourceMap/datatype.py +27 -16
  26. Typhon/SourceMap/defined_name_retrieve.py +145 -0
  27. Typhon/Transform/comprehension_to_function.py +2 -5
  28. Typhon/Transform/const_member_to_final.py +12 -7
  29. Typhon/Transform/forbidden_statements.py +1 -0
  30. Typhon/Transform/optional_operators_to_checked.py +14 -6
  31. Typhon/Transform/scope_check_rename.py +44 -18
  32. Typhon/Transform/type_abbrev_desugar.py +11 -15
  33. Typhon/Transform/type_annotation_check_expand.py +2 -2
  34. Typhon/Transform/utils/imports.py +39 -4
  35. Typhon/Transform/utils/make_class.py +18 -23
  36. Typhon/Transform/visitor.py +25 -0
  37. Typhon/Typing/pyrefly.py +145 -0
  38. Typhon/Typing/pyright.py +2 -4
  39. Typhon/__main__.py +15 -1
  40. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/METADATA +7 -5
  41. typhon_language-0.1.4.dist-info/RECORD +65 -0
  42. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
  43. typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
  44. typhon_language-0.1.3.dist-info/RECORD +0 -53
  45. typhon_language-0.1.3.dist-info/licenses/LICENSE +0 -21
  46. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
  47. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0
Typhon/Grammar/tokenizer_custom.py

@@ -2,10 +2,13 @@ from typing import NamedTuple
  from tokenize import TokenInfo, OP, NAME
  import tokenize
  import token
+ import io
  from pegen.tokenizer import Tokenizer as PegenTokenizer
  from typing import override
  from .line_break import line_breakable_after, line_breakable_before
  from .typhon_ast import get_postfix_operator_temp_name
+ from ..Driver.debugging import debug_verbose_print
+ from .token_factory_custom import token_stream_factory, generate_tokens_ignore_error


  # Combine 2 sequential tokens (optionally with no space between) into 1
@@ -186,6 +189,7 @@ class TokenizerCustom(PegenTokenizer):
          while True:
              tok = next(self._tokengen)
              if self._is_token_to_skip(tok):
+                 self._all_tokens.append(tok)
                  continue
              if tok.type == token.ENDMARKER:
                  self._end_tok = tok
@@ -209,6 +213,7 @@ class TokenizerCustom(PegenTokenizer):

      def _commit_token(self, tok: TokenInfo) -> None:
          self._tokens.append(tok)
+         self._all_tokens.append(tok)
          if not self._path and tok.start[0] not in self._lines:
              self._lines[tok.start[0]] = tok.line

@@ -295,3 +300,50 @@
                  continue
              self._commit_token(tok)
          return self._tokens[self._index]
+
+     def read_all_tokens(self) -> list[TokenInfo]:
+         """Return all tokens, including comments."""
+         # Force consumption of all remaining tokens.
+         debug_verbose_print("Reading all tokens for tokenizer.")
+         while tok := self.getnext():
+             debug_verbose_print(f"  Token: {tok}")
+             if tok.type == token.ENDMARKER:
+                 break
+         debug_verbose_print("Finished reading all tokens.")
+         self.reset(0)
+         return sorted(self._all_tokens, key=lambda t: t.start)
+
+
+ def tokenizer_for_file(file_path: str) -> TokenizerCustom:
+     """Tokenize the specified file."""
+     with open(file_path) as f:
+         tok_stream = token_stream_factory(f.readline)
+         tokenizer = TokenizerCustom(tok_stream, path=file_path)
+     return tokenizer
+
+
+ def tokenizer_for_string(source: str) -> TokenizerCustom:
+     """Tokenize the specified string."""
+     tok_stream = token_stream_factory(io.StringIO(source).readline)
+     tokenizer = TokenizerCustom(tok_stream)
+     return tokenizer
+
+
+ def show_token(
+     source: str, show_typhon_token: bool = True, show_python_token: bool = True
+ ):
+     if show_python_token:
+         print("Tokens of Python tokenizer:")
+         for tok in generate_tokens_ignore_error(io.StringIO(source).readline):
+             print(f"  {tok}")
+     if show_typhon_token:
+         print("Tokens of Typhon Token Factory:")
+         tok_stream = token_stream_factory(io.StringIO(source).readline)
+         for tok in tok_stream:
+             print(f"  {tok}")
+         print("Tokens of Typhon Custom tokenizer:")
+         tok_stream = token_stream_factory(io.StringIO(source).readline)
+         tokenizer = TokenizerCustom(tok_stream, verbose=True)
+         tokens = tokenizer.read_all_tokens()
+         for tok in tokens:
+             print(f"  {tok}")
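The new module-level helpers make the custom tokenizer usable outside the parser driver. A minimal usage sketch, assuming typhon-language is installed and importable as Typhon (the source snippet is a hypothetical example):

    from Typhon.Grammar.tokenizer_custom import tokenizer_for_string, show_token

    source = "x = 1  # a comment\n"       # hypothetical input
    tokenizer = tokenizer_for_string(source)
    # read_all_tokens() drains the stream, resets the index, and returns every
    # token (normally-skipped comment tokens included), sorted by start position.
    for tok in tokenizer.read_all_tokens():
        print(tok)
    show_token(source)                    # dump all three token views for comparison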
Typhon/Grammar/typhon_ast.py

@@ -1,11 +1,26 @@
  # Ast Extensions for Typhon
- 
+ from __future__ import annotations
+ from re import A
+ from typing import (
+     Union,
+     Unpack,
+     TypedDict,
+     Tuple,
+     cast,
+     TYPE_CHECKING,
+     Optional,
+     List,
+     Any,
+ )
  import ast
- from typing import Union, Unpack, TypedDict, Tuple, cast
  from dataclasses import dataclass
  from copy import copy
+ from tokenize import TokenInfo
  from ..Driver.debugging import debug_print, debug_verbose_print

+ if TYPE_CHECKING:
+     from .parser_helper import Parser
+

  # Same as ast module's position attributes
  class PosAttributes(TypedDict):
@@ -15,6 +30,25 @@ class PosAttributes(TypedDict):
      end_col_offset: int | None


+ def unpack_pos_default(pos: PosAttributes) -> Tuple[int, int, int, int]:
+     return (
+         pos["lineno"],
+         pos["col_offset"],
+         pos["end_lineno"] or pos["lineno"],
+         pos["end_col_offset"] or pos["col_offset"] + 1,
+     )
+
+
+ def unpack_pos_tuple(pos: PosAttributes) -> Tuple[Tuple[int, int], Tuple[int, int]]:
+     return (
+         (pos["lineno"], pos["col_offset"]),
+         (
+             pos["end_lineno"] or pos["lineno"],
+             pos["end_col_offset"] or pos["col_offset"] + 1,
+         ),
+     )
+
+
  class PosRange(TypedDict):
      lineno: int
      col_offset: int
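Both unpack helpers substitute fallbacks when the end position is missing: the end line falls back to the start line, and the end column to the start column plus one. A worked check, assuming the definitions above (note a falsy end_col_offset of 0 would also trigger the fallback):

    pos = PosAttributes(lineno=3, col_offset=8, end_lineno=None, end_col_offset=None)
    assert unpack_pos_default(pos) == (3, 8, 3, 9)    # flat 4-tuple
    assert unpack_pos_tuple(pos) == ((3, 8), (3, 9))  # ((line, col), (line, col))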
@@ -46,13 +80,21 @@ type PosNode = (
      | ast.excepthandler
      | ast.pattern
      | ast.keyword
+     | ast.match_case
  )


- def get_pos_attributes(node: PosNode) -> PosAttributes:
+ def get_pos_attributes(node: PosNode | TokenInfo) -> PosAttributes:
+     if isinstance(node, TokenInfo):
+         return PosAttributes(
+             lineno=node.start[0],
+             col_offset=node.start[1],
+             end_lineno=node.end[0],
+             end_col_offset=node.end[1],
+         )
      return PosAttributes(
-         lineno=node.lineno,
-         col_offset=node.col_offset,
+         lineno=getattr(node, "lineno", 1),
+         col_offset=getattr(node, "col_offset", 0),
          end_lineno=getattr(node, "end_lineno", None),
          end_col_offset=getattr(node, "end_col_offset", None),
      )
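get_pos_attributes now also accepts a raw TokenInfo, mapping its (row, col) start/end pairs onto the ast-style attribute dict, and it no longer assumes lineno/col_offset exist on AST nodes (defaulting to 1 and 0). A sketch, assuming the definitions above:

    import token
    from tokenize import TokenInfo

    tok = TokenInfo(token.NAME, "x", start=(2, 4), end=(2, 5), line="    x\n")
    assert get_pos_attributes(tok) == PosAttributes(
        lineno=2, col_offset=4, end_lineno=2, end_col_offset=5
    )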
@@ -70,16 +112,16 @@ def get_pos_attributes_if_exists(node: ast.AST) -> PosAttributes | None:


  def get_empty_pos_attributes() -> PosAttributes:
+     # Python ast positions are 1-based for lines and 0-based for columns
      return PosAttributes(
-         lineno=0,
+         lineno=1,
          col_offset=0,
-         end_lineno=0,
+         end_lineno=1,
          end_col_offset=0,
      )


  _ANONYMOUS_NAME = "_typh_anonymous"
- _anonymous_global_id = 0


  def set_anonymous_name_id(node: ast.Name, id: int) -> ast.Name:
@@ -87,6 +129,10 @@ def set_anonymous_name_id(node: ast.Name, id: int) -> ast.Name:
      return node


+ def get_anonymous_base_name() -> str:
+     return _ANONYMOUS_NAME
+
+
  def get_anonymous_name_id(node: ast.Name) -> int | None:
      return getattr(node, _ANONYMOUS_NAME, None)

@@ -100,17 +146,6 @@ def is_anonymous_name(node: ast.Name) -> bool:
      return hasattr(node, _ANONYMOUS_NAME)


- def make_anonymous_name(
-     ctx: ast.expr_context, **kwargs: Unpack[PosAttributes]
- ) -> tuple[ast.Name, int]:
-     global _anonymous_global_id
-     anon_id = _anonymous_global_id
-     name = ast.Name(f"{_ANONYMOUS_NAME}_{anon_id}", ctx, **kwargs)
-     set_anonymous_name_id(name, anon_id)
-     _anonymous_global_id += 1
-     return name, anon_id
-
-
  def copy_anonymous_name(src: ast.Name, ctx: ast.expr_context) -> ast.Name:
      result = ast.Name(src.id, ctx, **get_pos_attributes(src))
      anon_id = get_anonymous_name_id(src)
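With make_anonymous_name removed, the global id counter is gone: anonymous names are now minted by the parser, so ids are scoped to a single parse rather than to the whole process. The real implementation lives in parser_helper.py and is not shown in this diff; judging from the call sites below, it behaves roughly like this hypothetical reconstruction:

    # Hypothetical sketch of the parser-side counterpart; the actual code is in
    # Typhon/Grammar/parser_helper.py and may differ.
    class ParserSketch:
        def __init__(self) -> None:
            self._anonymous_id = 0  # per-parser counter, reset for each parse

        def make_anonymous_name(self, ctx, **kwargs):
            anon_id = self._anonymous_id
            name = ast.Name(f"{get_anonymous_base_name()}_{anon_id}", ctx, **kwargs)
            set_anonymous_name_id(name, anon_id)
            self._anonymous_id += 1
            return name, anon_id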
@@ -119,6 +154,13 @@ def copy_anonymous_name(src: ast.Name, ctx: ast.expr_context) -> ast.Name:
      return result


+ _TYPE_INVALID_NAME = "_typh_invalid_name"
+
+
+ def get_invalid_name() -> str:
+     return _TYPE_INVALID_NAME
+
+
  _TYPE_IGNORE_NODES = "_typh_type_ignore"


@@ -147,6 +189,24 @@ def clear_type_ignore_node(node: ast.AST) -> None:
      delattr(node, _TYPE_IGNORE_NODES)


+ # A name is internal when it has no counterpart in the input Typhon source code.
+ _INTERNAL_NAME = "_typh_internal_name"
+
+
+ def is_internal_name(name: ast.Name) -> bool:
+     return getattr(name, _INTERNAL_NAME, False)
+
+
+ def set_is_internal_name(name: ast.Name, is_internal: bool = True) -> ast.Name:
+     setattr(name, _INTERNAL_NAME, is_internal)
+     return name
+
+
+ def clear_internal_name(name: ast.Name) -> None:
+     if hasattr(name, _INTERNAL_NAME):
+         delattr(name, _INTERNAL_NAME)
+
+
  # Normal assignments, let assignments for variable declarations,
  # and constant assignments for constant definitions.
  # They all are Assign/AnnAssign in Python; we distinguish them by
@@ -476,6 +536,7 @@ def declaration_as_withitem(assign: Union[ast.Assign, ast.AnnAssign]) -> ast.wit


  def _make_with_let_pattern(
+     parser: Parser,
      is_async: bool,
      decl_type: str,
      pattern_subjects: list[tuple[ast.pattern, ast.expr]],
@@ -485,7 +546,9 @@
      items: list[ast.withitem] = []
      pattern_vars: list[tuple[ast.pattern, ast.expr]] = []
      for pattern, subject in pattern_subjects:
-         var, var_id = make_anonymous_name(ast.Store(), **get_pos_attributes(pattern))
+         var, var_id = parser.make_anonymous_name(
+             ast.Store(), **get_pos_attributes(pattern)
+         )
          item = ast.withitem(context_expr=subject, optional_vars=var)
          _set_is_let_var(item, decl_type)
          items.append(item)
@@ -503,6 +566,7 @@


  def make_with_let_pattern(
+     parser: Parser,
      is_async: bool,
      decl_type: str,
      pattern_subjects: list[tuple[ast.pattern, ast.expr]],
@@ -510,6 +574,7 @@ def make_with_let_pattern(
      **kwargs: Unpack[PosAttributes],
  ) -> ast.With | ast.AsyncWith:
      let_pattern_stmt, items = _make_with_let_pattern(
+         parser,
          is_async,
          decl_type,
          pattern_subjects,
@@ -526,6 +591,7 @@


  def make_inline_with_let_pattern(
+     parser: Parser,
      is_async: bool,
      decl_type: str,
      pattern_subjects: list[tuple[ast.pattern, ast.expr]],
@@ -551,6 +617,7 @@ def make_inline_with_let_pattern(
          pass
      """
      let_pattern_stmt, items = _make_with_let_pattern(
+         parser,
          is_async,
          decl_type,
          pattern_subjects,
@@ -588,6 +655,7 @@ def set_function_literal_def(
  ):
      setattr(name, _FUNC_DEF, func_def)
      setattr(func_def, _IS_FUNCTION_LITERAL, True)
+     set_is_internal_name(name)


  def clear_function_literal_def(name: FunctionLiteral):
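set_is_internal_name follows the module's usual pattern of tagging nodes with out-of-band attributes rather than subclassing; function-literal names are marked internal because they never appear in the Typhon source. A quick behavioral check, assuming the helpers defined above:

    import ast

    name = ast.Name(id="_typh_fn_0", ctx=ast.Load())
    assert not is_internal_name(name)   # unset tag reads as False
    set_is_internal_name(name)
    assert is_internal_name(name)
    clear_internal_name(name)           # safe even when the tag is absent
    assert not is_internal_name(name)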
@@ -798,6 +866,7 @@ def make_for_stmt(


  def make_for_let_pattern(
+     parser: Parser,
      decl_type: str,
      pattern: ast.pattern,
      iter: ast.expr,
@@ -807,7 +876,9 @@
      is_async: bool,
      **kwargs: Unpack[PosAttributes],
  ):
-     temp_name, anon_id = make_anonymous_name(ast.Load(), **get_pos_attributes(pattern))
+     temp_name, anon_id = parser.make_anonymous_name(
+         ast.Load(), **get_pos_attributes(pattern)
+     )
      let_stmt = make_if_let(
          decl_type,
          pattern_subjects=[(pattern, temp_name)],
@@ -883,8 +954,26 @@ def clear_is_let_else(node: LetElseAnnotatedNode) -> None:
      delattr(node, _IS_LET_ELSE)


+ def _let_pattern_check(
+     parser: Parser,
+     decl_type_str: str,
+     pattern_subjects: list[tuple[ast.pattern, ast.expr]],
+     start_pos: tuple[int, int],
+     end_pos: tuple[int, int],
+ ) -> bool:
+     if decl_type_str != "let":
+         error = parser.build_syntax_error(
+             "declaration pattern must be 'let' declaration", start_pos, end_pos
+         )
+     if len(pattern_subjects) == 0:
+         parser.build_syntax_error(
+             "declaration pattern must have at least one pattern", start_pos, end_pos
+         )
+     return True
+
+
  def make_if_let(
-     decl_type: str,
+     decl_type: TokenInfo | str,
      pattern_subjects: list[tuple[ast.pattern, ast.expr]],
      cond: ast.expr | None,
      body: list[ast.stmt],
@@ -892,15 +981,13 @@
      is_let_else: bool,
      **kwargs: Unpack[PosAttributes],
  ) -> ast.stmt:
-     if len(pattern_subjects) == 0:
-         raise SyntaxError("if let must have at least one pattern")
-     else:
-         return set_is_let_else(
-             _make_if_let_multiple(
-                 decl_type, pattern_subjects, cond, body, orelse, is_let_else, **kwargs
-             ),
-             is_let_else,
-         )
+     decl_type_str = decl_type.string if isinstance(decl_type, TokenInfo) else decl_type
+     return set_is_let_else(
+         _make_if_let_multiple(
+             decl_type_str, pattern_subjects, cond, body, orelse, is_let_else, **kwargs
+         ),
+         is_let_else,
+     )


  def _make_if_let_single_case(
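make_if_let now accepts the raw `let`/`const` keyword token as well as a plain string, normalizing through TokenInfo.string, and the pattern-count validation has moved into _let_pattern_check, which reports via parser.build_syntax_error instead of raising a bare SyntaxError mid-construction. The normalization in isolation (token values are hypothetical):

    import token
    from tokenize import TokenInfo

    decl = TokenInfo(token.NAME, "let", start=(1, 0), end=(1, 3), line="let x = f()\n")
    for decl_type in (decl, "let"):
        s = decl_type.string if isinstance(decl_type, TokenInfo) else decl_type
        assert s == "let"   # both spellings reach _make_if_let_multiple as "let"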
@@ -922,19 +1009,18 @@
      # Variable capture pattern, e.g. `let x = ...` without condition.
      # In this case, the condition is a None check.
      return set_is_let_else(
-         ast.match_case(
+         make_match_case(
              pattern=pattern,
              guard=_make_none_check(pattern.name, get_pos_attributes(pattern)),
              body=body,
+             **get_pos_attributes(pattern),
          ),
          is_let_else,
      )
  else:
      return set_is_let_else(
-         ast.match_case(
-             pattern=pattern,
-             guard=cond,
-             body=body,
+         make_match_case(
+             pattern=pattern, guard=cond, body=body, **get_pos_attributes(pattern)
          ),
          is_let_else,
      )
@@ -1073,10 +1159,7 @@ def make_while_let(
      orelse: list[ast.stmt] | None,
      **kwargs: Unpack[PosAttributes],
  ) -> ast.While:
-     if len(pattern_subjects) == 0:
-         raise SyntaxError("if let must have at least one pattern")
-     else:
-         return _make_while_let(pattern_subjects, cond, body, orelse, **kwargs)
+     return _make_while_let(pattern_subjects, cond, body, orelse, **kwargs)


  def _make_while_let(
@@ -1143,10 +1226,65 @@ def is_static(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool:
      return getattr(node, IS_STATIC, False)


+ _DEFINED_NAME = "_typh_defined_name"
+ DefinesName = (
+     ast.FunctionDef
+     | ast.AsyncFunctionDef
+     | ast.ClassDef
+     | ast.alias
+     | ast.Attribute
+     | ast.arg
+ )
+
+
+ def get_defined_name(node: DefinesName) -> ast.Name | None:
+     return getattr(node, _DEFINED_NAME, None)
+
+
+ def set_defined_name(
+     node: DefinesName,
+     name: ast.Name,
+ ):
+     setattr(node, _DEFINED_NAME, name)
+
+
+ def maybe_copy_defined_name[T: ast.AST](
+     from_node: T,
+     to_node: T,
+ ) -> T:
+     if not isinstance(from_node, DefinesName) or not isinstance(to_node, DefinesName):
+         return to_node
+     name = get_defined_name(from_node)
+     if name is not None:
+         set_defined_name(to_node, name)
+     return to_node
+
+
+ def set_defined_name_token(
+     node: DefinesName, name: TokenInfo | ast.Name, ctx: ast.expr_context = ast.Store()
+ ):
+     if isinstance(name, TokenInfo):
+         name = ast.Name(
+             id=name.string,
+             lineno=name.start[0],
+             col_offset=name.start[1],
+             end_lineno=name.end[0],
+             end_col_offset=name.end[1],
+             ctx=ctx,
+         )
+     setattr(node, _DEFINED_NAME, name)
+     return node
+
+
+ def clear_defined_name(node: DefinesName):
+     if hasattr(node, _DEFINED_NAME):
+         delattr(node, _DEFINED_NAME)
+
+
  def make_function_def(
      is_async: bool,
      is_static: bool,
-     name: str,
+     name: TokenInfo | str,
      args: ast.arguments,
      returns: ast.expr | None,
      body: list[ast.stmt],
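The defined-name machinery records, on each defining node, an ast.Name carrying the exact source span of the identifier token; the new SourceMap/defined_name_retrieve.py (see the file list above) is its consumer. A round-trip sketch, assuming the helpers above:

    import token
    from tokenize import TokenInfo

    node = ast.alias(name="json", lineno=1, col_offset=7, end_lineno=1, end_col_offset=11)
    tok = TokenInfo(token.NAME, "json", start=(1, 7), end=(1, 11), line="import json\n")
    set_defined_name_token(node, tok)
    name = get_defined_name(node)
    assert name is not None
    assert name.id == "json" and (name.lineno, name.col_offset) == (1, 7)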
@@ -1156,7 +1294,7 @@ def make_function_def(
  ) -> ast.FunctionDef | ast.AsyncFunctionDef:
      if is_async:
          result = ast.AsyncFunctionDef(
-             name=name,
+             name=name.string if isinstance(name, TokenInfo) else name,
              args=args,
              returns=returns,
              body=body,
@@ -1166,7 +1304,7 @@
          )
      else:
          result = ast.FunctionDef(
-             name=name,
+             name=name.string if isinstance(name, TokenInfo) else name,
              args=args,
              returns=returns,
              body=body,
@@ -1175,6 +1313,120 @@
          **kwargs,
      )
      set_is_static(result, is_static)
+     if isinstance(name, TokenInfo):
+         set_defined_name_token(result, name)
+     return result
+
+
+ def make_class_def(
+     name: TokenInfo | str,
+     bases: list[ast.expr],
+     keywords: list[ast.keyword],
+     body: list[ast.stmt],
+     decorator_list: list[ast.expr],
+     type_params: list[ast.type_param],
+     **kwargs: Unpack[PosAttributes],
+ ) -> ast.ClassDef:
+     name_str = name.string if isinstance(name, TokenInfo) else name
+     result = ast.ClassDef(
+         name=name_str,
+         bases=bases,
+         keywords=keywords,
+         body=body,
+         decorator_list=decorator_list,
+         type_params=type_params,
+         **kwargs,
+     )
+     set_defined_name_token(
+         result,
+         name
+         if isinstance(name, TokenInfo)
+         else ast.Name(id=name, ctx=ast.Store(), **kwargs),
+     )
+     return result
+
+
+ def make_alias(
+     name: list[TokenInfo],
+     asname: TokenInfo | None,
+     **kwargs: Unpack[PosAttributes],
+ ) -> ast.alias:
+     result = ast.alias(
+         name=".".join(n.string for n in name),
+         asname=asname.string if asname else None,
+         **kwargs,
+     )
+     if asname is not None:
+         set_defined_name_token(result, asname)
+     else:
+         set_defined_name_token(result, name[-1])
+     return result
+
+
+ def make_attribute(
+     value: ast.expr,
+     attr: TokenInfo,
+     ctx: ast.expr_context,
+     **kwargs: Unpack[PosAttributes],
+ ):
+     return set_defined_name_token(
+         ast.Attribute(
+             value=value,
+             attr=attr.string,
+             ctx=ctx,
+             **kwargs,
+         ),
+         attr,
+         ctx,
+     )
+
+
+ _IMPORT_FROM_NAMES = "_typh_import_from_names"
+
+
+ def get_import_from_names(node: ast.ImportFrom) -> list[ast.Name]:
+     return getattr(node, _IMPORT_FROM_NAMES, [])
+
+
+ def set_import_from_names(
+     node: ast.ImportFrom,
+     names: list[ast.Name],
+ ):
+     setattr(node, _IMPORT_FROM_NAMES, names)
+
+
+ def clear_import_from_names(node: ast.ImportFrom):
+     if hasattr(node, _IMPORT_FROM_NAMES):
+         delattr(node, _IMPORT_FROM_NAMES)
+
+
+ # Used only in the case where the module part exists.
+ def make_import_from(
+     module: list[TokenInfo] | None,
+     names: list[ast.alias],
+     level: int,
+     **kwargs: Unpack[PosAttributes],
+ ) -> ast.ImportFrom:
+     mod_name = ".".join(n.string for n in module) if module else None
+     result = ast.ImportFrom(
+         module=mod_name,
+         names=names,
+         level=level,
+         **kwargs,
+     )
+     if module:
+         import_names = [
+             ast.Name(
+                 id=n.string,
+                 lineno=n.start[0],
+                 col_offset=n.start[1],
+                 end_lineno=n.end[0],
+                 end_col_offset=n.end[1],
+                 ctx=ast.Load(),
+             )
+             for n in module
+         ]
+         set_import_from_names(result, import_names)
      return result


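Together, make_alias and make_import_from let the grammar pass raw NAME tokens straight through while keeping a per-segment position trail for the source map. A sketch, assuming the definitions above (tokens are hypothetical):

    import token
    from tokenize import TokenInfo

    line = "from os.path import join\n"
    pos = dict(lineno=1, col_offset=0, end_lineno=1, end_col_offset=24)
    os_tok = TokenInfo(token.NAME, "os", start=(1, 5), end=(1, 7), line=line)
    path_tok = TokenInfo(token.NAME, "path", start=(1, 8), end=(1, 12), line=line)
    join_tok = TokenInfo(token.NAME, "join", start=(1, 20), end=(1, 24), line=line)

    alias_node = make_alias([join_tok], None, **pos)  # no asname: last segment wins
    imp = make_import_from([os_tok, path_tok], [alias_node], level=0, **pos)
    assert imp.module == "os.path"
    assert [n.id for n in get_import_from_names(imp)] == ["os", "path"]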
@@ -1196,6 +1448,7 @@ def set_control_comprehension_def(
      node: ControlComprehension, func_def: ast.FunctionDef | ast.AsyncFunctionDef
  ):
      setattr(node, _CONTROL_COMPREHENSION, func_def)
+     set_is_internal_name(node)


  def clear_is_control_comprehension(node: ControlComprehension) -> None:
@@ -1564,7 +1817,7 @@ def make_match_comp(
          ast.Match(
              subject=subject,
              cases=[
-                 ast.match_case(
+                 make_match_case(
                      pattern=get_case_comp_case(case)[0],
                      guard=get_case_comp_case(case)[1],
                      body=[
@@ -1573,6 +1826,7 @@
                          **get_pos_attributes(case),
                      )
                  ],
+                 **get_pos_attributes(case),
              )
              for case in cases
          ],
@@ -2089,3 +2343,77 @@ def is_case_irrefutable(case: ast.match_case) -> bool:
      if case.guard:
          return False
      return is_pattern_irrefutable(case.pattern)
+
+
+ def make_match_case(
+     pattern: ast.pattern,
+     guard: ast.expr | None,
+     body: list[ast.stmt],
+     **kwargs: Unpack[PosAttributes],
+ ) -> ast.match_case:
+     node = ast.match_case(pattern=pattern, guard=guard, body=body)
+     # Append position attributes
+     for key, value in kwargs.items():
+         setattr(node, key, value)
+     return node
+
+
+ def make_arg(
+     arg: TokenInfo | None,
+     annotation: ast.expr | None,
+     **kwargs: Unpack[PosAttributes],
+ ):
+     """Build a function definition argument."""
+     node = ast.arg(
+         arg=arg.string if arg else "",
+         annotation=annotation,
+         **kwargs,
+     )
+     if arg:
+         set_defined_name_token(node, arg)
+     return node
+
+
+ def make_arguments(
+     pos_only: Optional[List[Tuple[ast.arg, None]]],
+     pos_only_with_default: List[Tuple[ast.arg, Any]],
+     param_no_default: Optional[List[ast.arg]],
+     param_default: Optional[List[Tuple[ast.arg, Any]]],
+     after_star: Optional[
+         Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]
+     ],
+     **kwargs: Unpack[PosAttributes],
+ ) -> ast.arguments:
+     """Build a function definition's arguments."""
+     defaults = (
+         [d for _, d in pos_only_with_default if d is not None]
+         if pos_only_with_default
+         else []
+     )
+     defaults += [d for _, d in param_default if d is not None] if param_default else []
+
+     pos_only = pos_only or pos_only_with_default
+
+     # Positional-only parameters with and without defaults must be combined, so
+     # even the no-default variant is a list of (arg, None) tuples.
+     pos_only_args = [p for p, _ in pos_only]
+     params = (param_no_default or []) + (
+         [p for p, _ in param_default] if param_default else []
+     )
+
+     # If after_star is None, substitute a default tuple
+     after_star = after_star or (None, [], None)
+
+     node = ast.arguments(
+         posonlyargs=pos_only_args,
+         args=params,
+         defaults=defaults,
+         vararg=after_star[0],
+         kwonlyargs=[p for p, _ in after_star[1]],
+         kw_defaults=[d for _, d in after_star[1]],
+         kwarg=after_star[2],
+     )
+     # Append position attributes
+     for key, value in kwargs.items():
+         setattr(node, key, value)
+     return node
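make_arguments flattens the grammar's (arg, default) pair lists into the layout ast.arguments expects, with defaults collected across positional-only and regular parameters. A worked sketch, assuming the definitions above (the parameters are hypothetical):

    a, b, k = ast.arg(arg="a"), ast.arg(arg="b"), ast.arg(arg="k")
    args = make_arguments(
        pos_only=None,
        pos_only_with_default=[],
        param_no_default=[a],                        # models def f(a, b=1, *, k=2)
        param_default=[(b, ast.Constant(1))],
        after_star=(None, [(k, ast.Constant(2))], None),
        lineno=1, col_offset=0, end_lineno=1, end_col_offset=0,
    )
    assert [p.arg for p in args.args] == ["a", "b"]
    assert len(args.defaults) == 1                   # only b carries a default
    assert [p.arg for p in args.kwonlyargs] == ["k"]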