Typhon-Language 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Typhon/Driver/configs.py +14 -0
- Typhon/Driver/debugging.py +148 -5
- Typhon/Driver/diagnostic.py +4 -3
- Typhon/Driver/language_server.py +25 -0
- Typhon/Driver/run.py +1 -1
- Typhon/Driver/translate.py +16 -11
- Typhon/Driver/utils.py +39 -1
- Typhon/Grammar/_typhon_parser.py +2920 -2718
- Typhon/Grammar/parser.py +80 -53
- Typhon/Grammar/parser_helper.py +68 -87
- Typhon/Grammar/syntax_errors.py +41 -20
- Typhon/Grammar/token_factory_custom.py +541 -485
- Typhon/Grammar/tokenizer_custom.py +52 -0
- Typhon/Grammar/typhon_ast.py +754 -76
- Typhon/Grammar/typhon_ast_error.py +438 -0
- Typhon/Grammar/unparse_custom.py +25 -0
- Typhon/LanguageServer/__init__.py +3 -0
- Typhon/LanguageServer/client/__init__.py +42 -0
- Typhon/LanguageServer/client/pyrefly.py +115 -0
- Typhon/LanguageServer/client/pyright.py +173 -0
- Typhon/LanguageServer/semantic_tokens.py +446 -0
- Typhon/LanguageServer/server.py +376 -0
- Typhon/LanguageServer/utils.py +65 -0
- Typhon/SourceMap/ast_match_based_map.py +199 -152
- Typhon/SourceMap/ast_matching.py +102 -87
- Typhon/SourceMap/datatype.py +275 -264
- Typhon/SourceMap/defined_name_retrieve.py +145 -0
- Typhon/Transform/comprehension_to_function.py +2 -5
- Typhon/Transform/const_member_to_final.py +12 -7
- Typhon/Transform/extended_patterns.py +139 -0
- Typhon/Transform/forbidden_statements.py +25 -0
- Typhon/Transform/if_while_let.py +122 -11
- Typhon/Transform/inline_statement_block_capture.py +22 -15
- Typhon/Transform/optional_operators_to_checked.py +14 -6
- Typhon/Transform/placeholder_to_function.py +0 -1
- Typhon/Transform/record_to_dataclass.py +22 -238
- Typhon/Transform/scope_check_rename.py +109 -29
- Typhon/Transform/transform.py +16 -12
- Typhon/Transform/type_abbrev_desugar.py +11 -15
- Typhon/Transform/type_annotation_check_expand.py +2 -2
- Typhon/Transform/utils/__init__.py +0 -0
- Typhon/Transform/utils/imports.py +83 -0
- Typhon/Transform/{utils.py → utils/jump_away.py} +2 -38
- Typhon/Transform/utils/make_class.py +135 -0
- Typhon/Transform/visitor.py +25 -0
- Typhon/Typing/pyrefly.py +145 -0
- Typhon/Typing/pyright.py +141 -144
- Typhon/Typing/result_diagnostic.py +1 -1
- Typhon/__main__.py +15 -1
- {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/METADATA +13 -6
- typhon_language-0.1.4.dist-info/RECORD +65 -0
- {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
- typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
- typhon_language-0.1.2.dist-info/RECORD +0 -48
- typhon_language-0.1.2.dist-info/licenses/LICENSE +0 -21
- {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
- {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0
Typhon/Grammar/typhon_ast.py
CHANGED
|
@@ -1,10 +1,26 @@
|
|
|
1
1
|
# Ast Extensions for Typhon
|
|
2
|
-
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
from re import A
|
|
4
|
+
from typing import (
|
|
5
|
+
Union,
|
|
6
|
+
Unpack,
|
|
7
|
+
TypedDict,
|
|
8
|
+
Tuple,
|
|
9
|
+
cast,
|
|
10
|
+
TYPE_CHECKING,
|
|
11
|
+
Optional,
|
|
12
|
+
List,
|
|
13
|
+
Any,
|
|
14
|
+
)
|
|
3
15
|
import ast
|
|
4
|
-
from
|
|
16
|
+
from dataclasses import dataclass
|
|
5
17
|
from copy import copy
|
|
18
|
+
from tokenize import TokenInfo
|
|
6
19
|
from ..Driver.debugging import debug_print, debug_verbose_print
|
|
7
20
|
|
|
21
|
+
if TYPE_CHECKING:
|
|
22
|
+
from .parser_helper import Parser
|
|
23
|
+
|
|
8
24
|
|
|
9
25
|
# Same as ast module's position attributes
|
|
10
26
|
class PosAttributes(TypedDict):
|
|
@@ -14,6 +30,25 @@ class PosAttributes(TypedDict):
|
|
|
14
30
|
end_col_offset: int | None
|
|
15
31
|
|
|
16
32
|
|
|
33
|
+
def unpack_pos_default(pos: PosAttributes) -> Tuple[int, int, int, int]:
|
|
34
|
+
return (
|
|
35
|
+
pos["lineno"],
|
|
36
|
+
pos["col_offset"],
|
|
37
|
+
pos["end_lineno"] or pos["lineno"],
|
|
38
|
+
pos["end_col_offset"] or pos["col_offset"] + 1,
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def unpack_pos_tuple(pos: PosAttributes) -> Tuple[Tuple[int, int], Tuple[int, int]]:
|
|
43
|
+
return (
|
|
44
|
+
(pos["lineno"], pos["col_offset"]),
|
|
45
|
+
(
|
|
46
|
+
pos["end_lineno"] or pos["lineno"],
|
|
47
|
+
pos["end_col_offset"] or pos["col_offset"] + 1,
|
|
48
|
+
),
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
|
|
17
52
|
class PosRange(TypedDict):
|
|
18
53
|
lineno: int
|
|
19
54
|
col_offset: int
|
|
@@ -45,13 +80,21 @@ type PosNode = (
|
|
|
45
80
|
| ast.excepthandler
|
|
46
81
|
| ast.pattern
|
|
47
82
|
| ast.keyword
|
|
83
|
+
| ast.match_case
|
|
48
84
|
)
|
|
49
85
|
|
|
50
86
|
|
|
51
|
-
def get_pos_attributes(node: PosNode) -> PosAttributes:
|
|
87
|
+
def get_pos_attributes(node: PosNode | TokenInfo) -> PosAttributes:
|
|
88
|
+
if isinstance(node, TokenInfo):
|
|
89
|
+
return PosAttributes(
|
|
90
|
+
lineno=node.start[0],
|
|
91
|
+
col_offset=node.start[1],
|
|
92
|
+
end_lineno=node.end[0],
|
|
93
|
+
end_col_offset=node.end[1],
|
|
94
|
+
)
|
|
52
95
|
return PosAttributes(
|
|
53
|
-
lineno=node
|
|
54
|
-
col_offset=node
|
|
96
|
+
lineno=getattr(node, "lineno", 1),
|
|
97
|
+
col_offset=getattr(node, "col_offset", 0),
|
|
55
98
|
end_lineno=getattr(node, "end_lineno", None),
|
|
56
99
|
end_col_offset=getattr(node, "end_col_offset", None),
|
|
57
100
|
)
|
|
@@ -69,14 +112,101 @@ def get_pos_attributes_if_exists(node: ast.AST) -> PosAttributes | None:
|
|
|
69
112
|
|
|
70
113
|
|
|
71
114
|
def get_empty_pos_attributes() -> PosAttributes:
|
|
115
|
+
# Python ast position is 1-based for line, 0-based for column
|
|
72
116
|
return PosAttributes(
|
|
73
|
-
lineno=
|
|
117
|
+
lineno=1,
|
|
74
118
|
col_offset=0,
|
|
75
|
-
end_lineno=
|
|
119
|
+
end_lineno=1,
|
|
76
120
|
end_col_offset=0,
|
|
77
121
|
)
|
|
78
122
|
|
|
79
123
|
|
|
124
|
+
_ANONYMOUS_NAME = "_typh_anonymous"
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def set_anonymous_name_id(node: ast.Name, id: int) -> ast.Name:
|
|
128
|
+
setattr(node, _ANONYMOUS_NAME, id)
|
|
129
|
+
return node
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def get_anonymous_base_name() -> str:
|
|
133
|
+
return _ANONYMOUS_NAME
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def get_anonymous_name_id(node: ast.Name) -> int | None:
|
|
137
|
+
return getattr(node, _ANONYMOUS_NAME, None)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def clear_anonymous_name(node: ast.Name) -> None:
|
|
141
|
+
if hasattr(node, _ANONYMOUS_NAME):
|
|
142
|
+
delattr(node, _ANONYMOUS_NAME)
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def is_anonymous_name(node: ast.Name) -> bool:
|
|
146
|
+
return hasattr(node, _ANONYMOUS_NAME)
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def copy_anonymous_name(src: ast.Name, ctx: ast.expr_context) -> ast.Name:
|
|
150
|
+
result = ast.Name(src.id, ctx, **get_pos_attributes(src))
|
|
151
|
+
anon_id = get_anonymous_name_id(src)
|
|
152
|
+
if anon_id is not None:
|
|
153
|
+
set_anonymous_name_id(result, anon_id)
|
|
154
|
+
return result
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
_TYPE_INVALID_NAME = "_typh_invalid_name"
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def get_invalid_name() -> str:
|
|
161
|
+
return _TYPE_INVALID_NAME
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
_TYPE_IGNORE_NODES = "_typh_type_ignore"
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def get_type_ignore_tag(node: ast.AST) -> str | None:
|
|
168
|
+
return getattr(node, _TYPE_IGNORE_NODES, None)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def get_type_ignore_comment(node: ast.AST) -> str | None:
|
|
172
|
+
tag = get_type_ignore_tag(node)
|
|
173
|
+
if tag is not None:
|
|
174
|
+
return f"# type: ignore[{tag}]"
|
|
175
|
+
return None
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def set_type_ignore_node(node: ast.AST, tag: str) -> ast.AST:
|
|
179
|
+
setattr(node, _TYPE_IGNORE_NODES, tag)
|
|
180
|
+
return node
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def is_type_ignore_node(node: ast.AST) -> bool:
|
|
184
|
+
return hasattr(node, _TYPE_IGNORE_NODES)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def clear_type_ignore_node(node: ast.AST) -> None:
|
|
188
|
+
if hasattr(node, _TYPE_IGNORE_NODES):
|
|
189
|
+
delattr(node, _TYPE_IGNORE_NODES)
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
# The name is internal when it has no counterpart in input typhon source code.
|
|
193
|
+
_INTERNAL_NAME = "_typh_internal_name"
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def is_internal_name(name: ast.Name) -> bool:
|
|
197
|
+
return getattr(name, _INTERNAL_NAME, False)
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def set_is_internal_name(name: ast.Name, is_internal: bool = True) -> ast.Name:
|
|
201
|
+
setattr(name, _INTERNAL_NAME, is_internal)
|
|
202
|
+
return name
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
def clear_internal_name(name: ast.Name) -> None:
|
|
206
|
+
if hasattr(name, _INTERNAL_NAME):
|
|
207
|
+
delattr(name, _INTERNAL_NAME)
|
|
208
|
+
|
|
209
|
+
|
|
80
210
|
# Normal assignments, let assignments for variable declarations,
|
|
81
211
|
# and constant assignments for constant definitions.
|
|
82
212
|
# They all are Assign/AnnAssign in Python, we distinguish them by
|
|
@@ -93,7 +223,13 @@ def is_decl_stmt(node: ast.AST) -> bool:
|
|
|
93
223
|
|
|
94
224
|
|
|
95
225
|
type DeclarableStmt = Union[
|
|
96
|
-
ast.Assign,
|
|
226
|
+
ast.Assign,
|
|
227
|
+
ast.AnnAssign,
|
|
228
|
+
ast.withitem,
|
|
229
|
+
ast.For,
|
|
230
|
+
ast.AsyncFor,
|
|
231
|
+
ast.comprehension,
|
|
232
|
+
ast.pattern,
|
|
97
233
|
]
|
|
98
234
|
|
|
99
235
|
_IS_VAR = "_typh_is_var"
|
|
@@ -399,6 +535,106 @@ def declaration_as_withitem(assign: Union[ast.Assign, ast.AnnAssign]) -> ast.wit
|
|
|
399
535
|
return item
|
|
400
536
|
|
|
401
537
|
|
|
538
|
+
def _make_with_let_pattern(
|
|
539
|
+
parser: Parser,
|
|
540
|
+
is_async: bool,
|
|
541
|
+
decl_type: str,
|
|
542
|
+
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
543
|
+
body: list[ast.stmt],
|
|
544
|
+
**kwargs: Unpack[PosAttributes],
|
|
545
|
+
) -> tuple[ast.stmt, list[ast.withitem]]:
|
|
546
|
+
items: list[ast.withitem] = []
|
|
547
|
+
pattern_vars: list[tuple[ast.pattern, ast.expr]] = []
|
|
548
|
+
for pattern, subject in pattern_subjects:
|
|
549
|
+
var, var_id = parser.make_anonymous_name(
|
|
550
|
+
ast.Store(), **get_pos_attributes(pattern)
|
|
551
|
+
)
|
|
552
|
+
item = ast.withitem(context_expr=subject, optional_vars=var)
|
|
553
|
+
_set_is_let_var(item, decl_type)
|
|
554
|
+
items.append(item)
|
|
555
|
+
pattern_vars.append((pattern, copy_anonymous_name(var, ast.Load())))
|
|
556
|
+
let_pattern_stmt = make_if_let(
|
|
557
|
+
decl_type,
|
|
558
|
+
pattern_subjects=pattern_vars,
|
|
559
|
+
cond=None,
|
|
560
|
+
body=body,
|
|
561
|
+
orelse=None,
|
|
562
|
+
is_let_else=True,
|
|
563
|
+
**kwargs,
|
|
564
|
+
)
|
|
565
|
+
return let_pattern_stmt, items
|
|
566
|
+
|
|
567
|
+
|
|
568
|
+
def make_with_let_pattern(
|
|
569
|
+
parser: Parser,
|
|
570
|
+
is_async: bool,
|
|
571
|
+
decl_type: str,
|
|
572
|
+
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
573
|
+
body: list[ast.stmt],
|
|
574
|
+
**kwargs: Unpack[PosAttributes],
|
|
575
|
+
) -> ast.With | ast.AsyncWith:
|
|
576
|
+
let_pattern_stmt, items = _make_with_let_pattern(
|
|
577
|
+
parser,
|
|
578
|
+
is_async,
|
|
579
|
+
decl_type,
|
|
580
|
+
pattern_subjects,
|
|
581
|
+
body,
|
|
582
|
+
**kwargs,
|
|
583
|
+
)
|
|
584
|
+
return make_with_stmt(
|
|
585
|
+
is_async=is_async,
|
|
586
|
+
items=items,
|
|
587
|
+
body=[let_pattern_stmt],
|
|
588
|
+
is_inline=False,
|
|
589
|
+
**kwargs,
|
|
590
|
+
)
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def make_inline_with_let_pattern(
|
|
594
|
+
parser: Parser,
|
|
595
|
+
is_async: bool,
|
|
596
|
+
decl_type: str,
|
|
597
|
+
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
598
|
+
**kwargs: Unpack[PosAttributes],
|
|
599
|
+
) -> list[ast.stmt]:
|
|
600
|
+
"""
|
|
601
|
+
with <expr1> as <temp_name1>, ...: # inline with_let_pattern
|
|
602
|
+
if True: # Sequentially expanded. Captured later.
|
|
603
|
+
match <temp_name1>:
|
|
604
|
+
case <pattern1>:
|
|
605
|
+
match <temp_name2>:
|
|
606
|
+
case <pattern2>:
|
|
607
|
+
...
|
|
608
|
+
match <temp_nameN>:
|
|
609
|
+
case <patternN> if <cond>:
|
|
610
|
+
<body>
|
|
611
|
+
case _:
|
|
612
|
+
pass
|
|
613
|
+
...
|
|
614
|
+
case _:
|
|
615
|
+
pass
|
|
616
|
+
case _:
|
|
617
|
+
pass
|
|
618
|
+
"""
|
|
619
|
+
let_pattern_stmt, items = _make_with_let_pattern(
|
|
620
|
+
parser,
|
|
621
|
+
is_async,
|
|
622
|
+
decl_type,
|
|
623
|
+
pattern_subjects,
|
|
624
|
+
body=[],
|
|
625
|
+
**kwargs,
|
|
626
|
+
)
|
|
627
|
+
inline_with = make_with_stmt(
|
|
628
|
+
is_async=is_async,
|
|
629
|
+
items=items,
|
|
630
|
+
body=[],
|
|
631
|
+
is_inline=True,
|
|
632
|
+
**kwargs,
|
|
633
|
+
)
|
|
634
|
+
# inline_with will capture the let pattern stmt (and the following stmts) in its body later.
|
|
635
|
+
return [inline_with, let_pattern_stmt]
|
|
636
|
+
|
|
637
|
+
|
|
402
638
|
# Use Name as a function literal. Replaced to name of FunctionDef.
|
|
403
639
|
type FunctionLiteral = ast.Name
|
|
404
640
|
|
|
@@ -419,6 +655,7 @@ def set_function_literal_def(
|
|
|
419
655
|
):
|
|
420
656
|
setattr(name, _FUNC_DEF, func_def)
|
|
421
657
|
setattr(func_def, _IS_FUNCTION_LITERAL, True)
|
|
658
|
+
set_is_internal_name(name)
|
|
422
659
|
|
|
423
660
|
|
|
424
661
|
def clear_function_literal_def(name: FunctionLiteral):
|
|
@@ -628,6 +865,43 @@ def make_for_stmt(
|
|
|
628
865
|
return result
|
|
629
866
|
|
|
630
867
|
|
|
868
|
+
def make_for_let_pattern(
|
|
869
|
+
parser: Parser,
|
|
870
|
+
decl_type: str,
|
|
871
|
+
pattern: ast.pattern,
|
|
872
|
+
iter: ast.expr,
|
|
873
|
+
body: list[ast.stmt],
|
|
874
|
+
orelse: list[ast.stmt] | None,
|
|
875
|
+
type_comment: str | None,
|
|
876
|
+
is_async: bool,
|
|
877
|
+
**kwargs: Unpack[PosAttributes],
|
|
878
|
+
):
|
|
879
|
+
temp_name, anon_id = parser.make_anonymous_name(
|
|
880
|
+
ast.Load(), **get_pos_attributes(pattern)
|
|
881
|
+
)
|
|
882
|
+
let_stmt = make_if_let(
|
|
883
|
+
decl_type,
|
|
884
|
+
pattern_subjects=[(pattern, temp_name)],
|
|
885
|
+
cond=None,
|
|
886
|
+
body=body,
|
|
887
|
+
orelse=None,
|
|
888
|
+
is_let_else=True,
|
|
889
|
+
**kwargs,
|
|
890
|
+
)
|
|
891
|
+
temp_name_store = copy_anonymous_name(temp_name, ast.Store())
|
|
892
|
+
return make_for_stmt(
|
|
893
|
+
decl_type=decl_type,
|
|
894
|
+
target=temp_name_store,
|
|
895
|
+
type_annotation=None,
|
|
896
|
+
iter=iter,
|
|
897
|
+
body=[let_stmt],
|
|
898
|
+
orelse=orelse,
|
|
899
|
+
type_comment=type_comment,
|
|
900
|
+
is_async=is_async,
|
|
901
|
+
**kwargs,
|
|
902
|
+
)
|
|
903
|
+
|
|
904
|
+
|
|
631
905
|
def _make_none_check(name: str, pos: PosAttributes) -> ast.Compare:
|
|
632
906
|
return ast.Compare(
|
|
633
907
|
left=ast.Name(id=name, ctx=ast.Load(), **pos),
|
|
@@ -641,12 +915,18 @@ _LET_PATTERN_BODY = "_typh_multiple_let_pattern_body"
|
|
|
641
915
|
_IS_LET_ELSE = "_typh_is_let_else"
|
|
642
916
|
|
|
643
917
|
|
|
644
|
-
|
|
918
|
+
@dataclass
|
|
919
|
+
class LetPatternInfo:
|
|
920
|
+
body: list[ast.stmt]
|
|
921
|
+
is_all_pattern_irrefutable: bool
|
|
922
|
+
|
|
923
|
+
|
|
924
|
+
def get_let_pattern_body(node: ast.While | ast.If) -> LetPatternInfo | None:
|
|
645
925
|
return getattr(node, _LET_PATTERN_BODY, None)
|
|
646
926
|
|
|
647
927
|
|
|
648
928
|
def set_let_pattern_body(
|
|
649
|
-
node: ast.While | ast.If, body:
|
|
929
|
+
node: ast.While | ast.If, body: LetPatternInfo
|
|
650
930
|
) -> ast.While | ast.If:
|
|
651
931
|
setattr(node, _LET_PATTERN_BODY, body)
|
|
652
932
|
return node
|
|
@@ -657,21 +937,43 @@ def clear_let_pattern_body(node: ast.While | ast.If) -> None:
|
|
|
657
937
|
delattr(node, _LET_PATTERN_BODY)
|
|
658
938
|
|
|
659
939
|
|
|
660
|
-
|
|
940
|
+
type LetElseAnnotatedNode = ast.If | ast.Match | ast.match_case
|
|
941
|
+
|
|
942
|
+
|
|
943
|
+
def is_let_else(node: LetElseAnnotatedNode) -> bool:
|
|
661
944
|
return getattr(node, _IS_LET_ELSE, False)
|
|
662
945
|
|
|
663
946
|
|
|
664
|
-
def set_is_let_else(node:
|
|
947
|
+
def set_is_let_else[T: LetElseAnnotatedNode](node: T, is_let_else: bool) -> T:
|
|
665
948
|
setattr(node, _IS_LET_ELSE, is_let_else)
|
|
666
949
|
return node
|
|
667
950
|
|
|
668
951
|
|
|
669
|
-
def clear_is_let_else(node:
|
|
952
|
+
def clear_is_let_else(node: LetElseAnnotatedNode) -> None:
|
|
670
953
|
if hasattr(node, _IS_LET_ELSE):
|
|
671
954
|
delattr(node, _IS_LET_ELSE)
|
|
672
955
|
|
|
673
956
|
|
|
957
|
+
def _let_pattern_check(
|
|
958
|
+
parser: Parser,
|
|
959
|
+
decl_type_str: str,
|
|
960
|
+
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
961
|
+
start_pos: tuple[int, int],
|
|
962
|
+
end_pos: tuple[int, int],
|
|
963
|
+
) -> bool:
|
|
964
|
+
if decl_type_str != "let":
|
|
965
|
+
error = parser.build_syntax_error(
|
|
966
|
+
"declaration pattern must be 'let' declaration", start_pos, end_pos
|
|
967
|
+
)
|
|
968
|
+
if len(pattern_subjects) == 0:
|
|
969
|
+
parser.build_syntax_error(
|
|
970
|
+
"declaration pattern must have at least one pattern", start_pos, end_pos
|
|
971
|
+
)
|
|
972
|
+
return True
|
|
973
|
+
|
|
974
|
+
|
|
674
975
|
def make_if_let(
|
|
976
|
+
decl_type: TokenInfo | str,
|
|
675
977
|
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
676
978
|
cond: ast.expr | None,
|
|
677
979
|
body: list[ast.stmt],
|
|
@@ -679,27 +981,24 @@ def make_if_let(
|
|
|
679
981
|
is_let_else: bool,
|
|
680
982
|
**kwargs: Unpack[PosAttributes],
|
|
681
983
|
) -> ast.stmt:
|
|
682
|
-
if
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
# )
|
|
690
|
-
else:
|
|
691
|
-
return set_is_let_else(
|
|
692
|
-
_make_if_let_multiple(pattern_subjects, cond, body, orelse, **kwargs),
|
|
693
|
-
is_let_else,
|
|
694
|
-
)
|
|
984
|
+
decl_type_str = decl_type.string if isinstance(decl_type, TokenInfo) else decl_type
|
|
985
|
+
return set_is_let_else(
|
|
986
|
+
_make_if_let_multiple(
|
|
987
|
+
decl_type_str, pattern_subjects, cond, body, orelse, is_let_else, **kwargs
|
|
988
|
+
),
|
|
989
|
+
is_let_else,
|
|
990
|
+
)
|
|
695
991
|
|
|
696
992
|
|
|
697
993
|
def _make_if_let_single_case(
|
|
994
|
+
decl_type: str,
|
|
698
995
|
pattern: ast.pattern,
|
|
699
996
|
cond: ast.expr | None,
|
|
700
997
|
body: list[ast.stmt],
|
|
701
998
|
make_none_check: bool,
|
|
999
|
+
is_let_else: bool,
|
|
702
1000
|
) -> ast.match_case:
|
|
1001
|
+
_set_is_let_var(pattern, decl_type)
|
|
703
1002
|
if (
|
|
704
1003
|
isinstance(pattern, ast.MatchAs)
|
|
705
1004
|
and pattern.pattern is None
|
|
@@ -709,37 +1008,65 @@ def _make_if_let_single_case(
|
|
|
709
1008
|
):
|
|
710
1009
|
# Variable capture pattern, e.g. `let x = ...` without condition.
|
|
711
1010
|
# In this case, the condition is None check.
|
|
712
|
-
return
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
1011
|
+
return set_is_let_else(
|
|
1012
|
+
make_match_case(
|
|
1013
|
+
pattern=pattern,
|
|
1014
|
+
guard=_make_none_check(pattern.name, get_pos_attributes(pattern)),
|
|
1015
|
+
body=body,
|
|
1016
|
+
**get_pos_attributes(pattern),
|
|
1017
|
+
),
|
|
1018
|
+
is_let_else,
|
|
716
1019
|
)
|
|
717
1020
|
else:
|
|
718
|
-
return
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
1021
|
+
return set_is_let_else(
|
|
1022
|
+
make_match_case(
|
|
1023
|
+
pattern=pattern, guard=cond, body=body, **get_pos_attributes(pattern)
|
|
1024
|
+
),
|
|
1025
|
+
is_let_else,
|
|
722
1026
|
)
|
|
723
1027
|
|
|
724
1028
|
|
|
725
1029
|
def _make_nested_match_for_multiple_let(
|
|
1030
|
+
decl_type: str,
|
|
726
1031
|
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
727
1032
|
cond: ast.expr | None,
|
|
728
1033
|
body: list[ast.stmt],
|
|
1034
|
+
type_error_on_failure: bool,
|
|
1035
|
+
is_let_else: bool,
|
|
729
1036
|
**kwargs: Unpack[PosAttributes],
|
|
730
1037
|
) -> list[ast.stmt]:
|
|
731
1038
|
# Build nested match statements from inside out.
|
|
732
1039
|
for pattern, subject in reversed(pattern_subjects):
|
|
1040
|
+
# Add a wildcard case to handle non-matching case to avoid linter error.
|
|
1041
|
+
default_case = ast.match_case( # case _: pass
|
|
1042
|
+
pattern=ast.MatchAs(
|
|
1043
|
+
name=None, pattern=None, **pos_attribute_to_range(kwargs)
|
|
1044
|
+
),
|
|
1045
|
+
guard=None,
|
|
1046
|
+
body=[
|
|
1047
|
+
ast.Raise(
|
|
1048
|
+
ast.Name(id="TypeError", ctx=ast.Load(), **kwargs),
|
|
1049
|
+
None,
|
|
1050
|
+
**kwargs,
|
|
1051
|
+
)
|
|
1052
|
+
if type_error_on_failure
|
|
1053
|
+
else ast.Pass(**kwargs)
|
|
1054
|
+
],
|
|
1055
|
+
)
|
|
1056
|
+
if type_error_on_failure:
|
|
1057
|
+
# Ignore unreachable clause error for this default case, because this is recovery for the
|
|
1058
|
+
# case type check can not detect a pattern mismatch (e.g. due to cast).
|
|
1059
|
+
set_type_ignore_node(default_case, "all")
|
|
733
1060
|
cases = [
|
|
734
|
-
_make_if_let_single_case(
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
body=[ast.Pass(**kwargs)],
|
|
1061
|
+
_make_if_let_single_case(
|
|
1062
|
+
decl_type,
|
|
1063
|
+
pattern,
|
|
1064
|
+
cond,
|
|
1065
|
+
body,
|
|
1066
|
+
make_none_check=cond is None,
|
|
1067
|
+
is_let_else=is_let_else,
|
|
742
1068
|
),
|
|
1069
|
+
default_case,
|
|
743
1070
|
]
|
|
744
1071
|
nested_match: ast.stmt = ast.Match(
|
|
745
1072
|
subject=subject,
|
|
@@ -753,10 +1080,12 @@ def _make_nested_match_for_multiple_let(
|
|
|
753
1080
|
|
|
754
1081
|
# Multiple patterns are combined into nested match statements.
|
|
755
1082
|
def _make_if_let_multiple(
|
|
1083
|
+
decl_type: str,
|
|
756
1084
|
pattern_subjects: list[tuple[ast.pattern, ast.expr]],
|
|
757
1085
|
cond: ast.expr | None,
|
|
758
1086
|
body: list[ast.stmt],
|
|
759
1087
|
orelse: list[ast.stmt] | None,
|
|
1088
|
+
is_let_else: bool,
|
|
760
1089
|
**kwargs: Unpack[PosAttributes],
|
|
761
1090
|
) -> ast.If:
|
|
762
1091
|
"""
|
|
@@ -777,19 +1106,49 @@ def _make_if_let_multiple(
|
|
|
777
1106
|
match <subjectN>:
|
|
778
1107
|
case <patternN> if <cond>:
|
|
779
1108
|
<body>
|
|
1109
|
+
case _:
|
|
1110
|
+
pass
|
|
1111
|
+
...
|
|
1112
|
+
case _:
|
|
1113
|
+
pass
|
|
1114
|
+
case _:
|
|
1115
|
+
pass
|
|
780
1116
|
else:
|
|
781
1117
|
<orelse>
|
|
782
1118
|
"""
|
|
783
1119
|
# Build nested match statements from inside out.
|
|
1120
|
+
is_all_pattern_irrefutable = all(
|
|
1121
|
+
is_pattern_irrefutable(pat) for pat, _ in pattern_subjects
|
|
1122
|
+
)
|
|
1123
|
+
is_all_pattern_truly_irrefutable = all(
|
|
1124
|
+
is_pattern_irrefutable(pat, assume_type_checked=False)
|
|
1125
|
+
for pat, _ in pattern_subjects
|
|
1126
|
+
)
|
|
784
1127
|
result = ast.If(
|
|
785
1128
|
test=ast.Constant(value=True, **kwargs),
|
|
786
1129
|
body=_make_nested_match_for_multiple_let(
|
|
787
|
-
|
|
1130
|
+
decl_type,
|
|
1131
|
+
pattern_subjects,
|
|
1132
|
+
cond,
|
|
1133
|
+
body,
|
|
1134
|
+
type_error_on_failure=(
|
|
1135
|
+
is_let_else
|
|
1136
|
+
and (orelse is None)
|
|
1137
|
+
and (not is_all_pattern_truly_irrefutable)
|
|
1138
|
+
),
|
|
1139
|
+
is_let_else=is_let_else,
|
|
1140
|
+
**kwargs,
|
|
788
1141
|
),
|
|
789
1142
|
orelse=orelse or [],
|
|
790
1143
|
**kwargs,
|
|
791
1144
|
)
|
|
792
|
-
set_let_pattern_body(
|
|
1145
|
+
set_let_pattern_body(
|
|
1146
|
+
result,
|
|
1147
|
+
LetPatternInfo(
|
|
1148
|
+
body=body,
|
|
1149
|
+
is_all_pattern_irrefutable=is_all_pattern_irrefutable,
|
|
1150
|
+
),
|
|
1151
|
+
)
|
|
793
1152
|
return result
|
|
794
1153
|
|
|
795
1154
|
|
|
@@ -800,10 +1159,7 @@ def make_while_let(
|
|
|
800
1159
|
orelse: list[ast.stmt] | None,
|
|
801
1160
|
**kwargs: Unpack[PosAttributes],
|
|
802
1161
|
) -> ast.While:
|
|
803
|
-
|
|
804
|
-
raise SyntaxError("if let must have at least one pattern")
|
|
805
|
-
else:
|
|
806
|
-
return _make_while_let(pattern_subjects, cond, body, orelse, **kwargs)
|
|
1162
|
+
return _make_while_let(pattern_subjects, cond, body, orelse, **kwargs)
|
|
807
1163
|
|
|
808
1164
|
|
|
809
1165
|
def _make_while_let(
|
|
@@ -842,12 +1198,20 @@ def _make_while_let(
|
|
|
842
1198
|
result = ast.While(
|
|
843
1199
|
test=ast.Constant(value=True, **kwargs),
|
|
844
1200
|
body=_make_nested_match_for_multiple_let(
|
|
845
|
-
pattern_subjects, cond, body, **kwargs
|
|
1201
|
+
"let", pattern_subjects, cond, body, False, False, **kwargs
|
|
846
1202
|
),
|
|
847
1203
|
orelse=orelse or [],
|
|
848
1204
|
**kwargs,
|
|
849
1205
|
)
|
|
850
|
-
set_let_pattern_body(
|
|
1206
|
+
set_let_pattern_body(
|
|
1207
|
+
result,
|
|
1208
|
+
LetPatternInfo(
|
|
1209
|
+
body=body,
|
|
1210
|
+
is_all_pattern_irrefutable=all(
|
|
1211
|
+
is_pattern_irrefutable(pat) for pat, _ in pattern_subjects
|
|
1212
|
+
),
|
|
1213
|
+
),
|
|
1214
|
+
)
|
|
851
1215
|
return result
|
|
852
1216
|
|
|
853
1217
|
|
|
@@ -862,10 +1226,65 @@ def is_static(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool:
|
|
|
862
1226
|
return getattr(node, IS_STATIC, False)
|
|
863
1227
|
|
|
864
1228
|
|
|
1229
|
+
_DEFINED_NAME = "_typh_defined_name"
|
|
1230
|
+
DefinesName = (
|
|
1231
|
+
ast.FunctionDef
|
|
1232
|
+
| ast.AsyncFunctionDef
|
|
1233
|
+
| ast.ClassDef
|
|
1234
|
+
| ast.alias
|
|
1235
|
+
| ast.Attribute
|
|
1236
|
+
| ast.arg
|
|
1237
|
+
)
|
|
1238
|
+
|
|
1239
|
+
|
|
1240
|
+
def get_defined_name(node: DefinesName) -> ast.Name | None:
|
|
1241
|
+
return getattr(node, _DEFINED_NAME, None)
|
|
1242
|
+
|
|
1243
|
+
|
|
1244
|
+
def set_defined_name(
|
|
1245
|
+
node: DefinesName,
|
|
1246
|
+
name: ast.Name,
|
|
1247
|
+
):
|
|
1248
|
+
setattr(node, _DEFINED_NAME, name)
|
|
1249
|
+
|
|
1250
|
+
|
|
1251
|
+
def maybe_copy_defined_name[T: ast.AST](
|
|
1252
|
+
from_node: T,
|
|
1253
|
+
to_node: T,
|
|
1254
|
+
) -> T:
|
|
1255
|
+
if not isinstance(from_node, DefinesName) or not isinstance(to_node, DefinesName):
|
|
1256
|
+
return to_node
|
|
1257
|
+
name = get_defined_name(from_node)
|
|
1258
|
+
if name is not None:
|
|
1259
|
+
set_defined_name(to_node, name)
|
|
1260
|
+
return to_node
|
|
1261
|
+
|
|
1262
|
+
|
|
1263
|
+
def set_defined_name_token(
|
|
1264
|
+
node: DefinesName, name: TokenInfo | ast.Name, ctx: ast.expr_context = ast.Store()
|
|
1265
|
+
):
|
|
1266
|
+
if isinstance(name, TokenInfo):
|
|
1267
|
+
name = ast.Name(
|
|
1268
|
+
id=name.string,
|
|
1269
|
+
lineno=name.start[0],
|
|
1270
|
+
col_offset=name.start[1],
|
|
1271
|
+
end_lineno=name.end[0],
|
|
1272
|
+
end_col_offset=name.end[1],
|
|
1273
|
+
ctx=ctx,
|
|
1274
|
+
)
|
|
1275
|
+
setattr(node, _DEFINED_NAME, name)
|
|
1276
|
+
return node
|
|
1277
|
+
|
|
1278
|
+
|
|
1279
|
+
def clear_defined_name(node: DefinesName):
|
|
1280
|
+
if hasattr(node, _DEFINED_NAME):
|
|
1281
|
+
delattr(node, _DEFINED_NAME)
|
|
1282
|
+
|
|
1283
|
+
|
|
865
1284
|
def make_function_def(
|
|
866
1285
|
is_async: bool,
|
|
867
1286
|
is_static: bool,
|
|
868
|
-
name: str,
|
|
1287
|
+
name: TokenInfo | str,
|
|
869
1288
|
args: ast.arguments,
|
|
870
1289
|
returns: ast.expr | None,
|
|
871
1290
|
body: list[ast.stmt],
|
|
@@ -875,7 +1294,7 @@ def make_function_def(
|
|
|
875
1294
|
) -> ast.FunctionDef | ast.AsyncFunctionDef:
|
|
876
1295
|
if is_async:
|
|
877
1296
|
result = ast.AsyncFunctionDef(
|
|
878
|
-
name=name,
|
|
1297
|
+
name=name.string if isinstance(name, TokenInfo) else name,
|
|
879
1298
|
args=args,
|
|
880
1299
|
returns=returns,
|
|
881
1300
|
body=body,
|
|
@@ -885,7 +1304,7 @@ def make_function_def(
|
|
|
885
1304
|
)
|
|
886
1305
|
else:
|
|
887
1306
|
result = ast.FunctionDef(
|
|
888
|
-
name=name,
|
|
1307
|
+
name=name.string if isinstance(name, TokenInfo) else name,
|
|
889
1308
|
args=args,
|
|
890
1309
|
returns=returns,
|
|
891
1310
|
body=body,
|
|
@@ -894,6 +1313,120 @@ def make_function_def(
|
|
|
894
1313
|
**kwargs,
|
|
895
1314
|
)
|
|
896
1315
|
set_is_static(result, is_static)
|
|
1316
|
+
if isinstance(name, TokenInfo):
|
|
1317
|
+
set_defined_name_token(result, name)
|
|
1318
|
+
return result
|
|
1319
|
+
|
|
1320
|
+
|
|
1321
|
+
def make_class_def(
|
|
1322
|
+
name: TokenInfo | str,
|
|
1323
|
+
bases: list[ast.expr],
|
|
1324
|
+
keywords: list[ast.keyword],
|
|
1325
|
+
body: list[ast.stmt],
|
|
1326
|
+
decorator_list: list[ast.expr],
|
|
1327
|
+
type_params: list[ast.type_param],
|
|
1328
|
+
**kwargs: Unpack[PosAttributes],
|
|
1329
|
+
) -> ast.ClassDef:
|
|
1330
|
+
name_str = name.string if isinstance(name, TokenInfo) else name
|
|
1331
|
+
result = ast.ClassDef(
|
|
1332
|
+
name=name_str,
|
|
1333
|
+
bases=bases,
|
|
1334
|
+
keywords=keywords,
|
|
1335
|
+
body=body,
|
|
1336
|
+
decorator_list=decorator_list,
|
|
1337
|
+
type_params=type_params,
|
|
1338
|
+
**kwargs,
|
|
1339
|
+
)
|
|
1340
|
+
set_defined_name_token(
|
|
1341
|
+
result,
|
|
1342
|
+
name
|
|
1343
|
+
if isinstance(name, TokenInfo)
|
|
1344
|
+
else ast.Name(id=name, ctx=ast.Store(), **kwargs),
|
|
1345
|
+
)
|
|
1346
|
+
return result
|
|
1347
|
+
|
|
1348
|
+
|
|
1349
|
+
def make_alias(
|
|
1350
|
+
name: list[TokenInfo],
|
|
1351
|
+
asname: TokenInfo | None,
|
|
1352
|
+
**kwargs: Unpack[PosAttributes],
|
|
1353
|
+
) -> ast.alias:
|
|
1354
|
+
result = ast.alias(
|
|
1355
|
+
name=".".join(n.string for n in name),
|
|
1356
|
+
asname=asname.string if asname else None,
|
|
1357
|
+
**kwargs,
|
|
1358
|
+
)
|
|
1359
|
+
if asname is not None:
|
|
1360
|
+
set_defined_name_token(result, asname)
|
|
1361
|
+
else:
|
|
1362
|
+
set_defined_name_token(result, name[-1])
|
|
1363
|
+
return result
|
|
1364
|
+
|
|
1365
|
+
|
|
1366
|
+
def make_attribute(
|
|
1367
|
+
value: ast.expr,
|
|
1368
|
+
attr: TokenInfo,
|
|
1369
|
+
ctx: ast.expr_context,
|
|
1370
|
+
**kwargs: Unpack[PosAttributes],
|
|
1371
|
+
):
|
|
1372
|
+
return set_defined_name_token(
|
|
1373
|
+
ast.Attribute(
|
|
1374
|
+
value=value,
|
|
1375
|
+
attr=attr.string,
|
|
1376
|
+
ctx=ctx,
|
|
1377
|
+
**kwargs,
|
|
1378
|
+
),
|
|
1379
|
+
attr,
|
|
1380
|
+
ctx,
|
|
1381
|
+
)
|
|
1382
|
+
|
|
1383
|
+
|
|
1384
|
+
_IMPORT_FROM_NAMES = "_typh_import_from_names"
|
|
1385
|
+
|
|
1386
|
+
|
|
1387
|
+
def get_import_from_names(node: ast.ImportFrom) -> list[ast.Name]:
|
|
1388
|
+
return getattr(node, _IMPORT_FROM_NAMES, [])
|
|
1389
|
+
|
|
1390
|
+
|
|
1391
|
+
def set_import_from_names(
|
|
1392
|
+
node: ast.ImportFrom,
|
|
1393
|
+
names: list[ast.Name],
|
|
1394
|
+
):
|
|
1395
|
+
setattr(node, _IMPORT_FROM_NAMES, names)
|
|
1396
|
+
|
|
1397
|
+
|
|
1398
|
+
def clear_import_from_names(node: ast.ImportFrom):
|
|
1399
|
+
if hasattr(node, _IMPORT_FROM_NAMES):
|
|
1400
|
+
delattr(node, _IMPORT_FROM_NAMES)
|
|
1401
|
+
|
|
1402
|
+
|
|
1403
|
+
# Used only the case module part exists.
|
|
1404
|
+
def make_import_from(
|
|
1405
|
+
module: list[TokenInfo] | None,
|
|
1406
|
+
names: list[ast.alias],
|
|
1407
|
+
level: int,
|
|
1408
|
+
**kwargs: Unpack[PosAttributes],
|
|
1409
|
+
) -> ast.ImportFrom:
|
|
1410
|
+
mod_name = ".".join(n.string for n in module) if module else None
|
|
1411
|
+
result = ast.ImportFrom(
|
|
1412
|
+
module=mod_name,
|
|
1413
|
+
names=names,
|
|
1414
|
+
level=level,
|
|
1415
|
+
**kwargs,
|
|
1416
|
+
)
|
|
1417
|
+
if module:
|
|
1418
|
+
import_names = [
|
|
1419
|
+
ast.Name(
|
|
1420
|
+
id=n.string,
|
|
1421
|
+
lineno=n.start[0],
|
|
1422
|
+
col_offset=n.start[1],
|
|
1423
|
+
end_lineno=n.end[0],
|
|
1424
|
+
end_col_offset=n.end[1],
|
|
1425
|
+
ctx=ast.Load(),
|
|
1426
|
+
)
|
|
1427
|
+
for n in module
|
|
1428
|
+
]
|
|
1429
|
+
set_import_from_names(result, import_names)
|
|
897
1430
|
return result
|
|
898
1431
|
|
|
899
1432
|
|
|
@@ -915,6 +1448,7 @@ def set_control_comprehension_def(
|
|
|
915
1448
|
node: ControlComprehension, func_def: ast.FunctionDef | ast.AsyncFunctionDef
|
|
916
1449
|
):
|
|
917
1450
|
setattr(node, _CONTROL_COMPREHENSION, func_def)
|
|
1451
|
+
set_is_internal_name(node)
|
|
918
1452
|
|
|
919
1453
|
|
|
920
1454
|
def clear_is_control_comprehension(node: ControlComprehension) -> None:
|
|
@@ -1283,7 +1817,7 @@ def make_match_comp(
|
|
|
1283
1817
|
ast.Match(
|
|
1284
1818
|
subject=subject,
|
|
1285
1819
|
cases=[
|
|
1286
|
-
|
|
1820
|
+
make_match_case(
|
|
1287
1821
|
pattern=get_case_comp_case(case)[0],
|
|
1288
1822
|
guard=get_case_comp_case(case)[1],
|
|
1289
1823
|
body=[
|
|
@@ -1292,6 +1826,7 @@ def make_match_comp(
|
|
|
1292
1826
|
**get_pos_attributes(case),
|
|
1293
1827
|
)
|
|
1294
1828
|
],
|
|
1829
|
+
**get_pos_attributes(case),
|
|
1295
1830
|
)
|
|
1296
1831
|
for case in cases
|
|
1297
1832
|
],
|
|
@@ -1358,6 +1893,7 @@ def make_if_let_comp(
|
|
|
1358
1893
|
args=_empty_args(),
|
|
1359
1894
|
body=[
|
|
1360
1895
|
make_if_let(
|
|
1896
|
+
"let",
|
|
1361
1897
|
pattern_subjects,
|
|
1362
1898
|
cond,
|
|
1363
1899
|
[ast.Return(value=body, **get_pos_attributes(body))],
|
|
@@ -1563,10 +2099,8 @@ def clear_is_placeholder(node: ast.Name) -> None:
|
|
|
1563
2099
|
|
|
1564
2100
|
_RECORD_LITERAL_FIELDS = "_typh_is_record_literal_fields"
|
|
1565
2101
|
_RECORD_TYPE = "_typh_is_record_literal_type"
|
|
1566
|
-
_RECORD_PATTERN = "_typh_is_record_pattern"
|
|
1567
2102
|
type RecordLiteral = ast.Name
|
|
1568
2103
|
type RecordType = ast.Name
|
|
1569
|
-
type RecordPatternClass = ast.Name
|
|
1570
2104
|
|
|
1571
2105
|
|
|
1572
2106
|
def set_record_literal_fields(
|
|
@@ -1637,24 +2171,29 @@ def make_record_type(
|
|
|
1637
2171
|
return result
|
|
1638
2172
|
|
|
1639
2173
|
|
|
1640
|
-
|
|
1641
|
-
|
|
2174
|
+
_ATTRIBUTES_PATTERN = "_typh_is_attributes_pattern"
|
|
2175
|
+
type AttributesPatternClass = ast.Name
|
|
2176
|
+
|
|
2177
|
+
|
|
2178
|
+
def set_is_attributes_pattern(node: ast.Name, is_record_pattern: bool) -> ast.expr:
|
|
2179
|
+
setattr(node, _ATTRIBUTES_PATTERN, is_record_pattern)
|
|
1642
2180
|
return node
|
|
1643
2181
|
|
|
1644
2182
|
|
|
1645
|
-
def
|
|
1646
|
-
return getattr(node,
|
|
2183
|
+
def is_attributes_pattern(node: ast.Name) -> bool:
|
|
2184
|
+
return getattr(node, _ATTRIBUTES_PATTERN, None) is not None
|
|
1647
2185
|
|
|
1648
2186
|
|
|
1649
|
-
def
|
|
1650
|
-
if hasattr(node,
|
|
1651
|
-
delattr(node,
|
|
2187
|
+
def clear_is_attributes_pattern(node: ast.Name) -> None:
|
|
2188
|
+
if hasattr(node, _ATTRIBUTES_PATTERN):
|
|
2189
|
+
delattr(node, _ATTRIBUTES_PATTERN)
|
|
1652
2190
|
|
|
1653
2191
|
|
|
1654
|
-
def
|
|
2192
|
+
def make_attributes_pattern(
|
|
1655
2193
|
keywords: list[tuple[str, ast.pattern]],
|
|
1656
2194
|
**kwargs: Unpack[PosAttributes],
|
|
1657
2195
|
) -> ast.MatchClass:
|
|
2196
|
+
debug_verbose_print(f"Creating attributes pattern with keywords: {keywords}")
|
|
1658
2197
|
kwd_attrs = [k for k, _ in keywords]
|
|
1659
2198
|
kwd_patterns = [
|
|
1660
2199
|
(
|
|
@@ -1666,17 +2205,19 @@ def make_record_pattern(
|
|
|
1666
2205
|
for k, p in keywords
|
|
1667
2206
|
]
|
|
1668
2207
|
cls_name = ast.Name(
|
|
1669
|
-
id="
|
|
2208
|
+
id="__attribute_pattern",
|
|
1670
2209
|
**kwargs,
|
|
1671
2210
|
)
|
|
1672
|
-
|
|
1673
|
-
|
|
2211
|
+
set_is_attributes_pattern(cls_name, True)
|
|
2212
|
+
result = ast.MatchClass(
|
|
1674
2213
|
cls=cls_name,
|
|
1675
2214
|
patterns=[],
|
|
1676
2215
|
kwd_attrs=kwd_attrs,
|
|
1677
2216
|
kwd_patterns=kwd_patterns,
|
|
1678
2217
|
**pos_attribute_to_range(kwargs),
|
|
1679
2218
|
)
|
|
2219
|
+
debug_verbose_print(f"Created attributes pattern: {ast.dump(result)}")
|
|
2220
|
+
return result
|
|
1680
2221
|
|
|
1681
2222
|
|
|
1682
2223
|
def if_comp_exp(
|
|
@@ -1699,7 +2240,25 @@ def get_postfix_operator_temp_name(symbol: str) -> str:
|
|
|
1699
2240
|
raise ValueError(f"Unknown postfix operator symbol: {symbol}")
|
|
1700
2241
|
|
|
1701
2242
|
|
|
2243
|
+
_IMPORTS = "_typh_imports"


def get_imports(mod: ast.Module) -> dict[tuple[str, str], ast.alias]:
    """Return the import registry attached to *mod*, creating it on first use.

    The registry maps ``(from_module, name)`` pairs to the ``ast.alias``
    inserted for them (see add_import_alias_top).
    """
    if (registry := getattr(mod, _IMPORTS, None)) is None:
        registry = {}
        setattr(mod, _IMPORTS, registry)
    return registry
|
|
2252
|
+
|
|
2253
|
+
|
|
1702
2254
|
def add_import_alias_top(mod: ast.Module, from_module: str, name: str, as_name: str):
|
|
2255
|
+
debug_verbose_print(f"Adding import: from {from_module} import {name} as {as_name}")
|
|
2256
|
+
# Check if already imported.
|
|
2257
|
+
imports = get_imports(mod)
|
|
2258
|
+
if (from_module, name) in imports and imports[
|
|
2259
|
+
(from_module, name)
|
|
2260
|
+
].asname == as_name:
|
|
2261
|
+
return
|
|
1703
2262
|
# Duplicate import is NOT a problem, but better to avoid it for speed.
|
|
1704
2263
|
for stmt in mod.body:
|
|
1705
2264
|
if isinstance(stmt, ast.ImportFrom):
|
|
@@ -1710,28 +2269,73 @@ def add_import_alias_top(mod: ast.Module, from_module: str, name: str, as_name:
|
|
|
1710
2269
|
else:
|
|
1711
2270
|
break # Only check the top sequence of import statements.
|
|
1712
2271
|
# Add import at the top.
|
|
2272
|
+
alias = ast.alias(name=name, asname=as_name, **get_empty_pos_attributes())
|
|
2273
|
+
imports[(from_module, name)] = alias
|
|
1713
2274
|
import_stmt = ast.ImportFrom(
|
|
1714
2275
|
module=from_module,
|
|
1715
|
-
names=[
|
|
1716
|
-
ast.alias(
|
|
1717
|
-
name=name,
|
|
1718
|
-
asname=as_name,
|
|
1719
|
-
**get_empty_pos_attributes(),
|
|
1720
|
-
)
|
|
1721
|
-
],
|
|
2276
|
+
names=[alias],
|
|
1722
2277
|
level=0,
|
|
1723
2278
|
**get_empty_pos_attributes(),
|
|
1724
2279
|
)
|
|
1725
2280
|
mod.body.insert(0, import_stmt)
|
|
1726
2281
|
|
|
1727
2282
|
|
|
1728
|
-
|
|
2283
|
+
_PATTERN_IS_TUPLE = "_typh_pattern_is_tuple"
|
|
2284
|
+
|
|
2285
|
+
|
|
2286
|
+
def set_pattern_is_tuple(pattern: ast.pattern, is_tuple: bool = True) -> ast.pattern:
|
|
2287
|
+
setattr(pattern, _PATTERN_IS_TUPLE, is_tuple)
|
|
2288
|
+
return pattern
|
|
2289
|
+
|
|
2290
|
+
|
|
2291
|
+
def is_pattern_tuple(pattern: ast.pattern) -> bool:
|
|
2292
|
+
return getattr(pattern, _PATTERN_IS_TUPLE, False)
|
|
2293
|
+
|
|
2294
|
+
|
|
2295
|
+
def clear_pattern_is_tuple(pattern: ast.pattern) -> None:
|
|
2296
|
+
if hasattr(pattern, _PATTERN_IS_TUPLE):
|
|
2297
|
+
delattr(pattern, _PATTERN_IS_TUPLE)
|
|
2298
|
+
|
|
2299
|
+
|
|
2300
|
+
def make_tuple_pattern(
    patterns: list[ast.pattern],
    **kwargs: Unpack[PosAttributes],
) -> ast.MatchSequence:
    """Build a sequence pattern tagged as a tuple pattern.

    The tag allows is_pattern_irrefutable to treat the (type-checked)
    tuple destructuring as irrefutable.
    """
    sequence = ast.MatchSequence(
        patterns=patterns,
        **pos_attribute_to_range(kwargs),
    )
    set_pattern_is_tuple(sequence, True)
    return sequence
|
|
2310
|
+
|
|
2311
|
+
|
|
2312
|
+
def is_pattern_irrefutable(
|
|
2313
|
+
pattern: ast.pattern, assume_type_checked: bool = True
|
|
2314
|
+
) -> bool:
|
|
1729
2315
|
if isinstance(pattern, ast.MatchAs):
|
|
1730
|
-
if pattern.pattern is None:
|
|
2316
|
+
if pattern.pattern is None: # Wildcard pattern or variable capture.
|
|
1731
2317
|
return True
|
|
1732
2318
|
return is_pattern_irrefutable(pattern.pattern)
|
|
1733
2319
|
if isinstance(pattern, ast.MatchOr):
|
|
1734
2320
|
return any(is_pattern_irrefutable(p) for p in pattern.patterns)
|
|
2321
|
+
if assume_type_checked: # Irrefutable if type is correct.
|
|
2322
|
+
if isinstance(pattern, ast.MatchClass):
|
|
2323
|
+
for p in pattern.patterns:
|
|
2324
|
+
if not is_pattern_irrefutable(p, assume_type_checked):
|
|
2325
|
+
return False
|
|
2326
|
+
for p in pattern.kwd_patterns:
|
|
2327
|
+
if not is_pattern_irrefutable(p, assume_type_checked):
|
|
2328
|
+
return False
|
|
2329
|
+
return True
|
|
2330
|
+
if isinstance(pattern, ast.MatchSequence):
|
|
2331
|
+
# Sequence is in general refutable due to length mismatch,
|
|
2332
|
+
# but if we assume tuple is type-checked, we can consider it irrefutable.
|
|
2333
|
+
if is_pattern_tuple(pattern):
|
|
2334
|
+
for p in pattern.patterns:
|
|
2335
|
+
if not is_pattern_irrefutable(p, assume_type_checked):
|
|
2336
|
+
return False
|
|
2337
|
+
return True
|
|
2338
|
+
# Other patterns are considered refutable.
|
|
1735
2339
|
return False
|
|
1736
2340
|
|
|
1737
2341
|
|
|
@@ -1739,3 +2343,77 @@ def is_case_irrefutable(case: ast.match_case) -> bool:
|
|
|
1739
2343
|
if case.guard:
|
|
1740
2344
|
return False
|
|
1741
2345
|
return is_pattern_irrefutable(case.pattern)
|
|
2346
|
+
|
|
2347
|
+
|
|
2348
|
+
def make_match_case(
|
|
2349
|
+
pattern: ast.pattern,
|
|
2350
|
+
guard: ast.expr | None,
|
|
2351
|
+
body: list[ast.stmt],
|
|
2352
|
+
**kwargs: Unpack[PosAttributes],
|
|
2353
|
+
) -> ast.match_case:
|
|
2354
|
+
node = ast.match_case(pattern=pattern, guard=guard, body=body)
|
|
2355
|
+
# Append position attributes
|
|
2356
|
+
for key, value in kwargs.items():
|
|
2357
|
+
setattr(node, key, value)
|
|
2358
|
+
return node
|
|
2359
|
+
|
|
2360
|
+
|
|
2361
|
+
def make_arg(
    arg: TokenInfo | None,
    annotation: ast.expr | None,
    **kwargs: Unpack[PosAttributes],
):
    """Build a function-definition argument node.

    When *arg* is None the parameter name is left empty and no
    defined-name token is recorded on the node.
    """
    name = arg.string if arg is not None else ""
    node = ast.arg(arg=name, annotation=annotation, **kwargs)
    if arg is not None:
        set_defined_name_token(node, arg)
    return node
|
|
2375
|
+
|
|
2376
|
+
|
|
2377
|
+
def make_arguments(
    pos_only: Optional[List[Tuple[ast.arg, None]]],
    pos_only_with_default: List[Tuple[ast.arg, Any]],
    param_no_default: Optional[List[ast.arg]],
    param_default: Optional[List[Tuple[ast.arg, Any]]],
    after_star: Optional[
        Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]
    ],
    **kwargs: Unpack[PosAttributes],
) -> ast.arguments:
    """Build a function definition arguments."""
    # Collect the actual (non-None) default expressions: positional-only
    # defaults first, then regular-parameter defaults, matching ast's layout.
    defaults: list = []
    if pos_only_with_default:
        defaults.extend(d for _, d in pos_only_with_default if d is not None)
    if param_default:
        defaults.extend(d for _, d in param_default if d is not None)

    # Positional-only params without defaults also arrive as (arg, None)
    # tuples so both flavors share one shape; fall back to the defaulted
    # list when the plain one is empty.
    effective_pos_only = pos_only or pos_only_with_default
    posonly_args = [a for a, _ in effective_pos_only]

    regular = list(param_no_default or [])
    if param_default:
        regular.extend(a for a, _ in param_default)

    # after_star bundles (*vararg, keyword-only pairs, **kwarg); substitute
    # an empty bundle when there was no star section.
    vararg, kwonly_pairs, kwarg = after_star or (None, [], None)

    node = ast.arguments(
        posonlyargs=posonly_args,
        args=regular,
        defaults=defaults,
        vararg=vararg,
        kwonlyargs=[a for a, _ in kwonly_pairs],
        kw_defaults=[d for _, d in kwonly_pairs],
        kwarg=kwarg,
    )
    # Append position attributes.
    for attr_name, attr_value in kwargs.items():
        setattr(node, attr_name, attr_value)
    return node
|