jaclang 0.7.31__py3-none-any.whl → 0.7.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of jaclang might be problematic.
- jaclang/compiler/parser.py +591 -758
- jaclang/compiler/passes/main/inheritance_pass.py +20 -1
- jaclang/plugin/default.py +13 -2
- jaclang/plugin/tests/fixtures/traversing_save.jac +17 -0
- jaclang/plugin/tests/test_jaseci.py +33 -0
- jaclang/tests/fixtures/base_class_complex_expr.jac +38 -0
- jaclang/tests/test_cli.py +21 -0
- jaclang/utils/profiler.py +62 -0
- {jaclang-0.7.31.dist-info → jaclang-0.7.32.dist-info}/METADATA +1 -1
- {jaclang-0.7.31.dist-info → jaclang-0.7.32.dist-info}/RECORD +12 -9
- {jaclang-0.7.31.dist-info → jaclang-0.7.32.dist-info}/WHEEL +0 -0
- {jaclang-0.7.31.dist-info → jaclang-0.7.32.dist-info}/entry_points.txt +0 -0
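The bulk of this release is a rewrite of the grammar-rule reduction methods in jaclang/compiler/parser.py: instead of indexing into the kid node list and branching on isinstance checks, each rule now reads its nodes through helper calls such as self.match(...), self.consume(...), self.match_token(...), self.consume_token(...), and self.consume_many(...) over self.cur_nodes. The snippet below is a minimal sketch of what such helpers could look like; it is inferred from the calls visible in the diff and is an assumption for illustration, not the actual jaclang implementation (class name and bodies are hypothetical).

# Illustrative sketch only (assumed semantics, not jaclang's real code).
from typing import Optional


class RuleCursorSketch:
    """Hypothetical cursor over the nodes collected for the current grammar rule."""

    def __init__(self, cur_nodes: list) -> None:
        self.cur_nodes = cur_nodes  # nodes belonging to the rule being reduced
        self._pos = 0  # read position within cur_nodes

    def match(self, node_type: type) -> Optional[object]:
        """Return the next node and advance if it is an instance of node_type, else None."""
        if self._pos < len(self.cur_nodes) and isinstance(self.cur_nodes[self._pos], node_type):
            node = self.cur_nodes[self._pos]
            self._pos += 1
            return node
        return None

    def consume(self, node_type: type) -> object:
        """Like match, but treat a missing node as a parse error."""
        node = self.match(node_type)
        if node is None:
            raise SyntaxError(f"expected {node_type.__name__}")
        return node
        # match_token/consume_token would compare a token's name the same way.

With helpers like these, each rule body reads as a linear walk of its grammar production, which is the shape the rewritten methods in the diff below follow.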
jaclang/compiler/parser.py
CHANGED
@@ -557,23 +557,20 @@ class JacParser(Pass):
            kid=self.cur_nodes,
        )

-    def architype_def(self,
+    def architype_def(self, _: None) -> ast.ArchDef:
        """Grammar rule.

        architype_def: abil_to_arch_chain member_block
        """
-
-
-
-
-
-
-
-            )
-        else:
-            raise self.ice()
+        archref = self.consume(ast.ArchRefChain)
+        subnodelist = self.consume(ast.SubNodeList)
+        return ast.ArchDef(
+            target=archref,
+            body=subnodelist,
+            kid=self.cur_nodes,
+        )

-    def arch_type(self,
+    def arch_type(self, _: None) -> ast.Token:
        """Grammar rule.

        arch_type: KW_WALKER
@@ -581,25 +578,19 @@ class JacParser(Pass):
                  | KW_EDGE
                  | KW_NODE
        """
-
-            return kid[0]
-        else:
-            raise self.ice()
+        return self.consume(ast.Token)

-    def decorators(self,
+    def decorators(self, _: None) -> ast.SubNodeList[ast.Expr]:
        """Grammar rule.

        decorators: (DECOR_OP atomic_chain)+
        """
-
-
-
-
-
-
-            )
-        else:
-            raise self.ice()
+        self.consume_token(Tok.DECOR_OP)
+        return ast.SubNodeList[ast.Expr](
+            items=self.consume_many(ast.Expr),
+            delim=Tok.DECOR_OP,
+            kid=self.cur_nodes,
+        )

    def inherited_archs(self, kid: list[ast.AstNode]) -> ast.SubNodeList[ast.Expr]:
        """Grammar rule.
@@ -607,39 +598,36 @@ class JacParser(Pass):
        inherited_archs: LT (atomic_chain COMMA)* atomic_chain GT
                       | COLON (atomic_chain COMMA)* atomic_chain COLON
        """
-
-
-
-
-
-            )
+        self.match_token(Tok.LT) or self.consume_token(Tok.COLON)
+        items: list = []
+        while inherited_arch := self.match(ast.Expr):
+            items.append(inherited_arch)
+            self.match_token(Tok.COMMA)
+        self.match_token(Tok.LT) or self.consume_token(Tok.COLON)
+        return ast.SubNodeList[ast.Expr](items=items, delim=Tok.COMMA, kid=kid)

-    def sub_name(self,
+    def sub_name(self, _: None) -> ast.SubTag[ast.Name]:
        """Grammar rule.

        sub_name: COLON NAME
        """
-
-
-
-
-
-
-            raise self.ice()
+        self.consume_token(Tok.COLON)
+        target = self.consume(ast.Name)
+        return ast.SubTag(
+            tag=target,
+            kid=self.cur_nodes,
+        )

-    def named_ref(self,
+    def named_ref(self, _: None) -> ast.NameAtom:
        """Grammar rule.

        named_ref: special_ref
                 | KWESC_NAME
                 | NAME
        """
-
-            return kid[0]
-        else:
-            raise self.ice()
+        return self.consume(ast.NameAtom)

-    def special_ref(self,
+    def special_ref(self, _: None) -> ast.SpecialVarRef:
        """Grammar rule.

        special_ref: KW_INIT
@@ -649,10 +637,7 @@ class JacParser(Pass):
                   | KW_SELF
                   | KW_HERE
        """
-
-            return ast.SpecialVarRef(var=kid[0])
-        else:
-            raise self.ice()
+        return ast.SpecialVarRef(var=self.consume(ast.Name))

    def enum(self, _: None) -> ast.Enum | ast.EnumDef:
        """Grammar rule.
@@ -692,32 +677,35 @@ class JacParser(Pass):
            kid=self.cur_nodes,
        )

-    def enum_def(self,
+    def enum_def(self, _: None) -> ast.EnumDef:
        """Grammar rule.

        enum_def: arch_to_enum_chain enum_block
        """
-
-
-
-
-
-
-
-            )
-        else:
-            raise self.ice()
+        enum_def = self.consume(ast.ArchRefChain)
+        enum_block = self.consume(ast.SubNodeList)
+        return ast.EnumDef(
+            target=enum_def,
+            body=enum_block,
+            kid=self.cur_nodes,
+        )

-    def enum_block(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.EnumBlockStmt]:
+    def enum_block(self, _: None) -> ast.SubNodeList[ast.EnumBlockStmt]:
        """Grammar rule.

        enum_block: LBRACE ((enum_stmt COMMA)* enum_stmt COMMA?)? RBRACE
        """
-
-
-
+        self.consume_token(Tok.LBRACE)
+        enum_statements: list = []
+        while enum_stmt := self.match(ast.EnumBlockStmt):
+            enum_statements.append(enum_stmt)
+            self.match_token(Tok.COMMA)
+        self.consume_token(Tok.RBRACE)
+        return ast.SubNodeList[ast.EnumBlockStmt](
+            items=enum_statements,
+            delim=Tok.COMMA,
+            kid=self.cur_nodes,
+        )

    def enum_stmt(self, _: None) -> ast.EnumBlockStmt:
        """Grammar rule.
@@ -818,19 +806,18 @@ class JacParser(Pass):

        ability_def: arch_to_abil_chain (func_decl | event_clause) code_block
        """
-
-
-
-
-            )
-
-
-
-
-
-
-
-            raise self.ice()
+        target = self.consume(ast.ArchRefChain)
+        signature = self.match(ast.FuncSignature) or self.consume(
+            ast.EventSignature
+        )
+        body = self.consume(ast.SubNodeList)
+
+        return ast.AbilityDef(
+            target=target,
+            signature=signature,
+            body=body,
+            kid=self.cur_nodes,
+        )

    # We need separate production rule for abstract_ability because we don't
    # want to allow regular abilities outside of classed to be abstract.
@@ -917,53 +904,46 @@ class JacParser(Pass):
            kid=self.cur_nodes,
        )

-    def func_decl(self,
+    def func_decl(self, _: None) -> ast.FuncSignature:
        """Grammar rule.

        func_decl: (LPAREN func_decl_params? RPAREN)? (RETURN_HINT (STRING COLON)? expression)?
        """
-        params =
-
-
-
-
-
-
-
-
-
+        params: ast.SubNodeList | None = None
+        return_spec: ast.Expr | None = None
+        semstr: ast.String | None = None
+        if self.match_token(Tok.LPAREN):
+            params = self.match(ast.SubNodeList)
+            self.consume_token(Tok.RPAREN)
+        if self.match_token(Tok.RETURN_HINT):
+            if semstr := self.match(ast.String):
+                self.consume_token(Tok.COLON)
+            return_spec = self.match(ast.Expr)
+        return ast.FuncSignature(
+            semstr=semstr,
+            params=params,
+            return_type=return_spec,
+            kid=(
+                self.cur_nodes
+                if len(self.cur_nodes)
+                else [ast.EmptyToken(ast.JacSource("", self.parse_ref.mod_path))]
+            ),
        )
-        if (isinstance(params, ast.SubNodeList) or params is None) and (
-            isinstance(return_spec, ast.Expr) or return_spec is None
-        ):
-            return ast.FuncSignature(
-                semstr=semstr,
-                params=params,
-                return_type=return_spec,
-                kid=(
-                    kid
-                    if len(kid)
-                    else [
-                        ast.EmptyToken(ast.JacSource("", self.parse_ref.mod_path))
-                    ]
-                ),
-            )
-        else:
-            raise self.ice()

-    def func_decl_params(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.ParamVar]:
+    def func_decl_params(self, _: None) -> ast.SubNodeList[ast.ParamVar]:
        """Grammar rule.

        func_decl_params: (param_var COMMA)* param_var COMMA?
        """
-
-
+        paramvar: list = []
+        while param_stmt := self.match(ast.ParamVar):
+            paramvar.append(param_stmt)
+            self.match_token(Tok.COMMA)
+        return ast.SubNodeList[ast.ParamVar](
+            items=paramvar,
            delim=Tok.COMMA,
-            kid=
+            kid=self.cur_nodes,
        )
-        return ret

    def param_var(self, kid: list[ast.AstNode]) -> ast.ParamVar:
        """Grammar rule.
@@ -1005,21 +985,21 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def member_block(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.ArchBlockStmt]:
+    def member_block(self, _: None) -> ast.SubNodeList[ast.ArchBlockStmt]:
        """Grammar rule.

        member_block: LBRACE member_stmt* RBRACE
        """
+        left_enc = self.consume_token(Tok.LBRACE)
+        items = self.match_many(ast.ArchBlockStmt)
+        right_enc = self.consume_token(Tok.RBRACE)
        ret = ast.SubNodeList[ast.ArchBlockStmt](
-            items=
+            items=items,
            delim=Tok.WS,
-            kid=
+            kid=self.cur_nodes,
        )
-        ret.
-        ret.
-        ret.right_enc = kid[-1] if isinstance(kid[-1], ast.Token) else None
+        ret.left_enc = left_enc
+        ret.right_enc = right_enc
        return ret

    def member_stmt(self, kid: list[ast.AstNode]) -> ast.ArchBlockStmt:
@@ -1073,23 +1053,17 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def has_assign_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.HasVar]:
+    def has_assign_list(self, _: None) -> ast.SubNodeList[ast.HasVar]:
        """Grammar rule.

        has_assign_list: (has_assign_list COMMA)? typed_has_clause
        """
-        consume
-
-
-        if isinstance(kid[0], ast.SubNodeList):
-            consume = kid[0]
-            comma = kid[1]
-            assign = kid[2]
+        if consume := self.match(ast.SubNodeList):
+            comma = self.consume_token(Tok.COMMA)
+            assign = self.consume(ast.HasVar)
            new_kid = [*consume.kid, comma, assign]
        else:
-            assign =
+            assign = self.consume(ast.HasVar)
            new_kid = [assign]
        valid_kid = [i for i in new_kid if isinstance(i, ast.HasVar)]
        return ast.SubNodeList[ast.HasVar](
@@ -1207,15 +1181,17 @@ class JacParser(Pass):
                 | walker_stmt
                 | SEMI
        """
-        if
-
-
+        if (code_block := self.match(ast.CodeBlockStmt)) and len(
+            self.cur_nodes
+        ) < 2:
+            return code_block
+        elif (token := self.match(ast.Token)) and token.name == Tok.KW_YIELD:
            return ast.ExprStmt(
                expr=(
                    expr := ast.YieldExpr(
                        expr=None,
                        with_from=False,
-                        kid=
+                        kid=self.cur_nodes,
                    )
                ),
                in_fstring=False,
@@ -1225,7 +1201,7 @@ class JacParser(Pass):
            return ast.ExprStmt(
                expr=kid[0],
                in_fstring=False,
-                kid=
+                kid=self.cur_nodes,
            )
        elif isinstance(kid[0], ast.CodeBlockStmt):
            kid[0].add_kids_right([kid[1]])
@@ -1233,19 +1209,19 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def typed_ctx_block(self,
+    def typed_ctx_block(self, _: None) -> ast.TypedCtxBlock:
        """Grammar rule.

        typed_ctx_block: RETURN_HINT expression code_block
        """
-
-
-
-
-
-
-
-
+        self.consume_token(Tok.RETURN_HINT)
+        ctx = self.consume(ast.Expr)
+        body = self.consume(ast.SubNodeList)
+        return ast.TypedCtxBlock(
+            type_ctx=ctx,
+            body=body,
+            kid=self.cur_nodes,
+        )

    def if_stmt(self, _: None) -> ast.IfStmt:
        """Grammar rule.
@@ -1545,15 +1521,12 @@ class JacParser(Pass):
            kid=self.cur_nodes,
        )

-    def walker_stmt(self,
+    def walker_stmt(self, _: None) -> ast.CodeBlockStmt:
        """Grammar rule.

        walker_stmt: disengage_stmt | revisit_stmt | visit_stmt | ignore_stmt
        """
-
-            return kid[0]
-        else:
-            raise self.ice()
+        return self.consume(ast.CodeBlockStmt)

    def ignore_stmt(self, _: None) -> ast.IgnoreStmt:
        """Grammar rule.
@@ -1637,70 +1610,55 @@ class JacParser(Pass):
            kid=self.cur_nodes,
        )

-    def assignment(self,
+    def assignment(self, _: None) -> ast.Assignment:
        """Grammar rule.

        assignment: KW_LET? (atomic_chain EQ)+ (yield_expr | expression)
                  | atomic_chain (COLON STRING)? type_tag (EQ (yield_expr | expression))?
                  | atomic_chain aug_op (yield_expr | expression)
        """
-
-
-        is_aug = None
-
-
-
-
-
-
-
-            )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        ):
-            assignees += [chomp[0]]
-            is_aug = chomp[1]
-            chomp = chomp[2:]
-        else:
-            while (
-                len(chomp) > 1
-                and isinstance(chomp[0], ast.Expr)
-                and isinstance(chomp[1], ast.Token)
-                and chomp[1].name == Tok.EQ
-            ):
-                assignees += [chomp[0], chomp[1]]
-                chomp = chomp[2:]
-
-        assignees += chomp
+        assignees: list = []
+        type_tag: ast.SubTag | None = None
+        is_aug: ast.Token | None = None
+        semstr: ast.String | None = None
+
+        is_frozen = bool(self.match_token(Tok.KW_LET))
+        if first_expr := self.match(ast.Expr):
+            assignees.append(first_expr)
+
+        token = self.match(ast.Token)
+        if token and (token.name == Tok.EQ):
+            assignees.append(token)
+            while expr := self.match(ast.Expr):
+                eq = self.match_token(Tok.EQ)
+                assignees.append(expr)
+                if eq:
+                    assignees.append(eq)
+            value = assignees.pop()
+        elif token and (token.name not in {Tok.COLON, Tok.EQ}):
+            is_aug = token
+            value = self.consume(ast.Expr)
+        else:
+            semstr = (
+                self.match(ast.String)
+                if (token and (token.name == Tok.COLON))
+                else None
+            )
+            type_tag = self.consume(ast.SubTag)
+            value = self.consume(ast.Expr) if self.match_token(Tok.EQ) else None
+
        valid_assignees = [i for i in assignees if isinstance(i, (ast.Expr))]
        new_targ = ast.SubNodeList[ast.Expr](
            items=valid_assignees,
            delim=Tok.EQ,
            kid=assignees,
        )
-        kid = [x for x in
+        kid = [x for x in self.cur_nodes if x not in assignees]
        kid.insert(1, new_targ) if is_frozen else kid.insert(0, new_targ)
        if is_aug:
            return ast.Assignment(
                target=new_targ,
-                type_tag=type_tag
+                type_tag=type_tag,
                value=value,
                mutable=is_frozen,
                aug_op=is_aug,
@@ -1708,11 +1666,11 @@ class JacParser(Pass):
            )
        return ast.Assignment(
            target=new_targ,
-            type_tag=type_tag
+            type_tag=type_tag,
            value=value,
            mutable=is_frozen,
            kid=kid,
-            semstr=semstr
+            semstr=semstr,
        )

    def expression(self, _: None) -> ast.Expr:
@@ -1742,24 +1700,19 @@ class JacParser(Pass):
        """
        return self._binary_expr_unwind(self.cur_nodes)

-    def lambda_expr(self,
+    def lambda_expr(self, _: None) -> ast.LambdaExpr:
        """Grammar rule.

        lamda_expr: KW_WITH func_decl_params? (RETURN_HINT expression)? KW_CAN expression
        """
-
-        params = chomp[0] if isinstance(chomp[0], ast.SubNodeList) else None
-        chomp = chomp[1:] if params else chomp
-        return_type = (
-            chomp[1]
-            if isinstance(chomp[0], ast.Token)
-            and chomp[0].name == Tok.RETURN_HINT
-            and isinstance(chomp[1], ast.Expr)
-            else None
-        )
-        chomp = chomp[2:] if return_type else chomp
-        chomp = chomp[1:]
+        return_type: ast.Expr | None = None
        sig_kid: list[ast.AstNode] = []
+        self.consume_token(Tok.KW_WITH)
+        params = self.match(ast.SubNodeList)
+        if self.match_token(Tok.RETURN_HINT):
+            return_type = self.consume(ast.Expr)
+        self.consume_token(Tok.KW_CAN)
+        body = self.consume(ast.Expr)
        if params:
            sig_kid.append(params)
        if return_type:
@@ -1773,16 +1726,13 @@ class JacParser(Pass):
            if params or return_type
            else None
        )
-        new_kid = [i for i in
+        new_kid = [i for i in self.cur_nodes if i != params and i != return_type]
        new_kid.insert(1, signature) if signature else None
-
-
-
-
-
-            )
-        else:
-            raise self.ice()
+        return ast.LambdaExpr(
+            signature=signature,
+            body=body,
+            kid=new_kid,
+        )

    def pipe(self, _: None) -> ast.Expr:
        """Grammar rule.
@@ -1883,7 +1833,7 @@ class JacParser(Pass):
        )
        return self.consume(ast.Expr)

-    def compare(self, _:
+    def compare(self, _: None) -> ast.Expr:
        """Grammar rule.

        compare: (arithmetic cmp_op)* arithmetic
@@ -2040,7 +1990,7 @@ class JacParser(Pass):
        )
        return self._binary_expr_unwind(self.cur_nodes)

-    def aug_op(self,
+    def aug_op(self, _: None) -> ast.Token:
        """Grammar rule.

        aug_op: RSHIFT_EQ
@@ -2057,88 +2007,61 @@ class JacParser(Pass):
              | ADD_EQ
              | WALRUS_EQ
        """
-
-            return kid[0]
-        else:
-            raise self.ice()
+        return self.consume(ast.Token)

-    def atomic_chain(self,
+    def atomic_chain(self, _: None) -> ast.Expr:
        """Grammar rule.

        atomic_chain: atomic_chain NULL_OK? (filter_compr | assign_compr | index_slice)
                    | atomic_chain NULL_OK? (DOT_BKWD | DOT_FWD | DOT) named_ref
                    | (atomic_call | atom | edge_ref_chain)
        """
-        if len(
-            return
-
-
-
-        is_null_ok = False
-        if isinstance(chomp[0], ast.Token) and chomp[0].name == Tok.NULL_OK:
-            is_null_ok = True
-            chomp = chomp[1:]
-        if (
-            len(chomp) == 1
-            and isinstance(chomp[0], ast.AtomExpr)
-            and isinstance(target, ast.Expr)
-        ):
+        if len(self.cur_nodes) == 1:
+            return self.consume(ast.Expr)
+        target = self.consume(ast.Expr)
+        is_null_ok = bool(self.match_token(Tok.NULL_OK))
+        if right := self.match(ast.AtomExpr):
            return ast.AtomTrailer(
                target=target,
-                right=
+                right=right,
                is_null_ok=is_null_ok,
                is_attr=False,
-                kid=
-            )
-        elif (
-            len(chomp) > 1
-            and isinstance(chomp[0], ast.Token)
-            and isinstance(chomp[1], (ast.AtomExpr, ast.AtomTrailer))
-            and isinstance(target, ast.Expr)
-        ):
-            return ast.AtomTrailer(
-                target=(target if chomp[0].name != Tok.DOT_BKWD else chomp[1]),
-                right=(chomp[1] if chomp[0].name != Tok.DOT_BKWD else target),
-                is_null_ok=is_null_ok,
-                is_attr=True,
-                kid=kid,
+                kid=self.cur_nodes,
            )
-
-
+        token = (
+            self.match_token(Tok.DOT_BKWD)
+            or self.match_token(Tok.DOT_FWD)
+            or self.consume_token(Tok.DOT)
+        )
+        name = self.match(ast.AtomExpr) or self.consume(ast.AtomTrailer)
+        return ast.AtomTrailer(
+            target=(target if token.name != Tok.DOT_BKWD else name),
+            right=(name if token.name != Tok.DOT_BKWD else target),
+            is_null_ok=is_null_ok,
+            is_attr=True,
+            kid=self.cur_nodes,
+        )

-    def atomic_call(self,
+    def atomic_call(self, _: None) -> ast.FuncCall:
        """Grammar rule.

        atomic_call: atomic_chain LPAREN param_list? (KW_BY atomic_call)? RPAREN
        """
-
-
-
-
-
-
-
-
-
-
-
-
-
-            len(kid) == 4
-            and isinstance(kid[0], ast.Expr)
-            and isinstance(kid[2], ast.SubNodeList)
-        ):
-            return ast.FuncCall(
-                target=kid[0], params=kid[2], genai_call=None, kid=kid
-            )
-        elif len(kid) == 3 and isinstance(kid[0], ast.Expr):
-            return ast.FuncCall(
-                target=kid[0], params=None, genai_call=None, kid=kid
-            )
-        else:
-            raise self.ice()
+        genai_call: ast.FuncCall | None = None
+        target = self.consume(ast.Expr)
+        self.consume_token(Tok.LPAREN)
+        params = self.match(ast.SubNodeList)
+        if self.match_token(Tok.KW_BY):
+            genai_call = self.consume(ast.FuncCall)
+        self.consume_token(Tok.RPAREN)
+        return ast.FuncCall(
+            target=target,
+            params=params,
+            genai_call=genai_call,
+            kid=self.cur_nodes,
+        )

-    def index_slice(self,
+    def index_slice(self, _: None) -> ast.IndexSlice:
        """Grammar rule.

        index_slice: LSQUARE \
@@ -2147,61 +2070,42 @@ class JacParser(Pass):
                     RSQUARE
                   | list_val
        """
-        if len(
-            index =
-            if
-                if not index.values:
-                    raise self.ice()
-                if len(index.values.items) == 1:
-                    expr = index.values.items[0] if index.values else None
-                else:
-                    sublist = ast.SubNodeList[ast.Expr | ast.KWPair](
-                        items=[*index.values.items], delim=Tok.COMMA, kid=index.kid
-                    )
-                    expr = ast.TupleVal(values=sublist, kid=[sublist])
-                    kid = [expr]
-                return ast.IndexSlice(
-                    slices=[ast.IndexSlice.Slice(start=expr, stop=None, step=None)],
-                    is_range=False,
-                    kid=kid,
-                )
-            else:
+        if len(self.cur_nodes) == 1:
+            index = self.consume(ast.ListVal)
+            if not index.values:
                raise self.ice()
+            if len(index.values.items) == 1:
+                expr = index.values.items[0] if index.values else None
+                kid = self.cur_nodes
+            else:
+                sublist = ast.SubNodeList[ast.Expr | ast.KWPair](
+                    items=[*index.values.items], delim=Tok.COMMA, kid=index.kid
+                )
+                expr = ast.TupleVal(values=sublist, kid=[sublist])
+                kid = [expr]
+            return ast.IndexSlice(
+                slices=[ast.IndexSlice.Slice(start=expr, stop=None, step=None)],
+                is_range=False,
+                kid=kid,
+            )
        else:
+            self.consume_token(Tok.LSQUARE)
            slices: list[ast.IndexSlice.Slice] = []
-
-
-
-
-
-
-
-
-                expr1 = chomp[0]
-                chomp.pop(0)
-                chomp.pop(0)
-
-                if isinstance(chomp[0], ast.Expr):
-                    expr2 = chomp[0]
-                    chomp.pop(0)
-
-                if isinstance(chomp[0], ast.Token) and chomp[0].name == Tok.COLON:
-                    chomp.pop(0)
-                    if isinstance(chomp[0], ast.Expr):
-                        expr3 = chomp[0]
-                        chomp.pop(0)
-
-                if isinstance(chomp[0], ast.Token) and chomp[0].name == Tok.COMMA:
-                    chomp.pop(0)
-
+            while not self.match_token(Tok.RSQUARE):
+                expr1 = self.match(ast.Expr)
+                self.consume_token(Tok.COLON)
+                expr2 = self.match(ast.Expr)
+                expr3 = (
+                    self.match(ast.Expr) if self.match_token(Tok.COLON) else None
+                )
+                self.match_token(Tok.COMMA)
                slices.append(
                    ast.IndexSlice.Slice(start=expr1, stop=expr2, step=expr3)
                )
-
            return ast.IndexSlice(
                slices=slices,
                is_range=True,
-                kid=
+                kid=self.cur_nodes,
            )

    def atom(self, _: None) -> ast.Expr:
@@ -2262,42 +2166,34 @@ class JacParser(Pass):
        """
        return self.consume(ast.AtomExpr)

-    def multistring(self,
+    def multistring(self, _: None) -> ast.AtomExpr:
        """Grammar rule.

        multistring: (fstring | STRING)+
        """
-        valid_strs = [
-
-
-
-
-
-
-            raise self.ice()
+        valid_strs = [self.match(ast.String) or self.consume(ast.FString)]
+        while node := (self.match(ast.String) or self.match(ast.FString)):
+            valid_strs.append(node)
+        return ast.MultiString(
+            strings=valid_strs,
+            kid=self.cur_nodes,
+        )

-    def fstring(self,
+    def fstring(self, _: None) -> ast.FString:
        """Grammar rule.

        fstring: FSTR_START fstr_parts FSTR_END
               | FSTR_SQ_START fstr_sq_parts FSTR_SQ_END
        """
-
-
-
-
-
-
-
-            parts=kid[1],
-            kid=kid,
-        )
-        else:
-            raise self.ice()
+        self.match_token(Tok.FSTR_START) or self.consume_token(Tok.FSTR_SQ_START)
+        target = self.match(ast.SubNodeList)
+        self.match_token(Tok.FSTR_END) or self.consume_token(Tok.FSTR_SQ_END)
+        return ast.FString(
+            parts=target,
+            kid=self.cur_nodes,
+        )

-    def fstr_parts(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.String | ast.ExprStmt]:
+    def fstr_parts(self, _: None) -> ast.SubNodeList[ast.String | ast.ExprStmt]:
        """Grammar rule.

        fstr_parts: (FSTR_PIECE | FSTR_BESC | LBRACE expression RBRACE )*
@@ -2308,7 +2204,7 @@ class JacParser(Pass):
                if isinstance(i, ast.String)
                else ast.ExprStmt(expr=i, in_fstring=True, kid=[i])
            )
-            for i in
+            for i in self.cur_nodes
            if isinstance(i, ast.Expr)
        ]
        return ast.SubNodeList[ast.String | ast.ExprStmt](
@@ -2317,9 +2213,7 @@ class JacParser(Pass):
            kid=valid_parts,
        )

-    def fstr_sq_parts(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.String | ast.ExprStmt]:
+    def fstr_sq_parts(self, _: None) -> ast.SubNodeList[ast.String | ast.ExprStmt]:
        """Grammar rule.

        fstr_sq_parts: (FSTR_SQ_PIECE | FSTR_BESC | LBRACE expression RBRACE )*
@@ -2330,7 +2224,7 @@ class JacParser(Pass):
                if isinstance(i, ast.String)
                else ast.ExprStmt(expr=i, in_fstring=True, kid=[i])
            )
-            for i in
+            for i in self.cur_nodes
            if isinstance(i, ast.Expr)
        ]
        return ast.SubNodeList[ast.String | ast.ExprStmt](
@@ -2357,41 +2251,32 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def tuple_val(self,
+    def tuple_val(self, _: None) -> ast.TupleVal:
        """Grammar rule.

        tuple_val: LPAREN tuple_list? RPAREN
        """
-
-
-
-
-
-
-
-            values=kid[1],
-            kid=kid,
-        )
-        else:
-            raise self.ice()
+        self.consume_token(Tok.LPAREN)
+        target = self.match(ast.SubNodeList)
+        self.consume_token(Tok.RPAREN)
+        return ast.TupleVal(
+            values=target,
+            kid=self.cur_nodes,
+        )

-    def set_val(self,
+    def set_val(self, _: None) -> ast.SetVal:
        """Grammar rule.

        set_val: LBRACE expr_list COMMA? RBRACE
        """
-
-
-
-
-
-
-
-
-            kid=kid,
-        )
-        else:
-            raise self.ice()
+        self.match_token(Tok.LBRACE)
+        expr_list = self.match(ast.SubNodeList)
+        self.match_token(Tok.COMMA)
+        self.match_token(Tok.RBRACE)
+        return ast.SetVal(
+            values=expr_list,
+            kid=self.cur_nodes,
+        )

    def expr_list(self, kid: list[ast.AstNode]) -> ast.SubNodeList[ast.Expr]:
        """Grammar rule.
@@ -2421,16 +2306,12 @@ class JacParser(Pass):

        kw_expr_list: (kw_expr_list COMMA)? kw_expr
        """
-        consume
-
-
-        if isinstance(kid[0], ast.SubNodeList):
-            consume = kid[0]
-            comma = kid[1]
-            expr = kid[2]
+        if consume := self.match(ast.SubNodeList):
+            comma = self.consume_token(Tok.COMMA)
+            expr = self.consume(ast.KWPair)
            new_kid = [*consume.kid, comma, expr]
        else:
-            expr =
+            expr = self.consume(ast.KWPair)
            new_kid = [expr]
        valid_kid = [i for i in new_kid if isinstance(i, ast.KWPair)]
        return ast.SubNodeList[ast.KWPair](
@@ -2475,9 +2356,7 @@ class JacParser(Pass):
            kid=kid,
        )

-    def tuple_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.Expr | ast.KWPair]:
+    def tuple_list(self, _: None) -> ast.SubNodeList[ast.Expr | ast.KWPair]:
        """Grammar rule.

        tuple_list: expression COMMA expr_list COMMA kw_expr_list COMMA?
@@ -2486,202 +2365,176 @@ class JacParser(Pass):
                  | expression COMMA
                  | kw_expr_list COMMA?
        """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        # The chomp will be like this:
-        # [subnode_list, [COMMA, [kw_expr_list, [COMMA]]]]
-        expr_list = []
-        if len(chomp):
-            expr_list = chomp[0].kid  # Get the kids subnode list.
-            chomp = chomp[2:]  # Get rid of the subnode list and a comma if exists.
-            if len(chomp):
-                # The chomp will be like this: [kw_expr_list, [COMMA]]
-                expr_list = [*expr_list, *chomp[0].kid]
-        expr_list = [first_expr, *expr_list]
+        if first_expr := self.match(ast.SubNodeList):
+            comma = self.match_token(Tok.COMMA)
+            if comma:
+                first_expr.kid.append(comma)
+            return first_expr
+        expr = self.consume(ast.Expr)
+        self.consume_token(Tok.COMMA)
+        second_expr = self.match(ast.SubNodeList)
+        self.match_token(Tok.COMMA)
+        kw_expr_list = self.match(ast.SubNodeList)
+        self.match_token(Tok.COMMA)
+        expr_list: list = []
+        if second_expr:
+            expr_list = second_expr.kid
+        if kw_expr_list:
+            expr_list = [*expr_list, *kw_expr_list.kid]
+        expr_list = [expr, *expr_list]
        valid_kid = [i for i in expr_list if isinstance(i, (ast.Expr, ast.KWPair))]
        return ast.SubNodeList[ast.Expr | ast.KWPair](
            items=valid_kid,
            delim=Tok.COMMA,
-            kid=
+            kid=self.cur_nodes,
        )

-    def dict_val(self,
+    def dict_val(self, _: None) -> ast.DictVal:
        """Grammar rule.

        dict_val: LBRACE ((kv_pair COMMA)* kv_pair COMMA?)? RBRACE
        """
-
-
-
+        self.consume_token(Tok.LBRACE)
+        kv_pairs: list = []
+        while item := self.match(ast.KVPair):
+            kv_pairs.append(item)
+            self.match_token(Tok.COMMA)
+        self.consume_token(Tok.RBRACE)
+        return ast.DictVal(
+            kv_pairs=kv_pairs,
+            kid=self.cur_nodes,
        )
-        ret.kv_pairs = [i for i in kid if isinstance(i, ast.KVPair)]
-        return ret

-    def kv_pair(self,
+    def kv_pair(self, _: None) -> ast.KVPair:
        """Grammar rule.

        kv_pair: expression COLON expression | STAR_POW expression
        """
-        if (
-
-            and isinstance(kid[0], ast.Expr)
-            and isinstance(kid[2], ast.Expr)
-        ):
-            return ast.KVPair(
-                key=kid[0],
-                value=kid[2],
-                kid=kid,
-            )
-        elif len(kid) == 2 and isinstance(kid[1], ast.Expr):
+        if self.match_token(Tok.STAR_POW):
+            value = self.consume(ast.Expr)
            return ast.KVPair(
                key=None,
-                value=
-                kid=
+                value=value,
+                kid=self.cur_nodes,
            )
-
-
+        key = self.consume(ast.Expr)
+        self.consume_token(Tok.COLON)
+        value = self.consume(ast.Expr)
+        return ast.KVPair(
+            key=key,
+            value=value,
+            kid=self.cur_nodes,
+        )

-    def list_compr(self,
+    def list_compr(self, _: None) -> ast.ListCompr:
        """Grammar rule.

        list_compr: LSQUARE expression inner_compr+ RSQUARE
        """
-
-
-
-
-
-
-
-
-
+        self.consume_token(Tok.LSQUARE)
+        out_expr = self.consume(ast.Expr)
+        comprs = self.consume_many(ast.InnerCompr)
+        self.consume_token(Tok.RSQUARE)
+        return ast.ListCompr(
+            out_expr=out_expr,
+            compr=comprs,
+            kid=self.cur_nodes,
+        )

-    def gen_compr(self,
+    def gen_compr(self, _: None) -> ast.GenCompr:
        """Grammar rule.

        gen_compr: LPAREN expression inner_compr+ RPAREN
        """
-
-
-
-
-
-
-
-
-
+        self.consume_token(Tok.LPAREN)
+        out_expr = self.consume(ast.Expr)
+        comprs = self.consume_many(ast.InnerCompr)
+        self.consume_token(Tok.RPAREN)
+        return ast.GenCompr(
+            out_expr=out_expr,
+            compr=comprs,
+            kid=self.cur_nodes,
+        )

-    def set_compr(self,
+    def set_compr(self, _: None) -> ast.SetCompr:
        """Grammar rule.

        set_compr: LBRACE expression inner_compr+ RBRACE
        """
-
-
-
-
-
-
-
-
-
+        self.consume_token(Tok.LBRACE)
+        out_expr = self.consume(ast.Expr)
+        comprs = self.consume_many(ast.InnerCompr)
+        self.consume_token(Tok.RBRACE)
+        return ast.SetCompr(
+            out_expr=out_expr,
+            compr=comprs,
+            kid=self.cur_nodes,
+        )

-    def dict_compr(self,
+    def dict_compr(self, _: None) -> ast.DictCompr:
        """Grammar rule.

        dict_compr: LBRACE kv_pair inner_compr+ RBRACE
        """
-
-
-
-
-
-
-
-
-
+        self.consume_token(Tok.LBRACE)
+        kv_pair = self.consume(ast.KVPair)
+        comprs = self.consume_many(ast.InnerCompr)
+        self.consume_token(Tok.RBRACE)
+        return ast.DictCompr(
+            kv_pair=kv_pair,
+            compr=comprs,
+            kid=self.cur_nodes,
+        )

-    def inner_compr(self,
+    def inner_compr(self, _: None) -> ast.InnerCompr:
        """Grammar rule.

        inner_compr: KW_ASYNC? KW_FOR atomic_chain KW_IN pipe_call (KW_IF walrus_assign)*
        """
-
-        is_async = bool(
-
+        conditional: list = []
+        is_async = bool(self.match_token(Tok.KW_ASYNC))
+        self.consume_token(Tok.KW_FOR)
+        target = self.consume(ast.Expr)
+        self.consume_token(Tok.KW_IN)
+        collection = self.consume(ast.Expr)
+        while self.match_token(Tok.KW_IF):
+            conditional.append(self.consume(ast.Expr))
+        return ast.InnerCompr(
+            is_async=is_async,
+            target=target,
+            collection=collection,
+            conditional=conditional,
+            kid=self.cur_nodes,
        )
-        chomp = chomp[1:] if is_async else chomp
-        chomp = chomp[1:]
-        if isinstance(chomp[0], ast.Expr) and isinstance(chomp[2], ast.Expr):
-            return ast.InnerCompr(
-                is_async=is_async,
-                target=chomp[0],
-                collection=chomp[2],
-                conditional=(
-                    [i for i in chomp[4:] if isinstance(i, ast.Expr)]
-                    if len(chomp) > 4 and isinstance(chomp[4], ast.Expr)
-                    else None
-                ),
-                kid=chomp,
-            )
-        else:
-            raise self.ice()

-    def param_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.Expr | ast.KWPair]:
+    def param_list(self, _: None) -> ast.SubNodeList[ast.Expr | ast.KWPair]:
        """Grammar rule.

        param_list: expr_list COMMA kw_expr_list COMMA?
                  | kw_expr_list COMMA?
                  | expr_list COMMA?
        """
-
-
-
-
-
-
-
-            if (
-                ends_with_comma
-            ):  # Append the trailing comma to the subnode list.
-                kid[0].kid.append(kid[1])
-                return kid[0]
-            else:
-                raise self.ice()
-        elif isinstance(kid[0], ast.SubNodeList) and isinstance(
-            kid[2], ast.SubNodeList
-        ):
+        kw_expr_list: ast.SubNodeList | None = None
+        expr_list = self.consume(ast.SubNodeList)
+        if len(self.cur_nodes) > 2:
+            self.consume_token(Tok.COMMA)
+            kw_expr_list = self.consume(ast.SubNodeList)
+        ends_comma = self.match_token(Tok.COMMA)
+        if kw_expr_list:
            valid_kid = [
                i
-                for i in [*
+                for i in [*expr_list.items, *kw_expr_list.items]
                if isinstance(i, (ast.Expr, ast.KWPair))
            ]
-
-
-
-
-
-
-
-
-
+            return ast.SubNodeList[ast.Expr | ast.KWPair](
+                items=valid_kid,
+                delim=Tok.COMMA,
+                kid=self.cur_nodes,
+            )
+        else:
+            if ends_comma:
+                expr_list.kid.append(ends_comma)
+            return expr_list

    def assignment_list(
        self, kid: list[ast.AstNode]
@@ -2928,51 +2781,60 @@ class JacParser(Pass):
        """
        return self.consume(ast.EdgeOpRef)

-    def edge_to(self,
+    def edge_to(self, _: None) -> ast.EdgeOpRef:
        """Grammar rule.

        edge_to: ARROW_R_P1 typed_filter_compare_list ARROW_R_P2
               | ARROW_R
        """
-
-
-            return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.OUT, kid=kid)
+        if self.match_token(Tok.ARROW_R):
+            fcond = None
        else:
-
+            self.consume_token(Tok.ARROW_R_P1)
+            fcond = self.consume(ast.FilterCompr)
+            self.consume_token(Tok.ARROW_R_P2)
+        return ast.EdgeOpRef(
+            filter_cond=fcond, edge_dir=EdgeDir.OUT, kid=self.cur_nodes
+        )

-    def edge_from(self,
+    def edge_from(self, _: None) -> ast.EdgeOpRef:
        """Grammar rule.

        edge_from: ARROW_L_P1 typed_filter_compare_list ARROW_L_P2
                 | ARROW_L
        """
-
-
-            return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.IN, kid=kid)
+        if self.match_token(Tok.ARROW_L):
+            fcond = None
        else:
-
+            self.consume_token(Tok.ARROW_L_P1)
+            fcond = self.consume(ast.FilterCompr)
+            self.consume_token(Tok.ARROW_L_P2)
+        return ast.EdgeOpRef(
+            filter_cond=fcond, edge_dir=EdgeDir.IN, kid=self.cur_nodes
+        )

-    def edge_any(self,
+    def edge_any(self, _: None) -> ast.EdgeOpRef:
        """Grammar rule.

        edge_any: ARROW_L_P1 typed_filter_compare_list ARROW_R_P2
                | ARROW_BI
        """
-
-
-            return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.ANY, kid=kid)
+        if self.match_token(Tok.ARROW_BI):
+            fcond = None
        else:
-
+            self.consume_token(Tok.ARROW_L_P1)
+            fcond = self.consume(ast.FilterCompr)
+            self.consume_token(Tok.ARROW_R_P2)
+        return ast.EdgeOpRef(
+            filter_cond=fcond, edge_dir=EdgeDir.ANY, kid=self.cur_nodes
+        )

-    def connect_op(self,
+    def connect_op(self, _: None) -> ast.ConnectOp:
        """Grammar rule.

        connect_op: connect_from | connect_to | connect_any
        """
-
-            return kid[0]
-        else:
-            raise self.ice()
+        return self.consume(ast.ConnectOp)

    def disconnect_op(self, kid: list[ast.AstNode]) -> ast.DisconnectOp:
        """Grammar rule.
@@ -2987,118 +2849,136 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def connect_to(self,
+    def connect_to(self, _: None) -> ast.ConnectOp:
        """Grammar rule.

        connect_to: CARROW_R_P1 expression (COLON kw_expr_list)? CARROW_R_P2
                  | CARROW_R
        """
-        conn_type
-
-        if (
-
-
-
-
-            if conn_assign
+        conn_type: ast.Expr | None = None
+        conn_assign_sub: ast.SubNodeList | None = None
+        if self.match_token(Tok.CARROW_R_P1):
+            conn_type = self.consume(ast.Expr)
+            conn_assign_sub = (
+                self.consume(ast.SubNodeList)
+                if self.match_token(Tok.COLON)
                else None
            )
-
-                kid[3] = conn_assign
-            return ast.ConnectOp(
-                conn_type=conn_type,
-                conn_assign=conn_assign,
-                edge_dir=EdgeDir.OUT,
-                kid=kid,
-            )
+            self.consume_token(Tok.CARROW_R_P2)
        else:
-
+            self.consume_token(Tok.CARROW_R)
+        conn_assign = (
+            ast.AssignCompr(assigns=conn_assign_sub, kid=[conn_assign_sub])
+            if conn_assign_sub
+            else None
+        )
+        if conn_assign:
+            self.cur_nodes[3] = conn_assign
+        return ast.ConnectOp(
+            conn_type=conn_type,
+            conn_assign=conn_assign,
+            edge_dir=EdgeDir.OUT,
+            kid=self.cur_nodes,
+        )

-    def connect_from(self,
+    def connect_from(self, _: None) -> ast.ConnectOp:
        """Grammar rule.

        connect_from: CARROW_L_P1 expression (COLON kw_expr_list)? CARROW_L_P2
                    | CARROW_L
        """
-        conn_type
-
-        if (
-
-
-
-
-            if conn_assign
+        conn_type: ast.Expr | None = None
+        conn_assign_sub: ast.SubNodeList | None = None
+        if self.match_token(Tok.CARROW_L_P1):
+            conn_type = self.consume(ast.Expr)
+            conn_assign_sub = (
+                self.consume(ast.SubNodeList)
+                if self.match_token(Tok.COLON)
                else None
            )
-
-                kid[3] = conn_assign
-            return ast.ConnectOp(
-                conn_type=conn_type,
-                conn_assign=conn_assign,
-                edge_dir=EdgeDir.IN,
-                kid=kid,
-            )
+            self.consume_token(Tok.CARROW_L_P2)
        else:
-
+            self.consume_token(Tok.CARROW_L)
+        conn_assign = (
+            ast.AssignCompr(assigns=conn_assign_sub, kid=[conn_assign_sub])
+            if conn_assign_sub
+            else None
+        )
+        if conn_assign:
+            self.cur_nodes[3] = conn_assign
+        return ast.ConnectOp(
+            conn_type=conn_type,
+            conn_assign=conn_assign,
+            edge_dir=EdgeDir.IN,
+            kid=self.cur_nodes,
+        )

-    def connect_any(self,
+    def connect_any(self, _: None) -> ast.ConnectOp:
        """Grammar rule.

        connect_any: CARROW_BI | CARROW_L_P1 expression (COLON kw_expr_list)? CARROW_R_P2
        """
-        conn_type
-
-        if (
-
-
-
-
-            if conn_assign
+        conn_type: ast.Expr | None = None
+        conn_assign_sub: ast.SubNodeList | None = None
+        if self.match_token(Tok.CARROW_L_P1):
+            conn_type = self.consume(ast.Expr)
+            conn_assign_sub = (
+                self.consume(ast.SubNodeList)
+                if self.match_token(Tok.COLON)
                else None
            )
-
-                kid[3] = conn_assign
-            return ast.ConnectOp(
-                conn_type=conn_type,
-                conn_assign=conn_assign,
-                edge_dir=EdgeDir.ANY,
-                kid=kid,
-            )
+            self.consume_token(Tok.CARROW_R_P2)
        else:
-
+            self.consume_token(Tok.CARROW_BI)
+        conn_assign = (
+            ast.AssignCompr(assigns=conn_assign_sub, kid=[conn_assign_sub])
+            if conn_assign_sub
+            else None
+        )
+        if conn_assign:
+            self.cur_nodes[3] = conn_assign
+        return ast.ConnectOp(
+            conn_type=conn_type,
+            conn_assign=conn_assign,
+            edge_dir=EdgeDir.ANY,
+            kid=self.cur_nodes,
+        )

-    def filter_compr(self,
+    def filter_compr(self, _: None) -> ast.FilterCompr:
        """Grammar rule.

        filter_compr: LPAREN NULL_OK filter_compare_list RPAREN
                    | LPAREN TYPE_OP NULL_OK typed_filter_compare_list RPAREN
        """
-
-
-
-
-
-
-
-
+        kid = self.cur_nodes
+        self.consume_token(Tok.LPAREN)
+        if self.match_token(Tok.TYPE_OP):
+            self.consume_token(Tok.NULL_OK)
+            f_type = self.consume(ast.FilterCompr)
+            f_type.add_kids_left(kid[:3])
+            f_type.add_kids_right(kid[4:])
+            self.consume_token(Tok.RPAREN)
+            return f_type
+        self.consume_token(Tok.NULL_OK)
+        compares = self.consume(ast.SubNodeList)
+        self.consume_token(Tok.RPAREN)
+        return ast.FilterCompr(
+            compares=compares,
+            f_type=None,
+            kid=self.cur_nodes,
+        )

-    def filter_compare_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.CompareExpr]:
+    def filter_compare_list(self, _: None) -> ast.SubNodeList[ast.CompareExpr]:
        """Grammar rule.

        filter_compare_list: (filter_compare_list COMMA)? filter_compare_item
        """
-        consume
-
-
-        if isinstance(kid[0], ast.SubNodeList):
-            consume = kid[0]
-            comma = kid[1]
-            expr = kid[2]
+        if consume := self.match(ast.SubNodeList):
+            comma = self.consume_token(Tok.COMMA)
+            expr = self.consume(ast.CompareExpr)
            new_kid = [*consume.kid, comma, expr]
        else:
-            expr =
+            expr = self.consume(ast.CompareExpr)
            new_kid = [expr]
        valid_kid = [i for i in new_kid if isinstance(i, ast.CompareExpr)]
        return ast.SubNodeList[ast.CompareExpr](
@@ -3129,16 +3009,17 @@ class JacParser(Pass):
        else:
            raise self.ice()

-    def filter_compare_item(self,
+    def filter_compare_item(self, _: None) -> ast.CompareExpr:
        """Grammar rule.

        filter_compare_item: name_ref cmp_op expression
        """
-
-
-
-
-
+        name_ref = self.consume(ast.Name)
+        cmp_op = self.consume(ast.Token)
+        expr = self.consume(ast.Expr)
+        return ast.CompareExpr(
+            left=name_ref, ops=[cmp_op], rights=[expr], kid=self.cur_nodes
+        )

    def assign_compr(self, _: None) -> ast.AssignCompr:
        """Grammar rule.
@@ -3339,153 +3220,105 @@ class JacParser(Pass):
            value = self.consume(ast.MatchPattern)
            return ast.MatchKVPair(key=pattern, value=value, kid=self.cur_nodes)

-    def class_pattern(self,
+    def class_pattern(self, _: None) -> ast.MatchArch:
        """Grammar rule.

        class_pattern: NAME (DOT NAME)* LPAREN kw_pattern_list? RPAREN
                     | NAME (DOT NAME)* LPAREN pattern_list (COMMA kw_pattern_list)? RPAREN
        """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            else
-
-
-        lparen = chomp[0]
-        rapren = chomp[-1]
-        first = chomp[1]
-        if len(chomp) > 4:
-            second = chomp[3]
-            comma = chomp[2]
-        else:
-            second = None
-            comma = None
+        cur_element = self.consume(ast.NameAtom)
+        trailer: ast.AtomTrailer | None = None
+        while dot := self.match_token(Tok.DOT):
+            target = trailer if trailer else cur_element
+            right = self.consume(ast.Expr)
+            trailer = ast.AtomTrailer(
+                target=target,
+                right=right,
+                is_attr=True,
+                is_null_ok=False,
+                kid=[target, dot, right],
+            )
+        name = trailer if trailer else cur_element
+        if not isinstance(name, (ast.NameAtom, ast.AtomTrailer)):
+            raise TypeError(
+                f"Expected name to be either NameAtom or AtomTrailer, got {type(name)}"
+            )
+        lparen = self.consume_token(Tok.LPAREN)
+        first = self.match(ast.SubNodeList)
+        second = (
+            self.consume(ast.SubNodeList)
+            if (comma := self.match_token(Tok.COMMA))
+            else None
+        )
+        rparen = self.consume_token(Tok.RPAREN)
        arg = (
            first
-            if isinstance(first, ast.
-            and isinstance(first.items[0], ast.MatchPattern)
+            if (first and isinstance(first.items[0], ast.MatchPattern))
            else None
        )
        kw = (
            second
-            if isinstance(second, ast.
-            and isinstance(second.items[0], ast.MatchKVPair)
+            if (second and isinstance(second.items[0], ast.MatchKVPair))
            else (
                first
-                if isinstance(first, ast.
-                and isinstance(first.items[0], ast.MatchKVPair)
+                if (first and isinstance(first.items[0], ast.MatchKVPair))
                else None
            )
        )
-
-
-
-
-            if kw
-
-
-
-
-
-
-
-
-
-            kid=kid_nodes,
-        )
-        else:
-            raise self.ice()
+        kid_nodes: list = [name, lparen]
+        if arg:
+            kid_nodes.append(arg)
+            if kw:
+                kid_nodes.extend([comma, kw]) if comma else kid_nodes.append(kw)
+        elif kw:
+            kid_nodes.append(kw)
+        kid_nodes.append(rparen)
+        return ast.MatchArch(
+            name=name,
+            arg_patterns=arg,
+            kw_patterns=kw,
+            kid=kid_nodes,
+        )

-    def pattern_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.MatchPattern]:
+    def pattern_list(self, _: None) -> ast.SubNodeList[ast.MatchPattern]:
        """Grammar rule.

        pattern_list: (pattern_list COMMA)? pattern_seq
        """
-        consume
-
-
-        if isinstance(kid[0], ast.SubNodeList):
-            consume = kid[0]
-            comma = kid[1]
-            pattern = kid[2]
+        if consume := self.match(ast.SubNodeList):
+            comma = self.consume_token(Tok.COMMA)
+            pattern = self.consume(ast.MatchPattern)
        else:
-            pattern =
+            pattern = self.consume(ast.MatchPattern)
        new_kid = [*consume.kid, comma, pattern] if consume else [pattern]
        valid_kid = [i for i in new_kid if isinstance(i, ast.MatchPattern)]
        return ast.SubNodeList[ast.MatchPattern](
            items=valid_kid,
            delim=Tok.COMMA,
-            kid=
+            kid=new_kid,
        )

-    def kw_pattern_list(
-        self, kid: list[ast.AstNode]
-    ) -> ast.SubNodeList[ast.MatchKVPair]:
+    def kw_pattern_list(self, _: None) -> ast.SubNodeList[ast.MatchKVPair]:
        """Grammar rule.

        kw_pattern_list: (kw_pattern_list COMMA)? named_ref EQ pattern_seq
        """
-
-
-
-
-
-
-
-
-            name = kid[
-
-
-
-
-
-
-
-                *consume.kid,
-                comma,
-                ast.MatchKVPair(key=name, value=value, kid=[name, eq, value]),
-            ]
-        else:
-            name = kid[0]
-            eq = kid[1]
-            value = kid[2]
-            if not isinstance(name, ast.NameAtom) or not isinstance(
-                value, ast.MatchPattern
-            ):
-                raise self.ice()
-            new_kid = [
-                ast.MatchKVPair(key=name, value=value, kid=[name, eq, value])
-            ]
-        if isinstance(name, ast.NameAtom) and isinstance(value, ast.MatchPattern):
-            valid_kid = [i for i in new_kid if isinstance(i, ast.MatchKVPair)]
-            return ast.SubNodeList[ast.MatchKVPair](
-                items=valid_kid,
-                delim=Tok.COMMA,
-                kid=new_kid,
-            )
-        else:
-            raise self.ice()
+        new_kid: list = []
+        if consume := self.match(ast.SubNodeList):
+            comma = self.consume_token(Tok.COMMA)
+            new_kid.extend([*consume.kid, comma])
+        name = self.consume(ast.NameAtom)
+        eq = self.consume_token(Tok.EQ)
+        value = self.consume(ast.MatchPattern)
+        new_kid.extend(
+            [ast.MatchKVPair(key=name, value=value, kid=[name, eq, value])]
+        )
+        valid_kid = [i for i in new_kid if isinstance(i, ast.MatchKVPair)]
+        return ast.SubNodeList[ast.MatchKVPair](
+            items=valid_kid,
+            delim=Tok.COMMA,
+            kid=new_kid,
+        )

    def __default_token__(self, token: jl.Token) -> ast.Token:
        """Token handler."""