jaclang 0.7.30__py3-none-any.whl → 0.7.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of jaclang might be problematic.
- jaclang/__init__.py +419 -3
- jaclang/compiler/absyntree.py +3 -3
- jaclang/compiler/constant.py +4 -4
- jaclang/compiler/jac.lark +226 -175
- jaclang/compiler/parser.py +1336 -1819
- jaclang/compiler/passes/main/fuse_typeinfo_pass.py +2 -2
- jaclang/compiler/passes/main/import_pass.py +2 -1
- jaclang/compiler/passes/main/pyast_gen_pass.py +565 -723
- jaclang/compiler/passes/main/tests/test_type_check_pass.py +6 -3
- jaclang/compiler/tests/test_parser.py +13 -5
- jaclang/plugin/builtin.py +11 -0
- jaclang/plugin/default.py +51 -8
- jaclang/plugin/feature.py +14 -0
- jaclang/plugin/spec.py +16 -0
- jaclang/plugin/tests/fixtures/graph_purger.jac +2 -0
- jaclang/plugin/tests/fixtures/other_root_access.jac +1 -0
- jaclang/plugin/tests/fixtures/savable_object.jac +2 -0
- jaclang/plugin/tests/test_jaseci.py +1 -1
- jaclang/runtimelib/architype.py +9 -19
- jaclang/runtimelib/context.py +25 -9
- jaclang/settings.py +2 -0
- jaclang/tests/fixtures/create_dynamic_architype.jac +1 -1
- jaclang/tests/fixtures/nested_impls.jac +55 -0
- jaclang/tests/test_language.py +27 -13
- jaclang/tests/test_reference.py +2 -2
- jaclang/utils/helpers.py +4 -3
- {jaclang-0.7.30.dist-info → jaclang-0.7.31.dist-info}/METADATA +1 -1
- {jaclang-0.7.30.dist-info → jaclang-0.7.31.dist-info}/RECORD +30 -29
- {jaclang-0.7.30.dist-info → jaclang-0.7.31.dist-info}/WHEEL +1 -1
- {jaclang-0.7.30.dist-info → jaclang-0.7.31.dist-info}/entry_points.txt +0 -0
jaclang/compiler/parser.py
CHANGED
@@ -5,8 +5,7 @@ from __future__ import annotations
 import keyword
 import logging
 import os
-from typing import Callable, TypeAlias
-
+from typing import Callable, TypeAlias, TypeVar
 
 import jaclang.compiler.absyntree as ast
 from jaclang.compiler import jac_lark as jl  # type: ignore
@@ -15,6 +14,9 @@ from jaclang.compiler.passes.ir_pass import Pass
 from jaclang.vendor.lark import Lark, Transformer, Tree, logger
 
 
+T = TypeVar("T", bound=ast.AstNode)
+
+
 class JacParser(Pass):
     """Jac Parser."""
 
@@ -76,12 +78,12 @@ class JacParser(Pass):
             orig_src=mod.loc.orig_src,
             name=token.type,
             value=token.value,
-            line=token.line
-            end_line=token.end_line
-            col_start=token.column
-            col_end=token.end_column
-            pos_start=token.start_pos
-            pos_end=token.end_pos
+            line=token.line or 0,
+            end_line=token.end_line or 0,
+            col_start=token.column or 0,
+            col_end=token.end_column or 0,
+            pos_start=token.start_pos or 0,
+            pos_end=token.end_pos or 0,
             kid=[],
         )
 
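A minimal sketch of why the `or 0` guards in the hunk above are useful, assuming the vendored Lark behaves like upstream Lark, where position attributes of a token built without position information default to None (the import below uses the upstream `lark` package name for brevity; it is not part of this diff):

# Sketch only: Lark tokens can carry None positions.
from lark import Token

tok = Token("NAME", "x")            # constructed with no position info
print(tok.line, tok.end_column)     # -> None None
line = tok.line or 0                # coalesce to 0, as the new parser code does
print(line)                         # -> 0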
@@ -130,6 +132,10 @@ class JacParser(Pass):
             super().__init__(*args, **kwargs)
             self.parse_ref = parser
             self.terminals: list[ast.Token] = []
+            # TODO: Once the kid is removed from the ast, we can get rid of this
+            # node_idx and directly pop(0) kid as we process the nodes.
+            self.node_idx = 0
+            self.cur_nodes: list[ast.AstNode] = []
 
         def ice(self) -> Exception:
             """Raise internal compiler error."""
@@ -138,32 +144,119 @@ class JacParser(Pass):
                 f"{self.parse_ref.__class__.__name__} - Internal Compiler Error, Invalid Parse Tree!"
             )
 
-        def
-            """Update node."""
+        def _node_update(self, node: T) -> T:
             self.parse_ref.cur_node = node
             if node not in self.parse_ref.node_list:
                 self.parse_ref.node_list.append(node)
             return node
 
-        def
+        def _call_userfunc(
+            self, tree: jl.Tree, new_children: None | list[ast.AstNode] = None
+        ) -> ast.AstNode:
+            self.cur_nodes = new_children or tree.children  # type: ignore[assignment]
+            try:
+                return self._node_update(super()._call_userfunc(tree, new_children))
+            finally:
+                self.cur_nodes = []
+                self.node_idx = 0
+
+        def _call_userfunc_token(self, token: jl.Token) -> ast.AstNode:
+            return self._node_update(super()._call_userfunc_token(token))
+
+        def _binary_expr_unwind(self, kid: list[ast.AstNode]) -> ast.Expr:
+            """Binary expression helper."""
+            if len(kid) > 1:
+                if (
+                    isinstance(kid[0], ast.Expr)
+                    and isinstance(
+                        kid[1],
+                        (ast.Token, ast.DisconnectOp, ast.ConnectOp),
+                    )
+                    and isinstance(kid[2], ast.Expr)
+                ):
+                    return ast.BinaryExpr(
+                        left=kid[0],
+                        op=kid[1],
+                        right=kid[2],
+                        kid=kid,
+                    )
+                else:
+                    raise self.ice()
+            elif isinstance(kid[0], ast.Expr):
+                return kid[0]
+            else:
+                raise self.ice()
+
+        # ******************************************************************* #
+        # Parser Helper functions.                                            #
+        # ******************************************************************* #
+
+        def match(self, ty: type[T]) -> T | None:
+            """Return a node matching type 'ty' if possible from the current nodes."""
+            if (self.node_idx < len(self.cur_nodes)) and isinstance(
+                self.cur_nodes[self.node_idx], ty
+            ):
+                self.node_idx += 1
+                return self.cur_nodes[self.node_idx - 1]  # type: ignore[return-value]
+            return None
+
+        def consume(self, ty: type[T]) -> T:
+            """Consume and return the specified type, if it's not exists, will be an internal compiler error."""
+            if node := self.match(ty):
+                return node
+            raise self.ice()
+
+        def match_token(self, tok: Tok) -> ast.Token | None:
+            """Match a token with the given type and return it."""
+            if token := self.match(ast.Token):
+                if token.name == tok.name:
+                    return token
+                self.node_idx -= (
+                    1  # We're already matched but wrong token so undo matching it.
+                )
+            return None
+
+        def consume_token(self, tok: Tok) -> ast.Token:
+            """Consume a token with the given type and return it."""
+            if token := self.match_token(tok):
+                return token
+            raise self.ice()
+
+        def match_many(self, ty: type[T]) -> list[T]:
+            """Match 0 or more of the given type and return the list."""
+            nodes: list[ast.AstNode] = []
+            while node := self.match(ty):
+                nodes.append(node)
+            return nodes  # type: ignore[return-value]
+
+        def consume_many(self, ty: type[T]) -> list[T]:
+            """Match 1 or more of the given type and return the list."""
+            nodes: list[ast.AstNode] = [self.consume(ty)]
+            while node := self.match(ty):
+                nodes.append(node)
+            return nodes  # type: ignore[return-value]
+
+        # ******************************************************************* #
+        # Parsing Rules                                                       #
+        # ******************************************************************* #
+
+        def start(self, _: None) -> ast.Module:
             """Grammar rule.
 
             start: module
             """
-
-
+            module = self.consume(ast.Module)
+            module._in_mod_nodes = self.parse_ref.node_list
+            return module
 
-        def module(
-            self, kid: list[ast.ElementStmt | ast.String | ast.EmptyToken]
-        ) -> ast.Module:
+        def module(self, _: None) -> ast.Module:
             """Grammar rule.
 
-            module: (
-
+            module: (toplevel_stmt (tl_stmt_with_doc | toplevel_stmt)*)?
+                | STRING (tl_stmt_with_doc | toplevel_stmt)*
             """
-            doc =
-            body =
-            body = [i for i in body if isinstance(i, ast.ElementStmt)]
+            doc = self.match(ast.String)
+            body = self.match_many(ast.ElementStmt)
             mod = ast.Module(
                 name=self.parse_ref.mod_path.split(os.path.sep)[-1].rstrip(".jac"),
                 source=self.parse_ref.source,
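A self-contained sketch of the cursor-based match/consume pattern that the new TreeToAST helpers above implement; the class and node names below are illustrative stand-ins written for this note, not jaclang's API:

# Toy version of the pattern: match() is optional, consume() is mandatory.
from typing import TypeVar

class Node: ...
class Token(Node):
    def __init__(self, name: str) -> None:
        self.name = name
class SubNodeList(Node): ...

T = TypeVar("T", bound=Node)

class Cursor:
    def __init__(self, nodes: list[Node]) -> None:
        self.nodes, self.idx = nodes, 0

    def match(self, ty: type[T]) -> T | None:
        # Return the next child if it is an instance of `ty`, else leave the cursor alone.
        if self.idx < len(self.nodes) and isinstance(self.nodes[self.idx], ty):
            self.idx += 1
            return self.nodes[self.idx - 1]  # type: ignore[return-value]
        return None

    def consume(self, ty: type[T]) -> T:
        # Like match, but a missing child is a hard error (the real parser raises ice()).
        node = self.match(ty)
        if node is None:
            raise RuntimeError("internal error: expected " + ty.__name__)
        return node

cur = Cursor([Token("KW_LET"), SubNodeList()])
print(cur.consume(Token).name)                    # -> KW_LET
print(cur.match(Token))                           # -> None (next child is a SubNodeList)
print(type(cur.consume(SubNodeList)).__name__)    # -> SubNodeList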
@@ -172,28 +265,24 @@ class JacParser(Pass):
                 is_imported=False,
                 terminals=self.terminals,
                 kid=(
-
-
-                    else [ast.EmptyToken(ast.JacSource("", self.parse_ref.mod_path))]
+                    self.cur_nodes
+                    or [ast.EmptyToken(ast.JacSource("", self.parse_ref.mod_path))]
                 ),
             )
-            return
+            return mod
 
-        def
-            self, kid: list[ast.ElementStmt | ast.String]
-        ) -> ast.ElementStmt:
+        def tl_stmt_with_doc(self, _: None) -> ast.ElementStmt:
             """Grammar rule.
 
-
+            tl_stmt_with_doc: doc_tag toplevel_stmt
             """
-
-
-
-
-
-                raise self.ice()
+            doc = self.consume(ast.String)
+            element = self.consume(ast.ElementStmt)
+            element.doc = doc
+            element.add_kids_left([doc])
+            return element
 
-        def
+        def toplevel_stmt(self, _: None) -> ast.ElementStmt:
             """Grammar rule.
 
             element: py_code_block
@@ -204,270 +293,219 @@ class JacParser(Pass):
                 | test
                 | global_var
             """
-
-                return self.nu(kid[0])
-            else:
-                raise self.ice()
+            return self.consume(ast.ElementStmt)
 
-        def global_var(self,
+        def global_var(self, _: None) -> ast.GlobalVars:
             """Grammar rule.
 
             global_var: (KW_LET | KW_GLOBAL) access_tag? assignment_list SEMI
             """
-            is_frozen =
-
-            assignments =
-
-
-
-
-
-
-                    kid=kid,
-                )
-            )
-            else:
-                raise self.ice()
+            is_frozen = self.consume(ast.Token).name == Tok.KW_LET
+            access_tag = self.match(ast.SubTag)
+            assignments = self.consume(ast.SubNodeList)
+            return ast.GlobalVars(
+                access=access_tag,
+                assignments=assignments,
+                is_frozen=is_frozen,
+                kid=self.cur_nodes,
+            )
 
-        def access_tag(self,
+        def access_tag(self, _: None) -> ast.SubTag[ast.Token]:
             """Grammar rule.
 
             access_tag: COLON ( KW_PROT | KW_PUB | KW_PRIV )
             """
-
-
-
-                    tag=kid[1],
-                    kid=kid,
-                )
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.COLON)
+            access = self.consume(ast.Token)
+            return ast.SubTag[ast.Token](tag=access, kid=self.cur_nodes)
 
-        def test(self,
+        def test(self, _: None) -> ast.Test:
             """Grammar rule.
 
             test: KW_TEST NAME? code_block
             """
-            name
-
-
-
-
-
-
-
-
-                    kid=kid,
-                )
-            )
-            else:
-                raise self.ice()
+            # Q(thakee): Why the name should be KW_TEST if no name present?
+            test_tok = self.consume_token(Tok.KW_TEST)
+            name = self.match(ast.Name) or test_tok
+            codeblock = self.consume(ast.SubNodeList)
+            return ast.Test(
+                name=name,
+                body=codeblock,
+                kid=self.cur_nodes,
+            )
 
-        def free_code(self,
+        def free_code(self, _: None) -> ast.ModuleCode:
             """Grammar rule.
 
             free_code: KW_WITH KW_ENTRY sub_name? code_block
             """
-
-
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
-
-        def doc_tag(self, kid: list[ast.AstNode]) -> ast.String:
-            """Grammar rule.
-
-            doc_tag: ( STRING | DOC_STRING )
-            """
-            if isinstance(kid[0], ast.String):
-                return self.nu(kid[0])
-            else:
-                raise self.ice()
+            self.consume_token(Tok.KW_WITH)
+            self.consume_token(Tok.KW_ENTRY)
+            name = self.match(ast.SubTag)
+            codeblock = self.consume(ast.SubNodeList)
+            return ast.ModuleCode(
+                name=name,
+                body=codeblock,
+                kid=self.cur_nodes,
+            )
 
-        def py_code_block(self,
+        def py_code_block(self, _: None) -> ast.PyInlineCode:
             """Grammar rule.
 
             py_code_block: PYNLINE
             """
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            pyinline = self.consume_token(Tok.PYNLINE)
+            return ast.PyInlineCode(
+                code=pyinline,
+                kid=self.cur_nodes,
+            )
 
-        def import_stmt(self,
+        def import_stmt(self, _: None) -> ast.Import:
             """Grammar rule.
 
             import_stmt: KW_IMPORT sub_name? KW_FROM from_path LBRACE import_items RBRACE
-
-
-
+                | KW_IMPORT sub_name? KW_FROM from_path COMMA import_items SEMI //Deprecated
+                | KW_IMPORT sub_name? import_path (COMMA import_path)* SEMI
+                | include_stmt
             """
-            if
-                return
-
-
-
-
-
-
+            if import_stmt := self.match(ast.Import):  # Include Statement.
+                return import_stmt
+
+            # TODO: kid will be removed so let's keep as it is for now.
+            kid = self.cur_nodes
+
+            from_path: ast.ModulePath | None = None
+            self.consume_token(Tok.KW_IMPORT)
+            lang = self.match(ast.SubTag)
+
+            if self.match_token(Tok.KW_FROM):
+                from_path = self.consume(ast.ModulePath)
+                self.consume(ast.Token)  # LBRACE or COMMA
+                items = self.consume(ast.SubNodeList)
+                if self.consume(ast.Token).name == Tok.SEMI:  # RBRACE or SEMI
+                    self.parse_ref.warning(
+                        "Deprecated syntax, use braces for multiple imports (e.g, import from mymod {a, b, c})",
+                    )
             else:
-                paths = [
+                paths = [self.consume(ast.ModulePath)]
+                while self.match_token(Tok.COMMA):
+                    paths.append(self.consume(ast.ModulePath))
+                self.consume_token(Tok.SEMI)
                 items = ast.SubNodeList[ast.ModulePath](
-                    items=paths,
+                    items=paths,
+                    delim=Tok.COMMA,
+                    # TODO: kid will be removed so let's keep as it is for now.
+                    kid=self.cur_nodes[2 if lang else 1 : -1],
                 )
                 kid = (kid[:2] if lang else kid[:1]) + [items] + kid[-1:]
 
             is_absorb = False
-
-
-
-
-
-
-
-                    kid=kid,
-                )
-            )
-            if (
-                from_path
-                and isinstance(kid[-1], ast.Token)
-                and kid[-1].name == Tok.SEMI
-            ):
-                self.parse_ref.warning(
-                    "Deprecated syntax, use braces for multiple imports (e.g, import from mymod {a, b, c})",
-                )
-                return ret
-            else:
-                raise self.ice()
+            return ast.Import(
+                hint=lang,
+                from_loc=from_path,
+                items=items,
+                is_absorb=is_absorb,
+                kid=kid,
+            )
 
-        def from_path(self,
+        def from_path(self, _: None) -> ast.ModulePath:
             """Grammar rule.
 
             from_path: (DOT | ELLIPSIS)* import_path
-
+                | (DOT | ELLIPSIS)+
             """
             level = 0
-
-            if
-
-
-
-
-
-
-
-
-                return
-            else:
-                return self.nu(
-                    ast.ModulePath(
-                        path=None,
-                        level=level,
-                        alias=None,
-                        kid=kid,
-                    )
-                )
+            while True:
+                if self.match_token(Tok.DOT):
+                    level += 1
+                elif self.match_token(Tok.ELLIPSIS):
+                    level += 3
+                else:
+                    break
+            if import_path := self.match(ast.ModulePath):
+                kids = [i for i in self.cur_nodes if isinstance(i, ast.Token)]
+                import_path.level = level
+                import_path.add_kids_left(kids)
+                return import_path
 
-
+            return ast.ModulePath(
+                path=None,
+                level=level,
+                alias=None,
+                kid=self.cur_nodes,
+            )
+
+        def include_stmt(self, _: None) -> ast.Import:
             """Grammar rule.
 
-            include_stmt: KW_INCLUDE sub_name import_path SEMI
+            include_stmt: KW_INCLUDE sub_name? import_path SEMI
             """
-
-
-
-
+            kid = self.cur_nodes  # TODO: Will be removed.
+            self.consume_token(Tok.KW_INCLUDE)
+            lang = self.match(ast.SubTag)
+            from_path = self.consume(ast.ModulePath)
             items = ast.SubNodeList[ast.ModulePath](
                 items=[from_path], delim=Tok.COMMA, kid=[from_path]
             )
-            kid = (
+            kid = (
+                (kid[:2] if lang else kid[:1]) + [items] + kid[-1:]
+            )  # TODO: Will be removed.
             is_absorb = True
-            return
-
-
-
-
-
-                kid=kid,
-            )
+            return ast.Import(
+                hint=lang,
+                from_loc=None,
+                items=items,
+                is_absorb=is_absorb,
+                kid=kid,
             )
 
-        def import_path(self,
+        def import_path(self, _: None) -> ast.ModulePath:
             """Grammar rule.
 
             import_path: named_ref (DOT named_ref)* (KW_AS NAME)?
             """
-            valid_path = [
-
-
-
-
-
-
-
+            valid_path = [self.consume(ast.Name)]
+            while self.match_token(Tok.DOT):
+                valid_path.append(self.consume(ast.Name))
+            alias = self.consume(ast.Name) if self.match_token(Tok.KW_AS) else None
+            return ast.ModulePath(
+                path=valid_path,
+                level=0,
+                alias=alias,
+                kid=self.cur_nodes,
             )
-            if alias is not None:
-                valid_path = valid_path[:-1]
 
-
-                ast.ModulePath(
-                    path=valid_path,
-                    level=0,
-                    alias=alias,
-                    kid=kid,
-                )
-            )
-
-        def import_items(
-            self, kid: list[ast.AstNode]
-        ) -> ast.SubNodeList[ast.ModuleItem]:
+        def import_items(self, _: None) -> ast.SubNodeList[ast.ModuleItem]:
             """Grammar rule.
 
             import_items: (import_item COMMA)* import_item COMMA?
             """
+            items = [self.consume(ast.ModuleItem)]
+            while self.match_token(Tok.COMMA):
+                if module_item := self.match(ast.ModuleItem):
+                    items.append(module_item)
             ret = ast.SubNodeList[ast.ModuleItem](
-                items=
+                items=items,
                 delim=Tok.COMMA,
-                kid=
+                kid=self.cur_nodes,
             )
-            return
+            return ret
 
-        def import_item(self,
+        def import_item(self, _: None) -> ast.ModuleItem:
             """Grammar rule.
 
             import_item: named_ref (KW_AS NAME)?
             """
-            name =
-            alias =
-
-
-
-
-
-                    name=name,
-                    alias=alias,
-                    kid=kid,
-                )
-            )
-            else:
-                raise self.ice()
+            name = self.consume(ast.Name)
+            alias = self.consume(ast.Name) if self.match_token(Tok.KW_AS) else None
+            return ast.ModuleItem(
+                name=name,
+                alias=alias,
+                kid=self.cur_nodes,
+            )
 
         def architype(
-            self,
+            self, _: None
         ) -> ast.ArchSpec | ast.ArchDef | ast.Enum | ast.EnumDef:
             """Grammar rule.
 
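A small worked example of the relative-import level counting used by from_path in the hunk above, where each DOT adds 1 and each ELLIPSIS adds 3; the helper function below is hypothetical, written only to mirror that arithmetic:

# Sketch only: how the leading dots of a from-path map to an import level.
def count_level(prefix_tokens: list[str]) -> int:
    level = 0
    for tok in prefix_tokens:
        if tok == ".":        # DOT
            level += 1
        elif tok == "...":    # ELLIPSIS
            level += 3
    return level

print(count_level(["."]))          # -> 1
print(count_level(["..."]))        # -> 3
print(count_level(["...", "."]))   # -> 4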
@@ -475,52 +513,49 @@ class JacParser(Pass):
                 | architype_def
                 | enum
             """
-
-            if isinstance(kid[1], ast.ArchSpec):
-                kid[1].decorators = kid[0]
-                kid[1].add_kids_left([kid[0]])
-                return self.nu(kid[1])
-            else:
-                raise self.ice()
+            archspec: ast.ArchSpec | ast.ArchDef | ast.Enum | ast.EnumDef | None = None
 
-
-
+            decorators = self.match(ast.SubNodeList)
+            if decorators is not None:
+                archspec = self.consume(ast.ArchSpec)
+                archspec.decorators = decorators
+                archspec.add_kids_left([decorators])
             else:
-
+                archspec = (
+                    self.match(ast.ArchSpec)
+                    or self.match(ast.ArchDef)
+                    or self.match(ast.Enum)
+                    or self.consume(ast.EnumDef)
+                )
+            return archspec
 
-        def architype_decl(self,
+        def architype_decl(self, _: None) -> ast.ArchSpec:
             """Grammar rule.
 
             architype_decl: arch_type access_tag? STRING? NAME inherited_archs? (member_block | SEMI)
             """
-            arch_type =
-            access =
-            semstr = (
-
-
-
+            arch_type = self.consume(ast.Token)
+            access = self.match(ast.SubTag)
+            semstr = self.match(ast.String)
+            name = self.consume(ast.Name)
+            sub_list1 = self.match(ast.SubNodeList)
+            sub_list2 = self.match(ast.SubNodeList)
+            if self.match_token(Tok.SEMI):
+                inh, body = sub_list1, None
+            else:
+                body = (
+                    sub_list2 or sub_list1
+                )  # if sub_list2 is None then body is sub_list1
+                inh = sub_list2 and sub_list1  # if sub_list2 is None then inh is None.
+            return ast.Architype(
+                arch_type=arch_type,
+                name=name,
+                semstr=semstr,
+                access=access,
+                base_classes=inh,
+                body=body,
+                kid=self.cur_nodes,
             )
-            name = (
-                kid[3]
-                if (access and semstr)
-                else kid[2] if (access or semstr) else kid[1]
-            )
-            inh = kid[-2] if isinstance(kid[-2], ast.SubNodeList) else None
-            body = kid[-1] if isinstance(kid[-1], ast.SubNodeList) else None
-            if isinstance(arch_type, ast.Token) and isinstance(name, ast.Name):
-                return self.nu(
-                    ast.Architype(
-                        arch_type=arch_type,
-                        name=name,
-                        semstr=semstr,
-                        access=access,
-                        base_classes=inh,
-                        body=body,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
         def architype_def(self, kid: list[ast.AstNode]) -> ast.ArchDef:
             """Grammar rule.
@@ -530,12 +565,10 @@ class JacParser(Pass):
             if isinstance(kid[0], ast.ArchRefChain) and isinstance(
                 kid[1], ast.SubNodeList
             ):
-                return
-
-
-
-                    kid=kid,
-                )
+                return ast.ArchDef(
+                    target=kid[0],
+                    body=kid[1],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -549,7 +582,7 @@ class JacParser(Pass):
                 | KW_NODE
             """
             if isinstance(kid[0], ast.Token):
-                return
+                return kid[0]
             else:
                 raise self.ice()
 
@@ -560,12 +593,10 @@ class JacParser(Pass):
             """
             valid_decors = [i for i in kid if isinstance(i, ast.Expr)]
             if len(valid_decors) == len(kid) / 2:
-                return
-
-
-
-                    kid=kid,
-                )
+                return ast.SubNodeList[ast.Expr](
+                    items=valid_decors,
+                    delim=Tok.DECOR_OP,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -577,12 +608,10 @@ class JacParser(Pass):
                 | COLON (atomic_chain COMMA)* atomic_chain COLON
             """
             valid_inh = [i for i in kid if isinstance(i, ast.Expr)]
-            return
-
-
-
-                kid=kid,
-            )
+            return ast.SubNodeList[ast.Expr](
+                items=valid_inh,
+                delim=Tok.COMMA,
+                kid=kid,
             )
 
         def sub_name(self, kid: list[ast.AstNode]) -> ast.SubTag[ast.Name]:
@@ -591,11 +620,9 @@ class JacParser(Pass):
             sub_name: COLON NAME
             """
             if isinstance(kid[1], ast.Name):
-                return
-
-
-                    kid=kid,
-                )
+                return ast.SubTag[ast.Name](
+                    tag=kid[1],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -608,7 +635,7 @@ class JacParser(Pass):
                 | NAME
             """
             if isinstance(kid[0], ast.NameAtom):
-                return
+                return kid[0]
             else:
                 raise self.ice()
 
@@ -623,60 +650,47 @@ class JacParser(Pass):
                 | KW_HERE
             """
             if isinstance(kid[0], ast.Name):
-                return
+                return ast.SpecialVarRef(var=kid[0])
             else:
                 raise self.ice()
 
-        def enum(self,
+        def enum(self, _: None) -> ast.Enum | ast.EnumDef:
             """Grammar rule.
 
             enum: decorators? enum_decl
                 | enum_def
             """
-            if
-
-
-
-
-
-                raise self.ice()
-            elif isinstance(kid[0], (ast.Enum, ast.EnumDef)):
-                return self.nu(kid[0])
-            else:
-
-                raise self.ice()
+            if decorator := self.match(ast.SubNodeList):
+                enum_decl = self.consume(ast.Enum)
+                enum_decl.decorators = decorator
+                enum_decl.add_kids_left([decorator])
+                return enum_decl
+            return self.match(ast.Enum) or self.consume(ast.EnumDef)
 
-        def enum_decl(self,
+        def enum_decl(self, _: None) -> ast.Enum:
             """Grammar rule.
 
             enum_decl: KW_ENUM access_tag? STRING? NAME inherited_archs? (enum_block | SEMI)
             """
-
-
-
-
-
+            self.consume_token(Tok.KW_ENUM)
+            access = self.match(ast.SubTag)
+            semstr = self.match(ast.String)
+            name = self.consume(ast.Name)
+            sub_list1 = self.match(ast.SubNodeList)
+            sub_list2 = self.match(ast.SubNodeList)
+            if self.match_token(Tok.SEMI):
+                inh, body = sub_list1, None
+            else:
+                body = sub_list2 or sub_list1
+                inh = sub_list2 and sub_list1
+            return ast.Enum(
+                semstr=semstr,
+                name=name,
+                access=access,
+                base_classes=inh,
+                body=body,
+                kid=self.cur_nodes,
             )
-            name = (
-                kid[3]
-                if (access and semstr)
-                else kid[2] if (access or semstr) else kid[1]
-            )
-            inh = kid[-2] if isinstance(kid[-2], ast.SubNodeList) else None
-            body = kid[-1] if isinstance(kid[-1], ast.SubNodeList) else None
-            if isinstance(name, ast.Name):
-                return self.nu(
-                    ast.Enum(
-                        semstr=semstr,
-                        name=name,
-                        access=access,
-                        base_classes=inh,
-                        body=body,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
         def enum_def(self, kid: list[ast.AstNode]) -> ast.EnumDef:
             """Grammar rule.
@@ -686,12 +700,10 @@ class JacParser(Pass):
             if isinstance(kid[0], ast.ArchRefChain) and isinstance(
                 kid[1], ast.SubNodeList
             ):
-                return
-
-
-
-                    kid=kid,
-                )
+                return ast.EnumDef(
+                    target=kid[0],
+                    body=kid[1],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -705,9 +717,9 @@ class JacParser(Pass):
             """
             ret = ast.SubNodeList[ast.EnumBlockStmt](items=[], delim=Tok.COMMA, kid=kid)
             ret.items = [i for i in kid if isinstance(i, ast.EnumBlockStmt)]
-            return
+            return ret
 
-        def enum_stmt(self,
+        def enum_stmt(self, _: None) -> ast.EnumBlockStmt:
             """Grammar rule.
 
             enum_stmt: NAME (COLON STRING)? EQ expression
@@ -717,81 +729,43 @@ class JacParser(Pass):
                 | abstract_ability
                 | ability
             """
-            if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        type_tag=None,
-                        kid=kid,
-                        semstr=semstr,
-                        is_enum_stmt=True,
-                    )
-                )
-            else:
-                semstr = (
-                    kid[2]
-                    if len(kid) == 3 and isinstance(kid[2], ast.String)
-                    else None
-                )
-                targ = ast.SubNodeList[ast.Expr](
-                    items=[kid[0]], delim=Tok.COMMA, kid=[kid[0]]
-                )
-                kid[0] = targ
-                return self.nu(
-                    ast.Assignment(
-                        target=targ,
-                        value=None,
-                        type_tag=None,
-                        kid=kid,
-                        semstr=semstr,
-                        is_enum_stmt=True,
-                    )
-                )
-            elif isinstance(kid[0], (ast.PyInlineCode, ast.ModuleCode)):
-                return self.nu(kid[0])
-            raise self.ice()
-
-        def ability(
-            self, kid: list[ast.AstNode]
-        ) -> ast.Ability | ast.AbilityDef | ast.FuncCall:
+            if stmt := (
+                self.match(ast.PyInlineCode)
+                or self.match(ast.ModuleCode)
+                or self.match(ast.Ability)
+            ):
+                return stmt
+            name = self.consume(ast.Name)
+            semstr = self.consume(ast.String) if self.match_token(Tok.COLON) else None
+            expr = self.consume(ast.Expr) if self.match_token(Tok.EQ) else None
+            targ = ast.SubNodeList[ast.Expr](items=[name], delim=Tok.COMMA, kid=[name])
+            self.cur_nodes[0] = targ
+            return ast.Assignment(
+                target=targ,
+                value=expr,
+                type_tag=None,
+                kid=self.cur_nodes,
+                semstr=semstr,
+                is_enum_stmt=True,
+            )
+
+        def ability(self, _: None) -> ast.Ability | ast.AbilityDef | ast.FuncCall:
             """Grammer rule.
 
             ability: decorators? KW_ASYNC? ability_decl
                 | decorators? genai_ability
                 | ability_def
             """
-
-            decorators =
-
-
-
-                if isinstance(chomp[0], ast.Token) and chomp[0].name == Tok.KW_ASYNC
-                else None
-            )
-            ability = chomp[1] if is_async else chomp[0]
-            if not isinstance(ability, (ast.Ability, ast.AbilityDef)):
-                raise self.ice()
-            if is_async and isinstance(ability, ast.Ability):
+            ability: ast.Ability | ast.AbilityDef | None = None
+            decorators = self.match(ast.SubNodeList)
+            is_async = self.match_token(Tok.KW_ASYNC)
+            ability = self.match(ast.Ability)
+            if is_async and ability:
                 ability.is_async = True
                 ability.add_kids_left([is_async])
-            if
+            if ability is None:
+                ability = self.consume(ast.AbilityDef)
+            if decorators:
                 for dec in decorators.items:
                     if (
                         isinstance(dec, ast.NameAtom)
@@ -801,55 +775,43 @@ class JacParser(Pass):
                         ability.is_static = True
                         decorators.items.remove(dec)  # noqa: B038
                         break
-                if
+                if decorators.items:
                     ability.decorators = decorators
                     ability.add_kids_left([decorators])
-                return
-            return
+                return ability
+            return ability
 
-        def ability_decl(self,
+        def ability_decl(self, _: None) -> ast.Ability:
             """Grammar rule.
 
             ability_decl: KW_OVERRIDE? KW_STATIC? KW_CAN access_tag? STRING?
                 named_ref (func_decl | event_clause) (code_block | SEMI)
             """
-
-
-
-            )
-
-
-
+            signature: ast.FuncSignature | ast.EventSignature | None = None
+            body: ast.SubNodeList | None = None
+            is_override = self.match_token(Tok.KW_OVERRIDE) is not None
+            is_static = self.match_token(Tok.KW_STATIC) is not None
+            self.consume_token(Tok.KW_CAN)
+            access = self.match(ast.SubTag)
+            semstr = self.match(ast.String)
+            name = self.consume(ast.NameAtom)
+            signature = self.match(ast.FuncSignature) or self.consume(
+                ast.EventSignature
+            )
+            if (body := self.match(ast.SubNodeList)) is None:
+                self.consume_token(Tok.SEMI)
+            return ast.Ability(
+                name_ref=name,
+                is_async=False,
+                is_override=is_override,
+                is_static=is_static,
+                is_abstract=False,
+                access=access,
+                semstr=semstr,
+                signature=signature,
+                body=body,
+                kid=self.cur_nodes,
             )
-            chomp = chomp[2:] if is_static else chomp[1:]
-            access = chomp[0] if isinstance(chomp[0], ast.SubTag) else None
-            chomp = chomp[1:] if access else chomp
-            semstr = chomp[0] if isinstance(chomp[0], ast.String) else None
-            chomp = chomp[1:] if semstr else chomp
-            name = chomp[0]
-            chomp = chomp[1:]
-            signature = chomp[0]
-            chomp = chomp[1:]
-            body = chomp[0] if isinstance(chomp[0], ast.SubNodeList) else None
-            if isinstance(name, ast.NameAtom) and isinstance(
-                signature, (ast.FuncSignature, ast.EventSignature)
-            ):
-                return self.nu(
-                    ast.Ability(
-                        name_ref=name,
-                        is_async=False,
-                        is_override=is_override,
-                        is_static=is_static,
-                        is_abstract=False,
-                        access=access,
-                        semstr=semstr,
-                        signature=signature,
-                        body=body,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
         def ability_def(self, kid: list[ast.AstNode]) -> ast.AbilityDef:
             """Grammar rule.
@@ -861,137 +823,99 @@ class JacParser(Pass):
                 and isinstance(kid[1], (ast.FuncSignature, ast.EventSignature))
                 and isinstance(kid[2], ast.SubNodeList)
             ):
-                return
-
-
-
-
-                    kid=kid,
-                    )
+                return ast.AbilityDef(
+                    target=kid[0],
+                    signature=kid[1],
+                    body=kid[2],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
 
         # We need separate production rule for abstract_ability because we don't
         # want to allow regular abilities outside of classed to be abstract.
-        def abstract_ability(self,
+        def abstract_ability(self, _: None) -> ast.Ability:
             """Grammar rule.
 
             abstract_ability: KW_OVERRIDE? KW_STATIC? KW_CAN access_tag? STRING?
                 named_ref (func_decl | event_clause) KW_ABSTRACT SEMI
             """
-
-            is_override = (
-
-            )
-
-
-
-            )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                        is_static=is_static,
-                        is_abstract=True,
-                        access=access,
-                        semstr=semstr,
-                        signature=signature,
-                        body=None,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
-
-        def genai_ability(self, kid: list[ast.AstNode]) -> ast.Ability:
+            signature: ast.FuncSignature | ast.EventSignature | None = None
+            is_override = self.match_token(Tok.KW_OVERRIDE) is not None
+            is_static = self.match_token(Tok.KW_STATIC) is not None
+            self.consume_token(Tok.KW_CAN)
+            access = self.match(ast.SubTag)
+            semstr = self.match(ast.String)
+            name = self.consume(ast.NameAtom)
+            signature = self.match(ast.FuncSignature) or self.consume(
+                ast.EventSignature
+            )
+            self.consume_token(Tok.KW_ABSTRACT)
+            self.consume_token(Tok.SEMI)
+            return ast.Ability(
+                name_ref=name,
+                is_async=False,
+                is_override=is_override,
+                is_static=is_static,
+                is_abstract=True,
+                access=access,
+                semstr=semstr,
+                signature=signature,
+                body=None,
+                kid=self.cur_nodes,
+            )
+
+        def genai_ability(self, _: None) -> ast.Ability:
             """Grammar rule.
 
             genai_ability: KW_OVERRIDE? KW_STATIC? KW_CAN access_tag? STRING?
                 named_ref (func_decl) KW_BY atomic_call SEMI
             """
-
-
-
-            )
-
-
-
+            is_override = self.match_token(Tok.KW_OVERRIDE) is not None
+            is_static = self.match_token(Tok.KW_STATIC) is not None
+            self.consume_token(Tok.KW_CAN)
+            access = self.match(ast.SubTag)
+            semstr = self.match(ast.String)
+            name = self.consume(ast.NameAtom)
+            signature = self.match(ast.FuncSignature) or self.consume(
+                ast.EventSignature
+            )
+            self.consume_token(Tok.KW_BY)
+            body = self.consume(ast.FuncCall)
+            self.consume_token(Tok.SEMI)
+            return ast.Ability(
+                name_ref=name,
+                is_async=False,
+                is_override=is_override,
+                is_static=is_static,
+                is_abstract=False,
+                access=access,
+                semstr=semstr,
+                signature=signature,
+                body=body,
+                kid=self.cur_nodes,
             )
-            chomp = chomp[1:] if is_static else chomp
-            chomp = chomp[1:]
-            access = chomp[0] if isinstance(chomp[0], ast.SubTag) else None
-            chomp = chomp[1:] if access else chomp
-            semstr = chomp[0] if isinstance(chomp[0], ast.String) else None
-            chomp = chomp[1:] if semstr else chomp
-            name = chomp[0]
-            chomp = chomp[1:]
-            signature = chomp[0]
-            chomp = chomp[1:]
-            has_by = isinstance(chomp[0], ast.Token) and chomp[0].name == Tok.KW_BY
-            chomp = chomp[1:] if has_by else chomp
-            if (
-                isinstance(name, ast.NameAtom)
-                and isinstance(signature, (ast.FuncSignature, ast.EventSignature))
-                and isinstance(chomp[0], ast.FuncCall)
-                and has_by
-            ):
-                return self.nu(
-                    ast.Ability(
-                        name_ref=name,
-                        is_async=False,
-                        is_override=is_override,
-                        is_static=is_static,
-                        is_abstract=False,
-                        access=access,
-                        semstr=semstr,
-                        signature=signature,
-                        body=chomp[0],
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
-        def event_clause(self,
+        def event_clause(self, _: None) -> ast.EventSignature:
             """Grammar rule.
 
             event_clause: KW_WITH expression? (KW_EXIT | KW_ENTRY) (STRING? RETURN_HINT expression)?
             """
-
-
-
-
+            return_spec: ast.Expr | None = None
+            semstr: ast.String | None = None
+            self.consume_token(Tok.KW_WITH)
+            type_specs = self.match(ast.Expr)
+            event = self.match_token(Tok.KW_EXIT) or self.consume_token(Tok.KW_ENTRY)
+            if semstr := self.match(ast.String):
+                self.consume_token(Tok.RETURN_HINT)
+                return_spec = self.consume(ast.Expr)
+            return ast.EventSignature(
+                semstr=semstr,
+                event=event,
+                arch_tag_info=type_specs,
+                return_type=return_spec,
+                kid=self.cur_nodes,
            )
-            event = kid[2] if type_specs else kid[1]
-            if isinstance(event, ast.Token) and (
-                isinstance(return_spec, ast.Expr) or return_spec is None
-            ):
-                return self.nu(
-                    ast.EventSignature(
-                        semstr=semstr,
-                        event=event,
-                        arch_tag_info=type_specs,
-                        return_type=return_spec,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
         def func_decl(self, kid: list[ast.AstNode]) -> ast.FuncSignature:
             """Grammar rule.
@@ -1012,21 +936,17 @@ class JacParser(Pass):
             if (isinstance(params, ast.SubNodeList) or params is None) and (
                 isinstance(return_spec, ast.Expr) or return_spec is None
             ):
-                return
-
-
-
-
-                        kid
-
-
-
-
-
-                        )
-                    ]
-                ),
-            )
+                return ast.FuncSignature(
+                    semstr=semstr,
+                    params=params,
+                    return_type=return_spec,
+                    kid=(
+                        kid
+                        if len(kid)
+                        else [
+                            ast.EmptyToken(ast.JacSource("", self.parse_ref.mod_path))
+                        ]
+                    ),
                 )
             else:
                 raise self.ice()
@@ -1043,7 +963,7 @@ class JacParser(Pass):
                 delim=Tok.COMMA,
                 kid=kid,
             )
-            return
+            return ret
 
         def param_var(self, kid: list[ast.AstNode]) -> ast.ParamVar:
             """Grammar rule.
@@ -1074,15 +994,13 @@ class JacParser(Pass):
                 )
             )
             if isinstance(name, ast.Name) and isinstance(type_tag, ast.SubTag):
-                return
-
-
-
-
-
-
-                    kid=kid,
-                )
+                return ast.ParamVar(
+                    semstr=semstr,
+                    name=name,
+                    type_tag=type_tag,
+                    value=value,
+                    unpack=star,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -1102,7 +1020,7 @@ class JacParser(Pass):
             ret.items = [i for i in kid if isinstance(i, ast.ArchBlockStmt)]
             ret.left_enc = kid[0] if isinstance(kid[0], ast.Token) else None
             ret.right_enc = kid[-1] if isinstance(kid[-1], ast.Token) else None
-            return
+            return ret
 
         def member_stmt(self, kid: list[ast.AstNode]) -> ast.ArchBlockStmt:
             """Grammar rule.
@@ -1114,7 +1032,7 @@ class JacParser(Pass):
                 | doc_tag? has_stmt
             """
             if isinstance(kid[0], ast.ArchBlockStmt):
-                ret =
+                ret = kid[0]
             elif (
                 isinstance(kid[1], ast.ArchBlockStmt)
                 and isinstance(kid[1], ast.AstDocNode)
@@ -1122,7 +1040,7 @@ class JacParser(Pass):
             ):
                 kid[1].doc = kid[0]
                 kid[1].add_kids_left([kid[0]])
-                ret =
+                ret = kid[1]
             else:
                 raise self.ice()
             if isinstance(ret, ast.Ability):
@@ -1145,14 +1063,12 @@ class JacParser(Pass):
             chomp = chomp[1:] if access else chomp
             assign = chomp[0]
             if isinstance(assign, ast.SubNodeList):
-                return
-
-
-
-
-
-                    kid=kid,
-                )
+                return ast.ArchHas(
+                    vars=assign,
+                    is_static=is_static,
+                    is_frozen=is_freeze,
+                    access=access,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -1176,12 +1092,10 @@ class JacParser(Pass):
                 assign = kid[0]
                 new_kid = [assign]
             valid_kid = [i for i in new_kid if isinstance(i, ast.HasVar)]
-            return
-
-
-
-                kid=new_kid,
-            )
+            return ast.SubNodeList[ast.HasVar](
+                items=valid_kid,
+                delim=Tok.COMMA,
+                kid=new_kid,
             )
 
         def typed_has_clause(self, kid: list[ast.AstNode]) -> ast.HasVar:
@@ -1195,35 +1109,27 @@ class JacParser(Pass):
             defer = isinstance(kid[-1], ast.Token) and kid[-1].name == Tok.KW_POST_INIT
             value = kid[-1] if not defer and isinstance(kid[-1], ast.Expr) else None
             if isinstance(name, ast.Name) and isinstance(type_tag, ast.SubTag):
-                return
-
-
-
-
-
-
-                    kid=kid,
-                )
+                return ast.HasVar(
+                    semstr=semstr,
+                    name=name,
+                    type_tag=type_tag,
+                    defer=defer,
+                    value=value,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
 
-        def type_tag(self,
+        def type_tag(self, _: None) -> ast.SubTag[ast.Expr]:
             """Grammar rule.
 
             type_tag: COLON expression
             """
-
-
-
-                    tag=kid[1],
-                    kid=kid,
-                )
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.COLON)
+            tag = self.consume(ast.Expr)
+            return ast.SubTag[ast.Expr](tag=tag, kid=self.cur_nodes)
 
-        def builtin_type(self,
+        def builtin_type(self, _: None) -> ast.Token:
             """Grammar rule.
 
             builtin_type: TYP_TYPE
@@ -1238,22 +1144,18 @@ class JacParser(Pass):
                 | TYP_BYTES
                 | TYP_STRING
             """
-
-
-
-
-
-
-
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            token = self.consume(ast.Token)
+            return ast.BuiltinType(
+                name=token.name,
+                orig_src=self.parse_ref.source,
+                value=token.value,
+                line=token.loc.first_line,
+                end_line=token.loc.last_line,
+                col_start=token.loc.col_start,
+                col_end=token.loc.col_end,
+                pos_start=token.pos_start,
+                pos_end=token.pos_end,
+            )
 
         def code_block(
             self, kid: list[ast.AstNode]
@@ -1266,14 +1168,12 @@ class JacParser(Pass):
             right_enc = kid[-1] if isinstance(kid[-1], ast.Token) else None
             valid_stmt = [i for i in kid if isinstance(i, ast.CodeBlockStmt)]
             if len(valid_stmt) == len(kid) - 2:
-                return
-
-
-
-
-
-                    kid=kid,
-                )
+                return ast.SubNodeList[ast.CodeBlockStmt](
+                    items=valid_stmt,
+                    delim=Tok.WS,
+                    left_enc=left_enc,
+                    right_enc=right_enc,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -1308,7 +1208,7 @@ class JacParser(Pass):
                 | SEMI
             """
             if isinstance(kid[0], ast.CodeBlockStmt) and len(kid) < 2:
-                return
+                return kid[0]
             elif isinstance(kid[0], ast.Token) and kid[0].name == Tok.KW_YIELD:
                 return ast.ExprStmt(
                     expr=(
@@ -1329,7 +1229,7 @@ class JacParser(Pass):
                 )
             elif isinstance(kid[0], ast.CodeBlockStmt):
                 kid[0].add_kids_right([kid[1]])
-                return
+                return kid[0]
             else:
                 raise self.ice()
 
@@ -1339,294 +1239,215 @@ class JacParser(Pass):
             typed_ctx_block: RETURN_HINT expression code_block
             """
             if isinstance(kid[1], ast.Expr) and isinstance(kid[2], ast.SubNodeList):
-                return
-
-
-
-                    kid=kid,
-                )
+                return ast.TypedCtxBlock(
+                    type_ctx=kid[1],
+                    body=kid[2],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
 
-        def if_stmt(self,
+        def if_stmt(self, _: None) -> ast.IfStmt:
             """Grammar rule.
 
             if_stmt: KW_IF expression code_block (elif_stmt | else_stmt)?
             """
-
-
-
-
-
-
-
-
-
-
-                ),
-                kid=kid,
-            )
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.KW_IF)
+            condition = self.consume(ast.Expr)
+            body = self.consume(ast.SubNodeList)
+            else_body = self.match(ast.ElseStmt) or self.match(ast.ElseIf)
+            return ast.IfStmt(
+                condition=condition,
+                body=body,
+                else_body=else_body,
+                kid=self.cur_nodes,
+            )
 
-        def elif_stmt(self,
+        def elif_stmt(self, _: None) -> ast.ElseIf:
             """Grammar rule.
 
             elif_stmt: KW_ELIF expression code_block (elif_stmt | else_stmt)?
             """
-
-
-
-
-
-
-
-
-
-
-                ),
-                kid=kid,
-            )
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.KW_ELIF)
+            condition = self.consume(ast.Expr)
+            body = self.consume(ast.SubNodeList)
+            else_body = self.match(ast.ElseStmt) or self.match(ast.ElseIf)
+            return ast.ElseIf(
+                condition=condition,
+                body=body,
+                else_body=else_body,
+                kid=self.cur_nodes,
+            )
 
-        def else_stmt(self,
+        def else_stmt(self, _: None) -> ast.ElseStmt:
             """Grammar rule.
 
             else_stmt: KW_ELSE code_block
             """
-
-
-
-
-
-
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.KW_ELSE)
+            body = self.consume(ast.SubNodeList)
+            return ast.ElseStmt(
+                body=body,
+                kid=self.cur_nodes,
+            )
 
-        def try_stmt(self,
+        def try_stmt(self, _: None) -> ast.TryStmt:
             """Grammar rule.
 
             try_stmt: KW_TRY code_block except_list? else_stmt? finally_stmt?
             """
-
-            block =
-
-
-
-
-
-
-
-
-
+            self.consume_token(Tok.KW_TRY)
+            block = self.consume(ast.SubNodeList)
+            except_list = self.match(ast.SubNodeList)
+            else_stmt = self.match(ast.ElseStmt)
+            finally_stmt = self.match(ast.FinallyStmt)
+            return ast.TryStmt(
+                body=block,
+                excepts=except_list,
+                else_body=else_stmt,
+                finally_body=finally_stmt,
+                kid=self.cur_nodes,
             )
-            chomp = chomp[1:] if else_stmt else chomp
-            finally_stmt = (
-                chomp[0]
-                if len(chomp) and isinstance(chomp[0], ast.FinallyStmt)
-                else None
-            )
-            if isinstance(block, ast.SubNodeList):
-                return self.nu(
-                    ast.TryStmt(
-                        body=block,
-                        excepts=except_list,
-                        else_body=else_stmt,
-                        finally_body=finally_stmt,
-                        kid=kid,
-                    )
-                )
-            else:
-                raise self.ice()
 
-        def except_list(self,
+        def except_list(self, _: None) -> ast.SubNodeList[ast.Except]:
             """Grammar rule.
 
             except_list: except_def+
             """
-
-
-
-
-
-
-
-
-            )
-            else:
-                raise self.ice()
+            items = [self.consume(ast.Except)]
+            while expt := self.match(ast.Except):
+                items.append(expt)
+            return ast.SubNodeList[ast.Except](
+                items=items,
+                delim=Tok.WS,
+                kid=self.cur_nodes,
+            )
 
-        def except_def(self,
+        def except_def(self, _: None) -> ast.Except:
             """Grammar rule.
 
             except_def: KW_EXCEPT expression (KW_AS NAME)? code_block
             """
-
-
-
-            if
-
-
-
-
-
-
-
-
-            else:
-                raise self.ice()
+            name: ast.Name | None = None
+            self.consume_token(Tok.KW_EXCEPT)
+            ex_type = self.consume(ast.Expr)
+            if self.match_token(Tok.KW_AS):
+                name = self.consume(ast.Name)
+            body = self.consume(ast.SubNodeList)
+            return ast.Except(
+                ex_type=ex_type,
+                name=name,
+                body=body,
+                kid=self.cur_nodes,
+            )
 
-        def finally_stmt(self,
+        def finally_stmt(self, _: None) -> ast.FinallyStmt:
             """Grammar rule.
 
             finally_stmt: KW_FINALLY code_block
             """
-
-
-
-
-
-
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.KW_FINALLY)
+            body = self.consume(ast.SubNodeList)
+            return ast.FinallyStmt(
+                body=body,
+                kid=self.cur_nodes,
+            )
 
-        def for_stmt(self,
+        def for_stmt(self, _: None) -> ast.IterForStmt | ast.InForStmt:
             """Grammar rule.
 
             for_stmt: KW_ASYNC? KW_FOR assignment KW_TO expression KW_BY
                 expression code_block else_stmt?
                 | KW_ASYNC? KW_FOR expression KW_IN expression code_block else_stmt?
             """
-
-
-
+            is_async = bool(self.match_token(Tok.KW_ASYNC))
+            self.consume_token(Tok.KW_FOR)
+            if iter := self.match(ast.Assignment):
+                self.consume_token(Tok.KW_TO)
+                condition = self.consume(ast.Expr)
+                self.consume_token(Tok.KW_BY)
+                count_by = self.consume(ast.Assignment)
+                body = self.consume(ast.SubNodeList)
+                else_body = self.match(ast.ElseStmt)
+                return ast.IterForStmt(
+                    is_async=is_async,
+                    iter=iter,
+                    condition=condition,
+                    count_by=count_by,
+                    body=body,
+                    else_body=else_body,
+                    kid=self.cur_nodes,
+                )
+            target = self.consume(ast.Expr)
+            self.consume_token(Tok.KW_IN)
+            collection = self.consume(ast.Expr)
+            body = self.consume(ast.SubNodeList)
+            else_body = self.match(ast.ElseStmt)
+            return ast.InForStmt(
+                is_async=is_async,
+                target=target,
+                collection=collection,
+                body=body,
+                else_body=else_body,
+                kid=self.cur_nodes,
             )
-            chomp = chomp[1:] if is_async else chomp
-            if isinstance(chomp[1], ast.Assignment):
-                if (
-                    isinstance(chomp[3], ast.Expr)
-                    and isinstance(chomp[5], ast.Assignment)
-                    and isinstance(chomp[6], ast.SubNodeList)
-                ):
-                    return self.nu(
-                        ast.IterForStmt(
-                            is_async=is_async,
-                            iter=chomp[1],
-                            condition=chomp[3],
-                            count_by=chomp[5],
-                            body=chomp[6],
-                            else_body=(
-                                chomp[-1]
-                                if isinstance(chomp[-1], ast.ElseStmt)
-                                else None
-                            ),
-                            kid=kid,
-
)
|
|
1531
|
-
)
|
|
1532
|
-
else:
|
|
1533
|
-
raise self.ice()
|
|
1534
|
-
elif isinstance(chomp[1], ast.Expr):
|
|
1535
|
-
if isinstance(chomp[3], ast.Expr) and isinstance(
|
|
1536
|
-
chomp[4], ast.SubNodeList
|
|
1537
|
-
):
|
|
1538
|
-
return self.nu(
|
|
1539
|
-
ast.InForStmt(
|
|
1540
|
-
is_async=is_async,
|
|
1541
|
-
target=chomp[1],
|
|
1542
|
-
collection=chomp[3],
|
|
1543
|
-
body=chomp[4],
|
|
1544
|
-
else_body=(
|
|
1545
|
-
chomp[-1]
|
|
1546
|
-
if isinstance(chomp[-1], ast.ElseStmt)
|
|
1547
|
-
else None
|
|
1548
|
-
),
|
|
1549
|
-
kid=kid,
|
|
1550
|
-
)
|
|
1551
|
-
)
|
|
1552
|
-
else:
|
|
1553
|
-
raise self.ice()
|
|
1554
|
-
else:
|
|
1555
|
-
raise self.ice()
|
|
1556
1394
|
|
|
1557
|
-
def while_stmt(self,
|
|
1395
|
+
def while_stmt(self, _: None) -> ast.WhileStmt:
|
|
1558
1396
|
"""Grammar rule.
|
|
1559
1397
|
|
|
1560
1398
|
while_stmt: KW_WHILE expression code_block
|
|
1561
1399
|
"""
|
|
1562
|
-
|
|
1563
|
-
|
|
1564
|
-
|
|
1565
|
-
|
|
1566
|
-
|
|
1567
|
-
|
|
1568
|
-
|
|
1569
|
-
|
|
1570
|
-
else:
|
|
1571
|
-
raise self.ice()
|
|
1400
|
+
self.consume_token(Tok.KW_WHILE)
|
|
1401
|
+
condition = self.consume(ast.Expr)
|
|
1402
|
+
body = self.consume(ast.SubNodeList)
|
|
1403
|
+
return ast.WhileStmt(
|
|
1404
|
+
condition=condition,
|
|
1405
|
+
body=body,
|
|
1406
|
+
kid=self.cur_nodes,
|
|
1407
|
+
)
|
|
1572
1408
|
|
|
1573
|
-
def with_stmt(self,
|
|
1409
|
+
def with_stmt(self, _: None) -> ast.WithStmt:
|
|
1574
1410
|
"""Grammar rule.
|
|
1575
1411
|
|
|
1576
|
-
with_stmt: KW_ASYNC? KW_WITH expr_as_list code_block
|
|
1577
|
-
"""
|
|
1578
|
-
|
|
1579
|
-
|
|
1580
|
-
|
|
1581
|
-
)
|
|
1582
|
-
|
|
1583
|
-
|
|
1584
|
-
|
|
1585
|
-
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
is_async=is_async,
|
|
1589
|
-
exprs=chomp[1],
|
|
1590
|
-
body=chomp[2],
|
|
1591
|
-
kid=kid,
|
|
1592
|
-
)
|
|
1593
|
-
)
|
|
1594
|
-
else:
|
|
1595
|
-
raise self.ice()
|
|
1412
|
+
with_stmt: KW_ASYNC? KW_WITH expr_as_list code_block
|
|
1413
|
+
"""
|
|
1414
|
+
is_async = bool(self.match_token(Tok.KW_ASYNC))
|
|
1415
|
+
self.consume_token(Tok.KW_WITH)
|
|
1416
|
+
exprs = self.consume(ast.SubNodeList)
|
|
1417
|
+
body = self.consume(ast.SubNodeList)
|
|
1418
|
+
return ast.WithStmt(
|
|
1419
|
+
is_async=is_async,
|
|
1420
|
+
exprs=exprs,
|
|
1421
|
+
body=body,
|
|
1422
|
+
kid=self.cur_nodes,
|
|
1423
|
+
)
|
|
1596
1424
|
|
|
1597
|
-
def expr_as_list(
|
|
1598
|
-
self, kid: list[ast.AstNode]
|
|
1599
|
-
) -> ast.SubNodeList[ast.ExprAsItem]:
|
|
1425
|
+
def expr_as_list(self, _: None) -> ast.SubNodeList[ast.ExprAsItem]:
|
|
1600
1426
|
"""Grammar rule.
|
|
1601
1427
|
|
|
1602
1428
|
expr_as_list: (expr_as COMMA)* expr_as
|
|
1603
1429
|
"""
|
|
1604
|
-
|
|
1605
|
-
|
|
1430
|
+
items = [self.consume(ast.ExprAsItem)]
|
|
1431
|
+
while self.match_token(Tok.COMMA):
|
|
1432
|
+
items.append(self.consume(ast.ExprAsItem))
|
|
1433
|
+
return ast.SubNodeList[ast.ExprAsItem](
|
|
1434
|
+
items=items,
|
|
1606
1435
|
delim=Tok.COMMA,
|
|
1607
|
-
kid=
|
|
1436
|
+
kid=self.cur_nodes,
|
|
1608
1437
|
)
|
|
1609
|
-
return self.nu(ret)
|
|
1610
1438
|
|
|
1611
|
-
def expr_as(self,
|
|
1439
|
+
def expr_as(self, _: None) -> ast.ExprAsItem:
|
|
1612
1440
|
"""Grammar rule.
|
|
1613
1441
|
|
|
1614
1442
|
expr_as: expression (KW_AS expression)?
|
|
1615
1443
|
"""
|
|
1616
|
-
expr =
|
|
1617
|
-
alias =
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
expr=expr,
|
|
1624
|
-
alias=alias,
|
|
1625
|
-
kid=kid,
|
|
1626
|
-
)
|
|
1627
|
-
)
|
|
1628
|
-
else:
|
|
1629
|
-
raise self.ice()
|
|
1444
|
+
expr = self.consume(ast.Expr)
|
|
1445
|
+
alias = self.consume(ast.Expr) if self.match_token(Tok.KW_AS) else None
|
|
1446
|
+
return ast.ExprAsItem(
|
|
1447
|
+
expr=expr,
|
|
1448
|
+
alias=alias,
|
|
1449
|
+
kid=self.cur_nodes,
|
|
1450
|
+
)
|
|
1630
1451
|
|
|
1631
1452
|
def raise_stmt(self, kid: list[ast.AstNode]) -> ast.RaiseStmt:
|
|
1632
1453
|
"""Grammar rule.
|
|
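Every rewritten rule above leans on a small matching API — consume, consume_token, match, match_token — plus a cur_nodes list, none of which appear in this hunk. A minimal sketch of how such helpers could plausibly behave, assuming cur_nodes holds the child nodes of the rule being reduced and that a node_idx cursor tracks progress (the cursor name and exact error path are assumptions, not code from this release):

    from typing import TypeVar

    T = TypeVar("T", bound=ast.AstNode)

    def match(self, ty: type[T]) -> T | None:
        # Return the next unconsumed child if it is an instance of `ty`, else None.
        if self.node_idx < len(self.cur_nodes) and isinstance(self.cur_nodes[self.node_idx], ty):
            self.node_idx += 1
            return self.cur_nodes[self.node_idx - 1]
        return None

    def consume(self, ty: type[T]) -> T:
        # Like match, but a miss is an internal compiler error.
        if (node := self.match(ty)) is None:
            raise self.ice()
        return node

    def match_token(self, tok: Tok) -> ast.Token | None:
        # Match a specific token kind by name rather than by node type.
        node = self.cur_nodes[self.node_idx] if self.node_idx < len(self.cur_nodes) else None
        if isinstance(node, ast.Token) and node.name == tok:
            self.node_idx += 1
            return node
        return None

    def consume_token(self, tok: Tok) -> ast.Token:
        if (node := self.match_token(tok)) is None:
            raise self.ice()
        return node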
@@ -1639,113 +1460,90 @@ class JacParser(Pass):
|
|
|
1639
1460
|
)
|
|
1640
1461
|
chomp = chomp[2:] if e_type and len(chomp) > 1 else chomp[1:]
|
|
1641
1462
|
e = chomp[0] if len(chomp) > 0 and isinstance(chomp[0], ast.Expr) else None
|
|
1642
|
-
return
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
kid=kid,
|
|
1647
|
-
)
|
|
1463
|
+
return ast.RaiseStmt(
|
|
1464
|
+
cause=e_type,
|
|
1465
|
+
from_target=e,
|
|
1466
|
+
kid=kid,
|
|
1648
1467
|
)
|
|
1649
1468
|
|
|
1650
|
-
def assert_stmt(self,
|
|
1469
|
+
def assert_stmt(self, _: None) -> ast.AssertStmt:
|
|
1651
1470
|
"""Grammar rule.
|
|
1652
1471
|
|
|
1653
1472
|
assert_stmt: KW_ASSERT expression (COMMA expression)?
|
|
1654
1473
|
"""
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
)
|
|
1666
|
-
)
|
|
1667
|
-
else:
|
|
1668
|
-
raise self.ice()
|
|
1474
|
+
error_msg: ast.Expr | None = None
|
|
1475
|
+
self.consume_token(Tok.KW_ASSERT)
|
|
1476
|
+
condition = self.consume(ast.Expr)
|
|
1477
|
+
if self.match_token(Tok.COMMA):
|
|
1478
|
+
error_msg = self.consume(ast.Expr)
|
|
1479
|
+
return ast.AssertStmt(
|
|
1480
|
+
condition=condition,
|
|
1481
|
+
error_msg=error_msg,
|
|
1482
|
+
kid=self.cur_nodes,
|
|
1483
|
+
)
|
|
1669
1484
|
|
|
1670
|
-
def check_stmt(self,
|
|
1485
|
+
def check_stmt(self, _: None) -> ast.CheckStmt:
|
|
1671
1486
|
"""Grammar rule.
|
|
1672
1487
|
|
|
1673
1488
|
check_stmt: KW_CHECK expression
|
|
1674
1489
|
"""
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
1681
|
-
)
|
|
1682
|
-
else:
|
|
1683
|
-
raise self.ice()
|
|
1490
|
+
self.consume_token(Tok.KW_CHECK)
|
|
1491
|
+
target = self.consume(ast.Expr)
|
|
1492
|
+
return ast.CheckStmt(
|
|
1493
|
+
target=target,
|
|
1494
|
+
kid=self.cur_nodes,
|
|
1495
|
+
)
|
|
1684
1496
|
|
|
1685
|
-
def ctrl_stmt(self,
|
|
1497
|
+
def ctrl_stmt(self, _: None) -> ast.CtrlStmt:
|
|
1686
1498
|
"""Grammar rule.
|
|
1687
1499
|
|
|
1688
1500
|
ctrl_stmt: KW_SKIP | KW_BREAK | KW_CONTINUE
|
|
1689
1501
|
"""
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
|
|
1697
|
-
|
|
1698
|
-
|
|
1502
|
+
tok = (
|
|
1503
|
+
self.match_token(Tok.KW_SKIP)
|
|
1504
|
+
or self.match_token(Tok.KW_BREAK)
|
|
1505
|
+
or self.consume_token(Tok.KW_CONTINUE)
|
|
1506
|
+
)
|
|
1507
|
+
return ast.CtrlStmt(
|
|
1508
|
+
ctrl=tok,
|
|
1509
|
+
kid=self.cur_nodes,
|
|
1510
|
+
)
|
|
1699
1511
|
|
|
1700
|
-
def delete_stmt(self,
|
|
1512
|
+
def delete_stmt(self, _: None) -> ast.DeleteStmt:
|
|
1701
1513
|
"""Grammar rule.
|
|
1702
1514
|
|
|
1703
1515
|
delete_stmt: KW_DELETE expression
|
|
1704
1516
|
"""
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
|
|
1708
|
-
|
|
1709
|
-
|
|
1710
|
-
|
|
1711
|
-
)
|
|
1712
|
-
else:
|
|
1713
|
-
raise self.ice()
|
|
1517
|
+
self.consume_token(Tok.KW_DELETE)
|
|
1518
|
+
target = self.consume(ast.Expr)
|
|
1519
|
+
return ast.DeleteStmt(
|
|
1520
|
+
target=target,
|
|
1521
|
+
kid=self.cur_nodes,
|
|
1522
|
+
)
|
|
1714
1523
|
|
|
1715
|
-
def report_stmt(self,
|
|
1524
|
+
def report_stmt(self, _: None) -> ast.ReportStmt:
|
|
1716
1525
|
"""Grammar rule.
|
|
1717
1526
|
|
|
1718
1527
|
report_stmt: KW_REPORT expression
|
|
1719
1528
|
"""
|
|
1720
|
-
|
|
1721
|
-
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
)
|
|
1727
|
-
else:
|
|
1728
|
-
raise self.ice()
|
|
1529
|
+
self.consume_token(Tok.KW_REPORT)
|
|
1530
|
+
target = self.consume(ast.Expr)
|
|
1531
|
+
return ast.ReportStmt(
|
|
1532
|
+
expr=target,
|
|
1533
|
+
kid=self.cur_nodes,
|
|
1534
|
+
)
|
|
1729
1535
|
|
|
1730
|
-
def return_stmt(self,
|
|
1536
|
+
def return_stmt(self, _: None) -> ast.ReturnStmt:
|
|
1731
1537
|
"""Grammar rule.
|
|
1732
1538
|
|
|
1733
1539
|
return_stmt: KW_RETURN expression?
|
|
1734
1540
|
"""
|
|
1735
|
-
|
|
1736
|
-
|
|
1737
|
-
|
|
1738
|
-
|
|
1739
|
-
|
|
1740
|
-
|
|
1741
|
-
)
|
|
1742
|
-
else:
|
|
1743
|
-
return self.nu(
|
|
1744
|
-
ast.ReturnStmt(
|
|
1745
|
-
expr=None,
|
|
1746
|
-
kid=kid,
|
|
1747
|
-
)
|
|
1748
|
-
)
|
|
1541
|
+
self.consume_token(Tok.KW_RETURN)
|
|
1542
|
+
expr = self.match(ast.Expr)
|
|
1543
|
+
return ast.ReturnStmt(
|
|
1544
|
+
expr=expr,
|
|
1545
|
+
kid=self.cur_nodes,
|
|
1546
|
+
)
|
|
1749
1547
|
|
|
1750
1548
|
def walker_stmt(self, kid: list[ast.AstNode]) -> ast.CodeBlockStmt:
|
|
1751
1549
|
"""Grammar rule.
|
|
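The `_: None` parameter in the new rule signatures suggests the callbacks no longer read the argument Lark passes them; instead they read self.cur_nodes. A hypothetical dispatch wrapper illustrating the contract the bodies above rely on (names here are illustrative, not from this release):

    def run_rule(self, rule_name: str, children: list[ast.AstNode]) -> ast.AstNode:
        # Record the children of the rule being reduced and reset the matching
        # cursor, then call the rule method, which ignores its positional argument.
        self.cur_nodes = children
        self.node_idx = 0  # cursor consumed by match/consume (assumed)
        return getattr(self, rule_name)(None)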
@@ -1753,90 +1551,91 @@ class JacParser(Pass):
|
|
|
1753
1551
|
walker_stmt: disengage_stmt | revisit_stmt | visit_stmt | ignore_stmt
|
|
1754
1552
|
"""
|
|
1755
1553
|
if isinstance(kid[0], ast.CodeBlockStmt):
|
|
1756
|
-
return
|
|
1554
|
+
return kid[0]
|
|
1757
1555
|
else:
|
|
1758
1556
|
raise self.ice()
|
|
1759
1557
|
|
|
1760
|
-
def ignore_stmt(self,
|
|
1558
|
+
def ignore_stmt(self, _: None) -> ast.IgnoreStmt:
|
|
1761
1559
|
"""Grammar rule.
|
|
1762
1560
|
|
|
1763
1561
|
ignore_stmt: KW_IGNORE expression SEMI
|
|
1764
1562
|
"""
|
|
1765
|
-
|
|
1766
|
-
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
else:
|
|
1773
|
-
raise self.ice()
|
|
1563
|
+
self.consume_token(Tok.KW_IGNORE)
|
|
1564
|
+
target = self.consume(ast.Expr)
|
|
1565
|
+
self.consume_token(Tok.SEMI)
|
|
1566
|
+
return ast.IgnoreStmt(
|
|
1567
|
+
target=target,
|
|
1568
|
+
kid=self.cur_nodes,
|
|
1569
|
+
)
|
|
1774
1570
|
|
|
1775
|
-
def visit_stmt(self,
|
|
1571
|
+
def visit_stmt(self, _: None) -> ast.VisitStmt:
|
|
1776
1572
|
"""Grammar rule.
|
|
1777
1573
|
|
|
1778
1574
|
visit_stmt: KW_VISIT (inherited_archs)? expression (else_stmt | SEMI)
|
|
1779
1575
|
"""
|
|
1780
|
-
|
|
1781
|
-
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
|
|
1788
|
-
|
|
1789
|
-
|
|
1790
|
-
|
|
1791
|
-
|
|
1792
|
-
else:
|
|
1793
|
-
raise self.ice()
|
|
1576
|
+
self.consume_token(Tok.KW_VISIT)
|
|
1577
|
+
sub_name = self.match(ast.SubNodeList)
|
|
1578
|
+
target = self.consume(ast.Expr)
|
|
1579
|
+
else_body = self.match(ast.ElseStmt)
|
|
1580
|
+
if else_body is None:
|
|
1581
|
+
self.consume_token(Tok.SEMI)
|
|
1582
|
+
return ast.VisitStmt(
|
|
1583
|
+
vis_type=sub_name,
|
|
1584
|
+
target=target,
|
|
1585
|
+
else_body=else_body,
|
|
1586
|
+
kid=self.cur_nodes,
|
|
1587
|
+
)
|
|
1794
1588
|
|
|
1795
|
-
def revisit_stmt(self,
|
|
1589
|
+
def revisit_stmt(self, _: None) -> ast.RevisitStmt:
|
|
1796
1590
|
"""Grammar rule.
|
|
1797
1591
|
|
|
1798
1592
|
revisit_stmt: KW_REVISIT expression? (else_stmt | SEMI)
|
|
1799
1593
|
"""
|
|
1800
|
-
|
|
1801
|
-
|
|
1802
|
-
|
|
1803
|
-
|
|
1804
|
-
|
|
1805
|
-
|
|
1806
|
-
|
|
1807
|
-
|
|
1594
|
+
self.consume_token(Tok.KW_REVISIT)
|
|
1595
|
+
target = self.match(ast.Expr)
|
|
1596
|
+
else_body = self.match(ast.ElseStmt)
|
|
1597
|
+
if else_body is None:
|
|
1598
|
+
self.consume_token(Tok.SEMI)
|
|
1599
|
+
return ast.RevisitStmt(
|
|
1600
|
+
hops=target,
|
|
1601
|
+
else_body=else_body,
|
|
1602
|
+
kid=self.cur_nodes,
|
|
1808
1603
|
)
|
|
1809
1604
|
|
|
1810
|
-
def disengage_stmt(self,
|
|
1605
|
+
def disengage_stmt(self, _: None) -> ast.DisengageStmt:
|
|
1811
1606
|
"""Grammar rule.
|
|
1812
1607
|
|
|
1813
1608
|
disengage_stmt: KW_DISENGAGE SEMI
|
|
1814
1609
|
"""
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1818
|
-
|
|
1610
|
+
kw = self.consume_token(Tok.KW_DISENGAGE)
|
|
1611
|
+
semi = self.consume_token(Tok.SEMI)
|
|
1612
|
+
return ast.DisengageStmt(
|
|
1613
|
+
kid=[kw, semi],
|
|
1819
1614
|
)
|
|
1820
1615
|
|
|
1821
|
-
def global_ref(self,
|
|
1616
|
+
def global_ref(self, _: None) -> ast.GlobalStmt:
|
|
1822
1617
|
"""Grammar rule.
|
|
1823
1618
|
|
|
1824
1619
|
global_ref: GLOBAL_OP name_list
|
|
1825
1620
|
"""
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
|
|
1829
|
-
|
|
1621
|
+
self.consume_token(Tok.GLOBAL_OP)
|
|
1622
|
+
target = self.consume(ast.SubNodeList)
|
|
1623
|
+
return ast.GlobalStmt(
|
|
1624
|
+
target=target,
|
|
1625
|
+
kid=self.cur_nodes,
|
|
1626
|
+
)
|
|
1830
1627
|
|
|
1831
|
-
def nonlocal_ref(self,
|
|
1628
|
+
def nonlocal_ref(self, _: None) -> ast.NonLocalStmt:
|
|
1832
1629
|
"""Grammar rule.
|
|
1833
1630
|
|
|
1834
1631
|
nonlocal_ref: NONLOCAL_OP name_list
|
|
1835
1632
|
"""
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
|
|
1633
|
+
self.consume_token(Tok.NONLOCAL_OP)
|
|
1634
|
+
target = self.consume(ast.SubNodeList)
|
|
1635
|
+
return ast.NonLocalStmt(
|
|
1636
|
+
target=target,
|
|
1637
|
+
kid=self.cur_nodes,
|
|
1638
|
+
)
|
|
1840
1639
|
|
|
1841
1640
|
def assignment(self, kid: list[ast.AstNode]) -> ast.Assignment:
|
|
1842
1641
|
"""Grammar rule.
|
|
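Optional grammar elements now map onto match*/None instead of length checks on the kid list, as the statement rules above show. In outline, sketching the shape only:

    # `return_stmt: KW_RETURN expression?` — the keyword is required, the value is not.
    self.consume_token(Tok.KW_RETURN)
    expr = self.match(ast.Expr)   # None when no expression follows
    return ast.ReturnStmt(expr=expr, kid=self.cur_nodes)

    # `(elem COMMA)* elem` list rules follow the companion idiom:
    items = [self.consume(ast.ExprAsItem)]
    while self.match_token(Tok.COMMA):
        items.append(self.consume(ast.ExprAsItem))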
@@ -1899,88 +1698,49 @@ class JacParser(Pass):
|
|
|
1899
1698
|
kid = [x for x in kid if x not in assignees]
|
|
1900
1699
|
kid.insert(1, new_targ) if is_frozen else kid.insert(0, new_targ)
|
|
1901
1700
|
if is_aug:
|
|
1902
|
-
return
|
|
1903
|
-
ast.Assignment(
|
|
1904
|
-
target=new_targ,
|
|
1905
|
-
type_tag=type_tag if isinstance(type_tag, ast.SubTag) else None,
|
|
1906
|
-
value=value,
|
|
1907
|
-
mutable=is_frozen,
|
|
1908
|
-
aug_op=is_aug,
|
|
1909
|
-
kid=kid,
|
|
1910
|
-
)
|
|
1911
|
-
)
|
|
1912
|
-
return self.nu(
|
|
1913
|
-
ast.Assignment(
|
|
1701
|
+
return ast.Assignment(
|
|
1914
1702
|
target=new_targ,
|
|
1915
1703
|
type_tag=type_tag if isinstance(type_tag, ast.SubTag) else None,
|
|
1916
1704
|
value=value,
|
|
1917
1705
|
mutable=is_frozen,
|
|
1706
|
+
aug_op=is_aug,
|
|
1918
1707
|
kid=kid,
|
|
1919
|
-
semstr=semstr if isinstance(semstr, ast.String) else None,
|
|
1920
1708
|
)
|
|
1709
|
+
return ast.Assignment(
|
|
1710
|
+
target=new_targ,
|
|
1711
|
+
type_tag=type_tag if isinstance(type_tag, ast.SubTag) else None,
|
|
1712
|
+
value=value,
|
|
1713
|
+
mutable=is_frozen,
|
|
1714
|
+
kid=kid,
|
|
1715
|
+
semstr=semstr if isinstance(semstr, ast.String) else None,
|
|
1921
1716
|
)
|
|
1922
1717
|
|
|
1923
|
-
def expression(self,
|
|
1718
|
+
def expression(self, _: None) -> ast.Expr:
|
|
1924
1719
|
"""Grammar rule.
|
|
1925
1720
|
|
|
1926
1721
|
expression: walrus_assign
|
|
1927
1722
|
| pipe (KW_IF expression KW_ELSE expression)?
|
|
1928
1723
|
| lambda_expr
|
|
1929
1724
|
"""
|
|
1930
|
-
|
|
1931
|
-
|
|
1932
|
-
|
|
1933
|
-
|
|
1934
|
-
|
|
1935
|
-
|
|
1936
|
-
|
|
1937
|
-
|
|
1938
|
-
|
|
1939
|
-
|
|
1940
|
-
|
|
1941
|
-
|
|
1942
|
-
)
|
|
1943
|
-
)
|
|
1944
|
-
else:
|
|
1945
|
-
raise self.ice()
|
|
1946
|
-
elif isinstance(kid[0], ast.Expr):
|
|
1947
|
-
return self.nu(kid[0])
|
|
1948
|
-
else:
|
|
1949
|
-
raise self.ice()
|
|
1725
|
+
value = self.consume(ast.Expr)
|
|
1726
|
+
if self.match_token(Tok.KW_IF):
|
|
1727
|
+
condition = self.consume(ast.Expr)
|
|
1728
|
+
self.consume_token(Tok.KW_ELSE)
|
|
1729
|
+
else_value = self.consume(ast.Expr)
|
|
1730
|
+
return ast.IfElseExpr(
|
|
1731
|
+
value=value,
|
|
1732
|
+
condition=condition,
|
|
1733
|
+
else_value=else_value,
|
|
1734
|
+
kid=self.cur_nodes,
|
|
1735
|
+
)
|
|
1736
|
+
return value
|
|
1950
1737
|
|
|
1951
|
-
def walrus_assign(self,
|
|
1738
|
+
def walrus_assign(self, _: None) -> ast.Expr:
|
|
1952
1739
|
"""Grammar rule.
|
|
1953
1740
|
|
|
1954
1741
|
walrus_assign: (walrus_assign WALRUS_EQ)? pipe
|
|
1955
1742
|
"""
|
|
1956
|
-
return self.
|
|
1957
|
-
|
|
1958
|
-
def binary_expr_unwind(self, kid: list[ast.AstNode]) -> ast.Expr:
|
|
1959
|
-
"""Binary expression helper."""
|
|
1960
|
-
if len(kid) > 1:
|
|
1961
|
-
if (
|
|
1962
|
-
isinstance(kid[0], ast.Expr)
|
|
1963
|
-
and isinstance(
|
|
1964
|
-
kid[1],
|
|
1965
|
-
(ast.Token, ast.DisconnectOp, ast.ConnectOp),
|
|
1966
|
-
)
|
|
1967
|
-
and isinstance(kid[2], ast.Expr)
|
|
1968
|
-
):
|
|
1969
|
-
return self.nu(
|
|
1970
|
-
ast.BinaryExpr(
|
|
1971
|
-
left=kid[0],
|
|
1972
|
-
op=kid[1],
|
|
1973
|
-
right=kid[2],
|
|
1974
|
-
kid=kid,
|
|
1975
|
-
)
|
|
1976
|
-
)
|
|
1977
|
-
else:
|
|
1978
|
-
raise self.ice()
|
|
1979
|
-
elif isinstance(kid[0], ast.Expr):
|
|
1980
|
-
return self.nu(kid[0])
|
|
1981
|
-
else:
|
|
1982
|
-
|
|
1983
|
-
raise self.ice()
|
|
1743
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
1984
1744
|
|
|
1985
1745
|
def lambda_expr(self, kid: list[ast.AstNode]) -> ast.LambdaExpr:
|
|
1986
1746
|
"""Grammar rule.
|
|
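The public binary_expr_unwind helper removed above is replaced by calls to a private _binary_expr_unwind, which is not shown in this hunk. Judging from the deleted code, its behavior is presumably the same minus the self.nu wrapping — a sketch under that assumption:

    def _binary_expr_unwind(self, kid: list[ast.AstNode]) -> ast.Expr:
        # Three children (expr, operator, expr) become a BinaryExpr; a single
        # expression passes through unchanged; anything else is an ICE.
        if len(kid) > 1:
            if (
                isinstance(kid[0], ast.Expr)
                and isinstance(kid[1], (ast.Token, ast.DisconnectOp, ast.ConnectOp))
                and isinstance(kid[2], ast.Expr)
            ):
                return ast.BinaryExpr(left=kid[0], op=kid[1], right=kid[2], kid=kid)
            raise self.ice()
        if isinstance(kid[0], ast.Expr):
            return kid[0]
        raise self.ice()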
@@ -2016,156 +1776,134 @@ class JacParser(Pass):
|
|
|
2016
1776
|
new_kid = [i for i in kid if i != params and i != return_type]
|
|
2017
1777
|
new_kid.insert(1, signature) if signature else None
|
|
2018
1778
|
if isinstance(chomp[0], ast.Expr):
|
|
2019
|
-
return
|
|
2020
|
-
|
|
2021
|
-
|
|
2022
|
-
|
|
2023
|
-
kid=new_kid,
|
|
2024
|
-
)
|
|
1779
|
+
return ast.LambdaExpr(
|
|
1780
|
+
signature=signature,
|
|
1781
|
+
body=chomp[0],
|
|
1782
|
+
kid=new_kid,
|
|
2025
1783
|
)
|
|
2026
1784
|
else:
|
|
2027
1785
|
raise self.ice()
|
|
2028
1786
|
|
|
2029
|
-
def pipe(self,
|
|
1787
|
+
def pipe(self, _: None) -> ast.Expr:
|
|
2030
1788
|
"""Grammar rule.
|
|
2031
1789
|
|
|
2032
1790
|
pipe: pipe_back PIPE_FWD pipe
|
|
2033
1791
|
| pipe_back
|
|
2034
1792
|
"""
|
|
2035
|
-
return self.
|
|
1793
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2036
1794
|
|
|
2037
|
-
def pipe_back(self,
|
|
1795
|
+
def pipe_back(self, _: None) -> ast.Expr:
|
|
2038
1796
|
"""Grammar rule.
|
|
2039
1797
|
|
|
2040
1798
|
pipe_back: bitwise_or PIPE_BKWD pipe_back
|
|
2041
1799
|
| bitwise_or
|
|
2042
1800
|
"""
|
|
2043
|
-
return self.
|
|
1801
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2044
1802
|
|
|
2045
|
-
def bitwise_or(self,
|
|
1803
|
+
def bitwise_or(self, _: None) -> ast.Expr:
|
|
2046
1804
|
"""Grammar rule.
|
|
2047
1805
|
|
|
2048
1806
|
bitwise_or: bitwise_xor BW_OR bitwise_or
|
|
2049
1807
|
| bitwise_xor
|
|
2050
1808
|
"""
|
|
2051
|
-
return self.
|
|
1809
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2052
1810
|
|
|
2053
|
-
def bitwise_xor(self,
|
|
1811
|
+
def bitwise_xor(self, _: None) -> ast.Expr:
|
|
2054
1812
|
"""Grammar rule.
|
|
2055
1813
|
|
|
2056
1814
|
bitwise_xor: bitwise_and BW_XOR bitwise_xor
|
|
2057
1815
|
| bitwise_and
|
|
2058
1816
|
"""
|
|
2059
|
-
return self.
|
|
1817
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2060
1818
|
|
|
2061
|
-
def bitwise_and(self,
|
|
1819
|
+
def bitwise_and(self, _: None) -> ast.Expr:
|
|
2062
1820
|
"""Grammar rule.
|
|
2063
1821
|
|
|
2064
1822
|
bitwise_and: shift BW_AND bitwise_and
|
|
2065
1823
|
| shift
|
|
2066
1824
|
"""
|
|
2067
|
-
return self.
|
|
1825
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2068
1826
|
|
|
2069
|
-
def shift(self,
|
|
1827
|
+
def shift(self, _: None) -> ast.Expr:
|
|
2070
1828
|
"""Grammar rule.
|
|
2071
1829
|
|
|
2072
1830
|
shift: (shift (RSHIFT | LSHIFT))? logical_or
|
|
2073
1831
|
"""
|
|
2074
|
-
return self.
|
|
1832
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2075
1833
|
|
|
2076
|
-
def logical_or(self,
|
|
1834
|
+
def logical_or(self, _: None) -> ast.Expr:
|
|
2077
1835
|
"""Grammar rule.
|
|
2078
1836
|
|
|
2079
1837
|
logical_or: logical_and (KW_OR logical_and)*
|
|
2080
1838
|
"""
|
|
2081
|
-
|
|
2082
|
-
|
|
2083
|
-
|
|
2084
|
-
|
|
2085
|
-
|
|
2086
|
-
|
|
2087
|
-
|
|
2088
|
-
|
|
2089
|
-
|
|
2090
|
-
|
|
2091
|
-
|
|
2092
|
-
|
|
2093
|
-
|
|
2094
|
-
return self.nu(kid[0])
|
|
2095
|
-
else:
|
|
2096
|
-
|
|
2097
|
-
raise self.ice()
|
|
1839
|
+
value = self.consume(ast.Expr)
|
|
1840
|
+
if not (ops := self.match_token(Tok.KW_OR)):
|
|
1841
|
+
return value
|
|
1842
|
+
values: list = [value]
|
|
1843
|
+
while value := self.consume(ast.Expr):
|
|
1844
|
+
values.append(value)
|
|
1845
|
+
if not self.match_token(Tok.KW_OR):
|
|
1846
|
+
break
|
|
1847
|
+
return ast.BoolExpr(
|
|
1848
|
+
op=ops,
|
|
1849
|
+
values=values,
|
|
1850
|
+
kid=self.cur_nodes,
|
|
1851
|
+
)
|
|
2098
1852
|
|
|
2099
|
-
def logical_and(self,
|
|
1853
|
+
def logical_and(self, _: None) -> ast.Expr:
|
|
2100
1854
|
"""Grammar rule.
|
|
2101
1855
|
|
|
2102
1856
|
logical_and: logical_not (KW_AND logical_not)*
|
|
2103
1857
|
"""
|
|
2104
|
-
|
|
2105
|
-
|
|
2106
|
-
|
|
2107
|
-
|
|
2108
|
-
|
|
2109
|
-
|
|
2110
|
-
|
|
2111
|
-
|
|
2112
|
-
|
|
2113
|
-
|
|
2114
|
-
|
|
2115
|
-
|
|
2116
|
-
|
|
2117
|
-
return self.nu(kid[0])
|
|
2118
|
-
else:
|
|
2119
|
-
|
|
2120
|
-
raise self.ice()
|
|
1858
|
+
value = self.consume(ast.Expr)
|
|
1859
|
+
if not (ops := self.match_token(Tok.KW_AND)):
|
|
1860
|
+
return value
|
|
1861
|
+
values: list = [value]
|
|
1862
|
+
while value := self.consume(ast.Expr):
|
|
1863
|
+
values.append(value)
|
|
1864
|
+
if not self.match_token(Tok.KW_AND):
|
|
1865
|
+
break
|
|
1866
|
+
return ast.BoolExpr(
|
|
1867
|
+
op=ops,
|
|
1868
|
+
values=values,
|
|
1869
|
+
kid=self.cur_nodes,
|
|
1870
|
+
)
|
|
2121
1871
|
|
|
2122
|
-
def logical_not(self,
|
|
1872
|
+
def logical_not(self, _: None) -> ast.Expr:
|
|
2123
1873
|
"""Grammar rule.
|
|
2124
1874
|
|
|
2125
|
-
|
|
1875
|
+
logical_not: NOT logical_not | compare
|
|
2126
1876
|
"""
|
|
2127
|
-
if
|
|
2128
|
-
|
|
2129
|
-
|
|
2130
|
-
|
|
2131
|
-
|
|
2132
|
-
|
|
2133
|
-
|
|
2134
|
-
|
|
2135
|
-
)
|
|
2136
|
-
else:
|
|
2137
|
-
raise self.ice()
|
|
2138
|
-
if isinstance(kid[0], ast.Expr):
|
|
2139
|
-
return self.nu(kid[0])
|
|
2140
|
-
else:
|
|
2141
|
-
raise self.ice()
|
|
1877
|
+
if op := self.match_token(Tok.NOT):
|
|
1878
|
+
operand = self.consume(ast.Expr)
|
|
1879
|
+
return ast.UnaryExpr(
|
|
1880
|
+
op=op,
|
|
1881
|
+
operand=operand,
|
|
1882
|
+
kid=self.cur_nodes,
|
|
1883
|
+
)
|
|
1884
|
+
return self.consume(ast.Expr)
|
|
2142
1885
|
|
|
2143
|
-
def compare(self,
|
|
1886
|
+
def compare(self, _: list[ast.AstNode]) -> ast.Expr:
|
|
2144
1887
|
"""Grammar rule.
|
|
2145
1888
|
|
|
2146
1889
|
compare: (arithmetic cmp_op)* arithmetic
|
|
2147
1890
|
"""
|
|
2148
|
-
|
|
2149
|
-
|
|
2150
|
-
|
|
2151
|
-
|
|
2152
|
-
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
|
|
2156
|
-
|
|
2157
|
-
|
|
2158
|
-
|
|
2159
|
-
|
|
2160
|
-
|
|
2161
|
-
|
|
2162
|
-
raise self.ice()
|
|
2163
|
-
elif isinstance(kid[0], ast.Expr):
|
|
2164
|
-
return self.nu(kid[0])
|
|
2165
|
-
else:
|
|
2166
|
-
raise self.ice()
|
|
1891
|
+
ops: list = []
|
|
1892
|
+
rights: list = []
|
|
1893
|
+
left = self.consume(ast.Expr)
|
|
1894
|
+
while op := self.match(ast.Token):
|
|
1895
|
+
ops.append(op)
|
|
1896
|
+
rights.append(self.consume(ast.Expr))
|
|
1897
|
+
if not ops:
|
|
1898
|
+
return left
|
|
1899
|
+
return ast.CompareExpr(
|
|
1900
|
+
left=left,
|
|
1901
|
+
ops=ops,
|
|
1902
|
+
rights=rights,
|
|
1903
|
+
kid=self.cur_nodes,
|
|
1904
|
+
)
|
|
2167
1905
|
|
|
2168
|
-
def cmp_op(self,
|
|
1906
|
+
def cmp_op(self, _: None) -> ast.Token:
|
|
2169
1907
|
"""Grammar rule.
|
|
2170
1908
|
|
|
2171
1909
|
cmp_op: KW_ISN
|
|
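The logical_or, logical_and, and compare rules above now flatten operator chains into a single node instead of nesting binaries. Roughly, and only as an illustration of the resulting shapes:

    # `a or b or c` yields one node:
    #     ast.BoolExpr(op=<KW_OR token>, values=[a, b, c], kid=...)
    # and a chained comparison such as `x < y <= z` yields:
    #     ast.CompareExpr(left=x, ops=[<LT>, <LTE>], rights=[y, z], kid=...)
    # Single-operand reductions fall through and return the operand unchanged.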
@@ -2179,116 +1917,105 @@ class JacParser(Pass):
|
|
|
2179
1917
|
| LT
|
|
2180
1918
|
| EE
|
|
2181
1919
|
"""
|
|
2182
|
-
|
|
2183
|
-
return self.nu(kid[0])
|
|
2184
|
-
else:
|
|
2185
|
-
raise self.ice()
|
|
1920
|
+
return self.consume(ast.Token)
|
|
2186
1921
|
|
|
2187
|
-
def arithmetic(self,
|
|
1922
|
+
def arithmetic(self, _: None) -> ast.Expr:
|
|
2188
1923
|
"""Grammar rule.
|
|
2189
1924
|
|
|
2190
1925
|
arithmetic: (arithmetic (MINUS | PLUS))? term
|
|
2191
1926
|
"""
|
|
2192
|
-
return self.
|
|
1927
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2193
1928
|
|
|
2194
|
-
def term(self,
|
|
1929
|
+
def term(self, _: None) -> ast.Expr:
|
|
2195
1930
|
"""Grammar rule.
|
|
2196
1931
|
|
|
2197
1932
|
term: (term (MOD | DIV | FLOOR_DIV | STAR_MUL | DECOR_OP))? power
|
|
2198
1933
|
"""
|
|
2199
|
-
return self.
|
|
1934
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2200
1935
|
|
|
2201
|
-
def factor(self,
|
|
1936
|
+
def factor(self, _: None) -> ast.Expr:
|
|
2202
1937
|
"""Grammar rule.
|
|
2203
1938
|
|
|
2204
1939
|
factor: (BW_NOT | MINUS | PLUS) factor | connect
|
|
2205
1940
|
"""
|
|
2206
|
-
if
|
|
2207
|
-
|
|
2208
|
-
|
|
2209
|
-
|
|
2210
|
-
|
|
2211
|
-
|
|
2212
|
-
|
|
2213
|
-
|
|
2214
|
-
|
|
2215
|
-
|
|
2216
|
-
|
|
2217
|
-
return self.
|
|
1941
|
+
if (
|
|
1942
|
+
op := self.match_token(Tok.BW_NOT)
|
|
1943
|
+
or self.match_token(Tok.MINUS)
|
|
1944
|
+
or self.match_token(Tok.PLUS)
|
|
1945
|
+
):
|
|
1946
|
+
operand = self.consume(ast.Expr)
|
|
1947
|
+
return ast.UnaryExpr(
|
|
1948
|
+
op=op,
|
|
1949
|
+
operand=operand,
|
|
1950
|
+
kid=self.cur_nodes,
|
|
1951
|
+
)
|
|
1952
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2218
1953
|
|
|
2219
|
-
def power(self,
|
|
1954
|
+
def power(self, _: None) -> ast.Expr:
|
|
2220
1955
|
"""Grammar rule.
|
|
2221
1956
|
|
|
2222
1957
|
power: (power STAR_POW)? factor
|
|
2223
1958
|
"""
|
|
2224
|
-
return self.
|
|
1959
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2225
1960
|
|
|
2226
|
-
def connect(self,
|
|
1961
|
+
def connect(self, _: None) -> ast.Expr:
|
|
2227
1962
|
"""Grammar rule.
|
|
2228
1963
|
|
|
2229
1964
|
connect: (connect (connect_op | disconnect_op))? atomic_pipe
|
|
2230
1965
|
"""
|
|
2231
|
-
return self.
|
|
1966
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2232
1967
|
|
|
2233
|
-
def atomic_pipe(self,
|
|
1968
|
+
def atomic_pipe(self, _: None) -> ast.Expr:
|
|
2234
1969
|
"""Grammar rule.
|
|
2235
1970
|
|
|
2236
1971
|
atomic_pipe: (atomic_pipe A_PIPE_FWD)? atomic_pipe_back
|
|
2237
1972
|
"""
|
|
2238
|
-
return self.
|
|
1973
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2239
1974
|
|
|
2240
|
-
def atomic_pipe_back(self,
|
|
1975
|
+
def atomic_pipe_back(self, _: None) -> ast.Expr:
|
|
2241
1976
|
"""Grammar rule.
|
|
2242
1977
|
|
|
2243
1978
|
atomic_pipe_back: (atomic_pipe_back A_PIPE_BKWD)? ds_spawn
|
|
2244
1979
|
"""
|
|
2245
|
-
return self.
|
|
1980
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2246
1981
|
|
|
2247
|
-
def ds_spawn(self,
|
|
1982
|
+
def ds_spawn(self, _: None) -> ast.Expr:
|
|
2248
1983
|
"""Grammar rule.
|
|
2249
1984
|
|
|
2250
1985
|
ds_spawn: (ds_spawn KW_SPAWN)? unpack
|
|
2251
1986
|
"""
|
|
2252
|
-
return self.
|
|
1987
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2253
1988
|
|
|
2254
|
-
def unpack(self,
|
|
1989
|
+
def unpack(self, _: None) -> ast.Expr:
|
|
2255
1990
|
"""Grammar rule.
|
|
2256
1991
|
|
|
2257
1992
|
unpack: STAR_MUL? ref
|
|
2258
1993
|
"""
|
|
2259
|
-
if
|
|
2260
|
-
|
|
2261
|
-
|
|
2262
|
-
|
|
2263
|
-
|
|
2264
|
-
|
|
2265
|
-
|
|
2266
|
-
|
|
2267
|
-
)
|
|
2268
|
-
else:
|
|
2269
|
-
raise self.ice()
|
|
2270
|
-
return self.binary_expr_unwind(kid)
|
|
1994
|
+
if op := self.match_token(Tok.STAR_MUL):
|
|
1995
|
+
operand = self.consume(ast.Expr)
|
|
1996
|
+
return ast.UnaryExpr(
|
|
1997
|
+
op=op,
|
|
1998
|
+
operand=operand,
|
|
1999
|
+
kid=self.cur_nodes,
|
|
2000
|
+
)
|
|
2001
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2271
2002
|
|
|
2272
|
-
def ref(self,
|
|
2003
|
+
def ref(self, _: None) -> ast.Expr:
|
|
2273
2004
|
"""Grammar rule.
|
|
2274
2005
|
|
|
2275
2006
|
ref: walrus_assign
|
|
2276
2007
|
| BW_AND walrus_assign
|
|
2277
2008
|
"""
|
|
2278
|
-
if
|
|
2279
|
-
|
|
2280
|
-
|
|
2281
|
-
|
|
2282
|
-
|
|
2283
|
-
|
|
2284
|
-
|
|
2285
|
-
|
|
2286
|
-
)
|
|
2287
|
-
else:
|
|
2288
|
-
raise self.ice()
|
|
2289
|
-
return self.binary_expr_unwind(kid)
|
|
2009
|
+
if op := self.match_token(Tok.BW_AND):
|
|
2010
|
+
operand = self.consume(ast.Expr)
|
|
2011
|
+
return ast.UnaryExpr(
|
|
2012
|
+
op=op,
|
|
2013
|
+
operand=operand,
|
|
2014
|
+
kid=self.cur_nodes,
|
|
2015
|
+
)
|
|
2016
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2290
2017
|
|
|
2291
|
-
def pipe_call(self,
|
|
2018
|
+
def pipe_call(self, _: None) -> ast.Expr:
|
|
2292
2019
|
"""Grammar rule.
|
|
2293
2020
|
|
|
2294
2021
|
pipe_call: atomic_chain
|
|
@@ -2297,29 +2024,21 @@ class JacParser(Pass):
|
|
|
2297
2024
|
| KW_SPAWN atomic_chain
|
|
2298
2025
|
| KW_AWAIT atomic_chain
|
|
2299
2026
|
"""
|
|
2300
|
-
if len(
|
|
2301
|
-
if (
|
|
2302
|
-
|
|
2303
|
-
|
|
2304
|
-
|
|
2305
|
-
|
|
2306
|
-
return self.nu(
|
|
2307
|
-
ast.AwaitExpr(
|
|
2308
|
-
target=kid[1],
|
|
2309
|
-
kid=kid,
|
|
2310
|
-
)
|
|
2027
|
+
if len(self.cur_nodes) == 2:
|
|
2028
|
+
if self.match_token(Tok.KW_AWAIT):
|
|
2029
|
+
target = self.consume(ast.Expr)
|
|
2030
|
+
return ast.AwaitExpr(
|
|
2031
|
+
target=target,
|
|
2032
|
+
kid=self.cur_nodes,
|
|
2311
2033
|
)
|
|
2312
|
-
elif
|
|
2313
|
-
|
|
2314
|
-
|
|
2315
|
-
|
|
2316
|
-
|
|
2317
|
-
|
|
2318
|
-
)
|
|
2034
|
+
elif op := self.match(ast.Token):
|
|
2035
|
+
operand = self.consume(ast.Expr)
|
|
2036
|
+
return ast.UnaryExpr(
|
|
2037
|
+
op=op,
|
|
2038
|
+
operand=operand,
|
|
2039
|
+
kid=self.cur_nodes,
|
|
2319
2040
|
)
|
|
2320
|
-
|
|
2321
|
-
raise self.ice()
|
|
2322
|
-
return self.binary_expr_unwind(kid)
|
|
2041
|
+
return self._binary_expr_unwind(self.cur_nodes)
|
|
2323
2042
|
|
|
2324
2043
|
def aug_op(self, kid: list[ast.AstNode]) -> ast.Token:
|
|
2325
2044
|
"""Grammar rule.
|
|
@@ -2339,7 +2058,7 @@ class JacParser(Pass):
|
|
|
2339
2058
|
| WALRUS_EQ
|
|
2340
2059
|
"""
|
|
2341
2060
|
if isinstance(kid[0], ast.Token):
|
|
2342
|
-
return
|
|
2061
|
+
return kid[0]
|
|
2343
2062
|
else:
|
|
2344
2063
|
raise self.ice()
|
|
2345
2064
|
|
|
@@ -2351,7 +2070,7 @@ class JacParser(Pass):
|
|
|
2351
2070
|
| (atomic_call | atom | edge_ref_chain)
|
|
2352
2071
|
"""
|
|
2353
2072
|
if len(kid) < 2 and isinstance(kid[0], ast.Expr):
|
|
2354
|
-
return
|
|
2073
|
+
return kid[0]
|
|
2355
2074
|
chomp = [*kid]
|
|
2356
2075
|
target = chomp[0]
|
|
2357
2076
|
chomp = chomp[1:]
|
|
@@ -2364,14 +2083,12 @@ class JacParser(Pass):
|
|
|
2364
2083
|
and isinstance(chomp[0], ast.AtomExpr)
|
|
2365
2084
|
and isinstance(target, ast.Expr)
|
|
2366
2085
|
):
|
|
2367
|
-
return
|
|
2368
|
-
|
|
2369
|
-
|
|
2370
|
-
|
|
2371
|
-
|
|
2372
|
-
|
|
2373
|
-
kid=kid,
|
|
2374
|
-
)
|
|
2086
|
+
return ast.AtomTrailer(
|
|
2087
|
+
target=target,
|
|
2088
|
+
right=chomp[0],
|
|
2089
|
+
is_null_ok=is_null_ok,
|
|
2090
|
+
is_attr=False,
|
|
2091
|
+
kid=kid,
|
|
2375
2092
|
)
|
|
2376
2093
|
elif (
|
|
2377
2094
|
len(chomp) > 1
|
|
@@ -2379,14 +2096,12 @@ class JacParser(Pass):
|
|
|
2379
2096
|
and isinstance(chomp[1], (ast.AtomExpr, ast.AtomTrailer))
|
|
2380
2097
|
and isinstance(target, ast.Expr)
|
|
2381
2098
|
):
|
|
2382
|
-
return
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
kid=kid,
|
|
2389
|
-
)
|
|
2099
|
+
return ast.AtomTrailer(
|
|
2100
|
+
target=(target if chomp[0].name != Tok.DOT_BKWD else chomp[1]),
|
|
2101
|
+
right=(chomp[1] if chomp[0].name != Tok.DOT_BKWD else target),
|
|
2102
|
+
is_null_ok=is_null_ok,
|
|
2103
|
+
is_attr=True,
|
|
2104
|
+
kid=kid,
|
|
2390
2105
|
)
|
|
2391
2106
|
else:
|
|
2392
2107
|
raise self.ice()
|
|
@@ -2402,25 +2117,23 @@ class JacParser(Pass):
|
|
|
2402
2117
|
and kid[-2]
|
|
2403
2118
|
and isinstance(kid[-2], ast.FuncCall)
|
|
2404
2119
|
):
|
|
2405
|
-
return
|
|
2406
|
-
|
|
2407
|
-
|
|
2408
|
-
|
|
2409
|
-
|
|
2410
|
-
kid=kid,
|
|
2411
|
-
)
|
|
2120
|
+
return ast.FuncCall(
|
|
2121
|
+
target=kid[0],
|
|
2122
|
+
params=kid[2] if isinstance(kid[2], ast.SubNodeList) else None,
|
|
2123
|
+
genai_call=kid[-2],
|
|
2124
|
+
kid=kid,
|
|
2412
2125
|
)
|
|
2413
2126
|
if (
|
|
2414
2127
|
len(kid) == 4
|
|
2415
2128
|
and isinstance(kid[0], ast.Expr)
|
|
2416
2129
|
and isinstance(kid[2], ast.SubNodeList)
|
|
2417
2130
|
):
|
|
2418
|
-
return
|
|
2419
|
-
|
|
2131
|
+
return ast.FuncCall(
|
|
2132
|
+
target=kid[0], params=kid[2], genai_call=None, kid=kid
|
|
2420
2133
|
)
|
|
2421
2134
|
elif len(kid) == 3 and isinstance(kid[0], ast.Expr):
|
|
2422
|
-
return
|
|
2423
|
-
|
|
2135
|
+
return ast.FuncCall(
|
|
2136
|
+
target=kid[0], params=None, genai_call=None, kid=kid
|
|
2424
2137
|
)
|
|
2425
2138
|
else:
|
|
2426
2139
|
raise self.ice()
|
|
@@ -2447,14 +2160,10 @@ class JacParser(Pass):
|
|
|
2447
2160
|
)
|
|
2448
2161
|
expr = ast.TupleVal(values=sublist, kid=[sublist])
|
|
2449
2162
|
kid = [expr]
|
|
2450
|
-
return
|
|
2451
|
-
ast.IndexSlice(
|
|
2452
|
-
|
|
2453
|
-
|
|
2454
|
-
],
|
|
2455
|
-
is_range=False,
|
|
2456
|
-
kid=kid,
|
|
2457
|
-
)
|
|
2163
|
+
return ast.IndexSlice(
|
|
2164
|
+
slices=[ast.IndexSlice.Slice(start=expr, stop=None, step=None)],
|
|
2165
|
+
is_range=False,
|
|
2166
|
+
kid=kid,
|
|
2458
2167
|
)
|
|
2459
2168
|
else:
|
|
2460
2169
|
raise self.ice()
|
|
@@ -2489,15 +2198,13 @@ class JacParser(Pass):
|
|
|
2489
2198
|
ast.IndexSlice.Slice(start=expr1, stop=expr2, step=expr3)
|
|
2490
2199
|
)
|
|
2491
2200
|
|
|
2492
|
-
return
|
|
2493
|
-
|
|
2494
|
-
|
|
2495
|
-
|
|
2496
|
-
kid=kid,
|
|
2497
|
-
)
|
|
2201
|
+
return ast.IndexSlice(
|
|
2202
|
+
slices=slices,
|
|
2203
|
+
is_range=True,
|
|
2204
|
+
kid=kid,
|
|
2498
2205
|
)
|
|
2499
2206
|
|
|
2500
|
-
def atom(self,
|
|
2207
|
+
def atom(self, _: None) -> ast.Expr:
|
|
2501
2208
|
"""Grammar rule.
|
|
2502
2209
|
|
|
2503
2210
|
atom: named_ref
|
|
@@ -2506,41 +2213,27 @@ class JacParser(Pass):
|
|
|
2506
2213
|
| atom_literal
|
|
2507
2214
|
| type_ref
|
|
2508
2215
|
"""
|
|
2509
|
-
if
|
|
2510
|
-
|
|
2511
|
-
|
|
2512
|
-
|
|
2513
|
-
|
|
2514
|
-
elif len(kid) == 3:
|
|
2515
|
-
if (
|
|
2516
|
-
isinstance(kid[0], ast.Token)
|
|
2517
|
-
and isinstance(kid[1], (ast.Expr, ast.YieldExpr))
|
|
2518
|
-
and isinstance(kid[2], ast.Token)
|
|
2519
|
-
):
|
|
2520
|
-
ret = ast.AtomUnit(value=kid[1], kid=kid)
|
|
2521
|
-
return self.nu(ret)
|
|
2522
|
-
else:
|
|
2523
|
-
raise self.ice()
|
|
2524
|
-
else:
|
|
2525
|
-
raise self.ice()
|
|
2216
|
+
if self.match_token(Tok.LPAREN):
|
|
2217
|
+
value = self.match(ast.Expr) or self.consume(ast.YieldExpr)
|
|
2218
|
+
self.consume_token(Tok.RPAREN)
|
|
2219
|
+
return ast.AtomUnit(value=value, kid=self.cur_nodes)
|
|
2220
|
+
return self.consume(ast.AtomExpr)
|
|
2526
2221
|
|
|
2527
|
-
def yield_expr(self,
|
|
2222
|
+
def yield_expr(self, _: None) -> ast.YieldExpr:
|
|
2528
2223
|
"""Grammar rule.
|
|
2529
2224
|
|
|
2530
2225
|
yield_expr: KW_YIELD KW_FROM? expression
|
|
2531
2226
|
"""
|
|
2532
|
-
|
|
2533
|
-
|
|
2534
|
-
|
|
2535
|
-
|
|
2536
|
-
|
|
2537
|
-
|
|
2538
|
-
|
|
2539
|
-
|
|
2540
|
-
else:
|
|
2541
|
-
raise self.ice()
|
|
2227
|
+
self.consume_token(Tok.KW_YIELD)
|
|
2228
|
+
is_with_from = bool(self.match_token(Tok.KW_FROM))
|
|
2229
|
+
expr = self.consume(ast.Expr)
|
|
2230
|
+
return ast.YieldExpr(
|
|
2231
|
+
expr=expr,
|
|
2232
|
+
with_from=is_with_from,
|
|
2233
|
+
kid=self.cur_nodes,
|
|
2234
|
+
)
|
|
2542
2235
|
|
|
2543
|
-
def atom_literal(self,
|
|
2236
|
+
def atom_literal(self, _: None) -> ast.AtomExpr:
|
|
2544
2237
|
"""Grammar rule.
|
|
2545
2238
|
|
|
2546
2239
|
atom_literal: builtin_type
|
|
@@ -2553,10 +2246,7 @@ class JacParser(Pass):
|
|
|
2553
2246
|
| HEX
|
|
2554
2247
|
| INT
|
|
2555
2248
|
"""
|
|
2556
|
-
|
|
2557
|
-
return self.nu(kid[0])
|
|
2558
|
-
else:
|
|
2559
|
-
raise self.ice()
|
|
2249
|
+
return self.consume(ast.AtomExpr)
|
|
2560
2250
|
|
|
2561
2251
|
def atom_collection(self, kid: list[ast.AstNode]) -> ast.AtomExpr:
|
|
2562
2252
|
"""Grammar rule.
|
|
@@ -2570,23 +2260,18 @@ class JacParser(Pass):
|
|
|
2570
2260
|
| tuple_val
|
|
2571
2261
|
| list_val
|
|
2572
2262
|
"""
|
|
2573
|
-
|
|
2574
|
-
return self.nu(kid[0])
|
|
2575
|
-
else:
|
|
2576
|
-
raise self.ice()
|
|
2263
|
+
return self.consume(ast.AtomExpr)
|
|
2577
2264
|
|
|
2578
2265
|
def multistring(self, kid: list[ast.AstNode]) -> ast.AtomExpr:
|
|
2579
2266
|
"""Grammar rule.
|
|
2580
2267
|
|
|
2581
|
-
multistring: (fstring | STRING
|
|
2268
|
+
multistring: (fstring | STRING)+
|
|
2582
2269
|
"""
|
|
2583
2270
|
valid_strs = [i for i in kid if isinstance(i, (ast.String, ast.FString))]
|
|
2584
2271
|
if len(valid_strs) == len(kid):
|
|
2585
|
-
return
|
|
2586
|
-
|
|
2587
|
-
|
|
2588
|
-
kid=kid,
|
|
2589
|
-
)
|
|
2272
|
+
return ast.MultiString(
|
|
2273
|
+
strings=valid_strs,
|
|
2274
|
+
kid=kid,
|
|
2590
2275
|
)
|
|
2591
2276
|
else:
|
|
2592
2277
|
raise self.ice()
|
|
@@ -2598,18 +2283,14 @@ class JacParser(Pass):
|
|
|
2598
2283
|
| FSTR_SQ_START fstr_sq_parts FSTR_SQ_END
|
|
2599
2284
|
"""
|
|
2600
2285
|
if len(kid) == 2:
|
|
2601
|
-
return
|
|
2602
|
-
|
|
2603
|
-
|
|
2604
|
-
kid=kid,
|
|
2605
|
-
)
|
|
2286
|
+
return ast.FString(
|
|
2287
|
+
parts=None,
|
|
2288
|
+
kid=kid,
|
|
2606
2289
|
)
|
|
2607
2290
|
elif isinstance(kid[1], ast.SubNodeList):
|
|
2608
|
-
return
|
|
2609
|
-
|
|
2610
|
-
|
|
2611
|
-
kid=kid,
|
|
2612
|
-
)
|
|
2291
|
+
return ast.FString(
|
|
2292
|
+
parts=kid[1],
|
|
2293
|
+
kid=kid,
|
|
2613
2294
|
)
|
|
2614
2295
|
else:
|
|
2615
2296
|
raise self.ice()
|
|
@@ -2630,12 +2311,10 @@ class JacParser(Pass):
|
|
|
2630
2311
|
for i in kid
|
|
2631
2312
|
if isinstance(i, ast.Expr)
|
|
2632
2313
|
]
|
|
2633
|
-
return
|
|
2634
|
-
|
|
2635
|
-
|
|
2636
|
-
|
|
2637
|
-
kid=valid_parts,
|
|
2638
|
-
)
|
|
2314
|
+
return ast.SubNodeList[ast.String | ast.ExprStmt](
|
|
2315
|
+
items=valid_parts,
|
|
2316
|
+
delim=None,
|
|
2317
|
+
kid=valid_parts,
|
|
2639
2318
|
)
|
|
2640
2319
|
|
|
2641
2320
|
def fstr_sq_parts(
|
|
@@ -2654,12 +2333,10 @@ class JacParser(Pass):
|
|
|
2654
2333
|
for i in kid
|
|
2655
2334
|
if isinstance(i, ast.Expr)
|
|
2656
2335
|
]
|
|
2657
|
-
return
|
|
2658
|
-
|
|
2659
|
-
|
|
2660
|
-
|
|
2661
|
-
kid=valid_parts,
|
|
2662
|
-
)
|
|
2336
|
+
return ast.SubNodeList[ast.String | ast.ExprStmt](
|
|
2337
|
+
items=valid_parts,
|
|
2338
|
+
delim=None,
|
|
2339
|
+
kid=valid_parts,
|
|
2663
2340
|
)
|
|
2664
2341
|
|
|
2665
2342
|
def list_val(self, kid: list[ast.AstNode]) -> ast.ListVal:
|
|
@@ -2668,18 +2345,14 @@ class JacParser(Pass):
|
|
|
2668
2345
|
list_val: LSQUARE (expr_list COMMA?)? RSQUARE
|
|
2669
2346
|
"""
|
|
2670
2347
|
if len(kid) == 2:
|
|
2671
|
-
return
|
|
2672
|
-
|
|
2673
|
-
|
|
2674
|
-
kid=kid,
|
|
2675
|
-
)
|
|
2348
|
+
return ast.ListVal(
|
|
2349
|
+
values=None,
|
|
2350
|
+
kid=kid,
|
|
2676
2351
|
)
|
|
2677
|
-
elif isinstance(kid[1], ast.SubNodeList):
|
|
2678
|
-
return
|
|
2679
|
-
|
|
2680
|
-
|
|
2681
|
-
kid=kid,
|
|
2682
|
-
)
|
|
2352
|
+
elif isinstance(kid[1], ast.SubNodeList):
|
|
2353
|
+
return ast.ListVal(
|
|
2354
|
+
values=kid[1],
|
|
2355
|
+
kid=kid,
|
|
2683
2356
|
)
|
|
2684
2357
|
else:
|
|
2685
2358
|
raise self.ice()
|
|
@@ -2690,18 +2363,14 @@ class JacParser(Pass):
|
|
|
2690
2363
|
tuple_val: LPAREN tuple_list? RPAREN
|
|
2691
2364
|
"""
|
|
2692
2365
|
if len(kid) == 2:
|
|
2693
|
-
return
|
|
2694
|
-
|
|
2695
|
-
|
|
2696
|
-
kid=kid,
|
|
2697
|
-
)
|
|
2366
|
+
return ast.TupleVal(
|
|
2367
|
+
values=None,
|
|
2368
|
+
kid=kid,
|
|
2698
2369
|
)
|
|
2699
2370
|
elif isinstance(kid[1], ast.SubNodeList):
|
|
2700
|
-
return
|
|
2701
|
-
|
|
2702
|
-
|
|
2703
|
-
kid=kid,
|
|
2704
|
-
)
|
|
2371
|
+
return ast.TupleVal(
|
|
2372
|
+
values=kid[1],
|
|
2373
|
+
kid=kid,
|
|
2705
2374
|
)
|
|
2706
2375
|
else:
|
|
2707
2376
|
raise self.ice()
|
|
@@ -2712,18 +2381,14 @@ class JacParser(Pass):
|
|
|
2712
2381
|
set_val: LBRACE expr_list COMMA? RBRACE
|
|
2713
2382
|
"""
|
|
2714
2383
|
if len(kid) == 2:
|
|
2715
|
-
return
|
|
2716
|
-
|
|
2717
|
-
|
|
2718
|
-
kid=kid,
|
|
2719
|
-
)
|
|
2384
|
+
return ast.SetVal(
|
|
2385
|
+
values=None,
|
|
2386
|
+
kid=kid,
|
|
2720
2387
|
)
|
|
2721
2388
|
elif isinstance(kid[1], ast.SubNodeList):
|
|
2722
|
-
return
|
|
2723
|
-
|
|
2724
|
-
|
|
2725
|
-
kid=kid,
|
|
2726
|
-
)
|
|
2389
|
+
return ast.SetVal(
|
|
2390
|
+
values=kid[1],
|
|
2391
|
+
kid=kid,
|
|
2727
2392
|
)
|
|
2728
2393
|
else:
|
|
2729
2394
|
raise self.ice()
|
|
@@ -2745,12 +2410,10 @@ class JacParser(Pass):
|
|
|
2745
2410
|
expr = kid[0]
|
|
2746
2411
|
new_kid = [expr]
|
|
2747
2412
|
valid_kid = [i for i in new_kid if isinstance(i, ast.Expr)]
|
|
2748
|
-
return
|
|
2749
|
-
|
|
2750
|
-
|
|
2751
|
-
|
|
2752
|
-
kid=new_kid,
|
|
2753
|
-
)
|
|
2413
|
+
return ast.SubNodeList[ast.Expr](
|
|
2414
|
+
items=valid_kid,
|
|
2415
|
+
delim=Tok.COMMA,
|
|
2416
|
+
kid=new_kid,
|
|
2754
2417
|
)
|
|
2755
2418
|
|
|
2756
2419
|
def kw_expr_list(self, kid: list[ast.AstNode]) -> ast.SubNodeList[ast.KWPair]:
|
|
@@ -2770,12 +2433,10 @@ class JacParser(Pass):
|
|
|
2770
2433
|
expr = kid[0]
|
|
2771
2434
|
new_kid = [expr]
|
|
2772
2435
|
valid_kid = [i for i in new_kid if isinstance(i, ast.KWPair)]
|
|
2773
|
-
return
|
|
2774
|
-
|
|
2775
|
-
|
|
2776
|
-
|
|
2777
|
-
kid=new_kid,
|
|
2778
|
-
)
|
|
2436
|
+
return ast.SubNodeList[ast.KWPair](
|
|
2437
|
+
items=valid_kid,
|
|
2438
|
+
delim=Tok.COMMA,
|
|
2439
|
+
kid=new_kid,
|
|
2779
2440
|
)
|
|
2780
2441
|
|
|
2781
2442
|
def kw_expr(self, kid: list[ast.AstNode]) -> ast.KWPair:
|
|
@@ -2788,20 +2449,16 @@ class JacParser(Pass):
|
|
|
2788
2449
|
and isinstance(kid[0], ast.NameAtom)
|
|
2789
2450
|
and isinstance(kid[2], ast.Expr)
|
|
2790
2451
|
):
|
|
2791
|
-
return
|
|
2792
|
-
|
|
2793
|
-
|
|
2794
|
-
|
|
2795
|
-
kid=kid,
|
|
2796
|
-
)
|
|
2452
|
+
return ast.KWPair(
|
|
2453
|
+
key=kid[0],
|
|
2454
|
+
value=kid[2],
|
|
2455
|
+
kid=kid,
|
|
2797
2456
|
)
|
|
2798
2457
|
elif len(kid) == 2 and isinstance(kid[1], ast.Expr):
|
|
2799
|
-
return
|
|
2800
|
-
|
|
2801
|
-
|
|
2802
|
-
|
|
2803
|
-
kid=kid,
|
|
2804
|
-
)
|
|
2458
|
+
return ast.KWPair(
|
|
2459
|
+
key=None,
|
|
2460
|
+
value=kid[1],
|
|
2461
|
+
kid=kid,
|
|
2805
2462
|
)
|
|
2806
2463
|
else:
|
|
2807
2464
|
raise self.ice()
|
|
@@ -2812,12 +2469,10 @@ class JacParser(Pass):
|
|
|
2812
2469
|
name_list: (named_ref COMMA)* named_ref
|
|
2813
2470
|
"""
|
|
2814
2471
|
valid_kid = [i for i in kid if isinstance(i, ast.Name)]
|
|
2815
|
-
return
|
|
2816
|
-
|
|
2817
|
-
|
|
2818
|
-
|
|
2819
|
-
kid=kid,
|
|
2820
|
-
)
|
|
2472
|
+
return ast.SubNodeList[ast.Name](
|
|
2473
|
+
items=valid_kid,
|
|
2474
|
+
delim=Tok.COMMA,
|
|
2475
|
+
kid=kid,
|
|
2821
2476
|
)
|
|
2822
2477
|
|
|
2823
2478
|
def tuple_list(
|
|
@@ -2840,7 +2495,7 @@ class JacParser(Pass):
|
|
|
2840
2495
|
# Add the comma to the subnode list if it exists, otherwise the last comma will not be a part of
|
|
2841
2496
|
# the ast, we need it for formatting.
|
|
2842
2497
|
chomp[0].kid.append(chomp[1])
|
|
2843
|
-
return
|
|
2498
|
+
return chomp[0]
|
|
2844
2499
|
else:
|
|
2845
2500
|
# The chomp will be like this:
|
|
2846
2501
|
# expression, COMMA, [subnode_list, [COMMA, [kw_expr_list, [COMMA]]]]
|
|
@@ -2859,12 +2514,10 @@ class JacParser(Pass):
|
|
|
2859
2514
|
expr_list = [*expr_list, *chomp[0].kid]
|
|
2860
2515
|
expr_list = [first_expr, *expr_list]
|
|
2861
2516
|
valid_kid = [i for i in expr_list if isinstance(i, (ast.Expr, ast.KWPair))]
|
|
2862
|
-
return
|
|
2863
|
-
|
|
2864
|
-
|
|
2865
|
-
|
|
2866
|
-
kid=kid,
|
|
2867
|
-
)
|
|
2517
|
+
return ast.SubNodeList[ast.Expr | ast.KWPair](
|
|
2518
|
+
items=valid_kid,
|
|
2519
|
+
delim=Tok.COMMA,
|
|
2520
|
+
kid=kid,
|
|
2868
2521
|
)
|
|
2869
2522
|
|
|
2870
2523
|
def dict_val(self, kid: list[ast.AstNode]) -> ast.DictVal:
|
|
@@ -2877,7 +2530,7 @@ class JacParser(Pass):
|
|
|
2877
2530
|
kid=kid,
|
|
2878
2531
|
)
|
|
2879
2532
|
ret.kv_pairs = [i for i in kid if isinstance(i, ast.KVPair)]
|
|
2880
|
-
return
|
|
2533
|
+
return ret
|
|
2881
2534
|
|
|
2882
2535
|
def kv_pair(self, kid: list[ast.AstNode]) -> ast.KVPair:
|
|
2883
2536
|
"""Grammar rule.
|
|
@@ -2889,20 +2542,16 @@ class JacParser(Pass):
|
|
|
2889
2542
|
and isinstance(kid[0], ast.Expr)
|
|
2890
2543
|
and isinstance(kid[2], ast.Expr)
|
|
2891
2544
|
):
|
|
2892
|
-
return
|
|
2893
|
-
|
|
2894
|
-
|
|
2895
|
-
|
|
2896
|
-
kid=kid,
|
|
2897
|
-
)
|
|
2545
|
+
return ast.KVPair(
|
|
2546
|
+
key=kid[0],
|
|
2547
|
+
value=kid[2],
|
|
2548
|
+
kid=kid,
|
|
2898
2549
|
)
|
|
2899
2550
|
elif len(kid) == 2 and isinstance(kid[1], ast.Expr):
|
|
2900
|
-
return
|
|
2901
|
-
|
|
2902
|
-
|
|
2903
|
-
|
|
2904
|
-
kid=kid,
|
|
2905
|
-
)
|
|
2551
|
+
return ast.KVPair(
|
|
2552
|
+
key=None,
|
|
2553
|
+
value=kid[1],
|
|
2554
|
+
kid=kid,
|
|
2906
2555
|
)
|
|
2907
2556
|
else:
|
|
2908
2557
|
raise self.ice()
|
|
@@ -2914,12 +2563,10 @@ class JacParser(Pass):
|
|
|
2914
2563
|
"""
|
|
2915
2564
|
comprs = [i for i in kid if isinstance(i, ast.InnerCompr)]
|
|
2916
2565
|
if isinstance(kid[1], ast.Expr):
|
|
2917
|
-
return
|
|
2918
|
-
|
|
2919
|
-
|
|
2920
|
-
|
|
2921
|
-
kid=kid,
|
|
2922
|
-
)
|
|
2566
|
+
return ast.ListCompr(
|
|
2567
|
+
out_expr=kid[1],
|
|
2568
|
+
compr=comprs,
|
|
2569
|
+
kid=kid,
|
|
2923
2570
|
)
|
|
2924
2571
|
else:
|
|
2925
2572
|
raise self.ice()
|
|
@@ -2931,12 +2578,10 @@ class JacParser(Pass):
|
|
|
2931
2578
|
"""
|
|
2932
2579
|
comprs = [i for i in kid if isinstance(i, ast.InnerCompr)]
|
|
2933
2580
|
if isinstance(kid[1], ast.Expr):
|
|
2934
|
-
return
|
|
2935
|
-
|
|
2936
|
-
|
|
2937
|
-
|
|
2938
|
-
kid=kid,
|
|
2939
|
-
)
|
|
2581
|
+
return ast.GenCompr(
|
|
2582
|
+
out_expr=kid[1],
|
|
2583
|
+
compr=comprs,
|
|
2584
|
+
kid=kid,
|
|
2940
2585
|
)
|
|
2941
2586
|
else:
|
|
2942
2587
|
raise self.ice()
|
|
@@ -2948,12 +2593,10 @@ class JacParser(Pass):
|
|
|
2948
2593
|
"""
|
|
2949
2594
|
comprs = [i for i in kid if isinstance(i, ast.InnerCompr)]
|
|
2950
2595
|
if isinstance(kid[1], ast.Expr) and isinstance(kid[2], ast.InnerCompr):
|
|
2951
|
-
return
|
|
2952
|
-
|
|
2953
|
-
|
|
2954
|
-
|
|
2955
|
-
kid=kid,
|
|
2956
|
-
)
|
|
2596
|
+
return ast.SetCompr(
|
|
2597
|
+
out_expr=kid[1],
|
|
2598
|
+
compr=comprs,
|
|
2599
|
+
kid=kid,
|
|
2957
2600
|
)
|
|
2958
2601
|
else:
|
|
2959
2602
|
raise self.ice()
|
|
@@ -2965,12 +2608,10 @@ class JacParser(Pass):
|
|
|
2965
2608
|
"""
|
|
2966
2609
|
comprs = [i for i in kid if isinstance(i, ast.InnerCompr)]
|
|
2967
2610
|
if isinstance(kid[1], ast.KVPair) and isinstance(kid[2], ast.InnerCompr):
|
|
2968
|
-
return
|
|
2969
|
-
|
|
2970
|
-
|
|
2971
|
-
|
|
2972
|
-
kid=kid,
|
|
2973
|
-
)
|
|
2611
|
+
return ast.DictCompr(
|
|
2612
|
+
kv_pair=kid[1],
|
|
2613
|
+
compr=comprs,
|
|
2614
|
+
kid=kid,
|
|
2974
2615
|
)
|
|
2975
2616
|
else:
|
|
2976
2617
|
raise self.ice()
|
|
@@ -2987,18 +2628,16 @@ class JacParser(Pass):
|
|
|
2987
2628
|
chomp = chomp[1:] if is_async else chomp
|
|
2988
2629
|
chomp = chomp[1:]
|
|
2989
2630
|
if isinstance(chomp[0], ast.Expr) and isinstance(chomp[2], ast.Expr):
|
|
2990
|
-
return
|
|
2991
|
-
|
|
2992
|
-
|
|
2993
|
-
|
|
2994
|
-
|
|
2995
|
-
|
|
2996
|
-
|
|
2997
|
-
|
|
2998
|
-
|
|
2999
|
-
|
|
3000
|
-
kid=chomp,
|
|
3001
|
-
)
|
|
2631
|
+
return ast.InnerCompr(
|
|
2632
|
+
is_async=is_async,
|
|
2633
|
+
target=chomp[0],
|
|
2634
|
+
collection=chomp[2],
|
|
2635
|
+
conditional=(
|
|
2636
|
+
[i for i in chomp[4:] if isinstance(i, ast.Expr)]
|
|
2637
|
+
if len(chomp) > 4 and isinstance(chomp[4], ast.Expr)
|
|
2638
|
+
else None
|
|
2639
|
+
),
|
|
2640
|
+
kid=chomp,
|
|
3002
2641
|
)
|
|
3003
2642
|
else:
|
|
3004
2643
|
raise self.ice()
|
|
@@ -3023,7 +2662,7 @@ class JacParser(Pass):
|
|
|
3023
2662
|
ends_with_comma
|
|
3024
2663
|
): # Append the trailing comma to the subnode list.
|
|
3025
2664
|
kid[0].kid.append(kid[1])
|
|
3026
|
-
return
|
|
2665
|
+
return kid[0]
|
|
3027
2666
|
else:
|
|
3028
2667
|
raise self.ice()
|
|
3029
2668
|
elif isinstance(kid[0], ast.SubNodeList) and isinstance(
|
|
@@ -3035,12 +2674,10 @@ class JacParser(Pass):
|
|
|
3035
2674
|
if isinstance(i, (ast.Expr, ast.KWPair))
|
|
3036
2675
|
]
|
|
3037
2676
|
if len(valid_kid) == len(kid[0].items) + len(kid[2].items):
|
|
3038
|
-
return
|
|
3039
|
-
|
|
3040
|
-
|
|
3041
|
-
|
|
3042
|
-
kid=kid,
|
|
3043
|
-
)
|
|
2677
|
+
return ast.SubNodeList[ast.Expr | ast.KWPair](
|
|
2678
|
+
items=valid_kid,
|
|
2679
|
+
delim=Tok.COMMA,
|
|
2680
|
+
kid=kid,
|
|
3044
2681
|
)
|
|
3045
2682
|
else:
|
|
3046
2683
|
raise self.ice()
|
|
@@ -3065,12 +2702,10 @@ class JacParser(Pass):
|
|
|
3065
2702
|
assign = kid[0]
|
|
3066
2703
|
new_kid = [assign]
|
|
3067
2704
|
valid_kid = [i for i in new_kid if isinstance(i, ast.Assignment)]
|
|
3068
|
-
return
|
|
3069
|
-
|
|
3070
|
-
|
|
3071
|
-
|
|
3072
|
-
kid=new_kid,
|
|
3073
|
-
)
|
|
2705
|
+
return ast.SubNodeList[ast.Assignment](
|
|
2706
|
+
items=valid_kid,
|
|
2707
|
+
delim=Tok.COMMA,
|
|
2708
|
+
kid=new_kid,
|
|
3074
2709
|
)
|
|
3075
2710
|
|
|
3076
2711
|
def arch_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
|
|
@@ -3082,159 +2717,125 @@ class JacParser(Pass):
|
|
|
3082
2717
|
| node_ref
|
|
3083
2718
|
| type_ref
|
|
3084
2719
|
"""
|
|
3085
|
-
|
|
3086
|
-
return self.nu(kid[0])
|
|
3087
|
-
else:
|
|
3088
|
-
raise self.ice()
|
|
2720
|
+
return self.consume(ast.ArchRef)
|
|
3089
2721
|
|
|
3090
2722
|
def node_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
|
|
3091
2723
|
"""Grammar rule.
|
|
3092
2724
|
|
|
3093
2725
|
node_ref: NODE_OP NAME
|
|
3094
2726
|
"""
|
|
3095
|
-
|
|
3096
|
-
|
|
3097
|
-
|
|
3098
|
-
|
|
3099
|
-
|
|
3100
|
-
|
|
3101
|
-
|
|
3102
|
-
)
|
|
3103
|
-
else:
|
|
3104
|
-
raise self.ice()
|
|
2727
|
+
arch_type = self.consume(ast.Token)
|
|
2728
|
+
arch_name = self.consume(ast.NameAtom)
|
|
2729
|
+
return ast.ArchRef(
|
|
2730
|
+
arch_type=arch_type,
|
|
2731
|
+
arch_name=arch_name,
|
|
2732
|
+
kid=self.cur_nodes,
|
|
2733
|
+
)
|
|
3105
2734
|
|
|
3106
2735
|
def edge_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
|
|
3107
2736
|
"""Grammar rule.
|
|
3108
2737
|
|
|
3109
2738
|
edge_ref: EDGE_OP NAME
|
|
3110
2739
|
"""
|
|
3111
|
-
|
|
3112
|
-
|
|
3113
|
-
|
|
3114
|
-
|
|
3115
|
-
|
|
3116
|
-
|
|
3117
|
-
|
|
3118
|
-
)
|
|
3119
|
-
else:
|
|
3120
|
-
raise self.ice()
|
|
2740
|
+
arch_type = self.consume(ast.Token)
|
|
2741
|
+
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def walker_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
             """Grammar rule.

             walker_ref: WALKER_OP NAME
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume(ast.Token)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def class_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
             """Grammar rule.

             class_ref: CLASS_OP name_ref
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume(ast.Token)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def object_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
             """Grammar rule.

             object_ref: OBJECT_OP name_ref
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume(ast.Token)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def type_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
             """Grammar rule.

             type_ref: TYPE_OP (named_ref | builtin_type)
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume(ast.Token)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def enum_ref(self, kid: list[ast.AstNode]) -> ast.ArchRef:
             """Grammar rule.

             enum_ref: ENUM_OP NAME
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume(ast.Token)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

-        def ability_ref(self,
+        def ability_ref(self, _: None) -> ast.ArchRef:
             """Grammar rule.

             ability_ref: ABILITY_OP (special_ref | name_ref)
             """
-
-
-
-
-
-
-
-                )
-            else:
-                raise self.ice()
+            arch_type = self.consume_token(Tok.ABILITY_OP)
+            arch_name = self.consume(ast.NameAtom)
+            return ast.ArchRef(
+                arch_type=arch_type,
+                arch_name=arch_name,
+                kid=self.cur_nodes,
+            )

         def arch_or_ability_chain(self, kid: list[ast.AstNode]) -> ast.ArchRefChain:
             """Grammar rule.

             arch_or_ability_chain: arch_or_ability_chain? (ability_ref | arch_ref)
             """
-            consume =
-            name =
-            if isinstance(kid[0], ast.SubNodeList):
-                consume = kid[0]
-                name = kid[1]
-            else:
-                name = kid[0]
+            consume = self.match(ast.ArchRefChain)
+            name = self.consume(ast.ArchRef)
             new_kid = [*consume.kid, name] if consume else [name]
             valid_kid = [i for i in new_kid if isinstance(i, ast.ArchRef)]
             if len(valid_kid) == len(new_kid):
-                return
-
-
-                        kid=new_kid,
-                    )
+                return ast.ArchRefChain(
+                    archs=valid_kid,
+                    kid=new_kid,
                 )
             else:
                 raise self.ice()
@@ -3248,20 +2849,16 @@ class JacParser(Pass):
                 if isinstance(kid[1], ast.ArchRef) and isinstance(
                     kid[0], ast.ArchRefChain
                 ):
-                    return
-
-
-                            kid=[*(kid[0].kid), kid[1]],
-                        )
+                    return ast.ArchRefChain(
+                        archs=[*(kid[0].archs), kid[1]],
+                        kid=[*(kid[0].kid), kid[1]],
                     )
                 else:
                     raise self.ice()
             elif isinstance(kid[0], ast.ArchRef):
-                return
-
-
-                        kid=kid,
-                    )
+                return ast.ArchRefChain(
+                    archs=[kid[0]],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3275,20 +2872,16 @@ class JacParser(Pass):
                 if isinstance(kid[1], ast.ArchRef) and isinstance(
                     kid[0], ast.ArchRefChain
                 ):
-                    return
-
-
-                            kid=[*(kid[0].kid), kid[1]],
-                        )
+                    return ast.ArchRefChain(
+                        archs=[*(kid[0].archs), kid[1]],
+                        kid=[*(kid[0].kid), kid[1]],
                     )
                 else:
                     raise self.ice()
             elif isinstance(kid[0], ast.ArchRef):
-                return
-
-
-                        kid=kid,
-                    )
+                return ast.ArchRefChain(
+                    archs=[kid[0]],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3302,20 +2895,16 @@ class JacParser(Pass):
                 if isinstance(kid[1], ast.ArchRef) and isinstance(
                     kid[0], ast.ArchRefChain
                 ):
-                    return
-
-
-                            kid=[*(kid[0].kid), kid[1]],
-                        )
+                    return ast.ArchRefChain(
+                        archs=[*(kid[0].archs), kid[1]],
+                        kid=[*(kid[0].kid), kid[1]],
                     )
                 else:
                     raise self.ice()
             elif isinstance(kid[0], ast.ArchRef):
-                return
-
-
-                        kid=kid,
-                    )
+                return ast.ArchRefChain(
+                    archs=[kid[0]],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3326,13 +2915,10 @@ class JacParser(Pass):
                 (EDGE_OP|NODE_OP)? LSQUARE expression? (edge_op_ref (filter_compr | expression)?)+ RSQUARE
             """
             valid_chain = [i for i in kid if isinstance(i, (ast.Expr, ast.FilterCompr))]
-            return
-
-
-
-                and kid[0].name == Tok.EDGE_OP,
-                kid=kid,
-                )
+            return ast.EdgeRefTrailer(
+                chain=valid_chain,
+                edges_only=isinstance(kid[0], ast.Token) and kid[0].name == Tok.EDGE_OP,
+                kid=kid,
             )

         def edge_op_ref(self, kid: list[ast.AstNode]) -> ast.EdgeOpRef:
@@ -3340,10 +2926,7 @@ class JacParser(Pass):

             edge_op_ref: (edge_any | edge_from | edge_to)
             """
-
-                return self.nu(kid[0])
-            else:
-                raise self.ice()
+            return self.consume(ast.EdgeOpRef)

         def edge_to(self, kid: list[ast.AstNode]) -> ast.EdgeOpRef:
             """Grammar rule.
@@ -3353,9 +2936,7 @@ class JacParser(Pass):
             """
             fcond = kid[1] if len(kid) > 1 else None
             if isinstance(fcond, ast.FilterCompr) or fcond is None:
-                return
-                    ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.OUT, kid=kid)
-                )
+                return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.OUT, kid=kid)
             else:
                 raise self.ice()

@@ -3367,9 +2948,7 @@ class JacParser(Pass):
             """
             fcond = kid[1] if len(kid) > 1 else None
             if isinstance(fcond, ast.FilterCompr) or fcond is None:
-                return
-                    ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.IN, kid=kid)
-                )
+                return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.IN, kid=kid)
             else:
                 raise self.ice()

@@ -3381,9 +2960,7 @@ class JacParser(Pass):
             """
             fcond = kid[1] if len(kid) > 1 else None
             if isinstance(fcond, ast.FilterCompr) or fcond is None:
-                return
-                    ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.ANY, kid=kid)
-                )
+                return ast.EdgeOpRef(filter_cond=fcond, edge_dir=EdgeDir.ANY, kid=kid)
             else:
                 raise self.ice()

@@ -3393,7 +2970,7 @@ class JacParser(Pass):
             connect_op: connect_from | connect_to | connect_any
             """
             if len(kid) < 2 and isinstance(kid[0], ast.ConnectOp):
-                return
+                return kid[0]
             else:
                 raise self.ice()

@@ -3403,11 +2980,9 @@ class JacParser(Pass):
             disconnect_op: NOT edge_op_ref
             """
             if isinstance(kid[1], ast.EdgeOpRef):
-                return
-
-
-                        kid=kid,
-                    )
+                return ast.DisconnectOp(
+                    edge_spec=kid[1],
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3430,13 +3005,11 @@ class JacParser(Pass):
                 )
                 if conn_assign:
                     kid[3] = conn_assign
-                return
-
-
-
-
-                        kid=kid,
-                    )
+                return ast.ConnectOp(
+                    conn_type=conn_type,
+                    conn_assign=conn_assign,
+                    edge_dir=EdgeDir.OUT,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3459,13 +3032,11 @@ class JacParser(Pass):
                 )
                 if conn_assign:
                     kid[3] = conn_assign
-                return
-
-
-
-
-                        kid=kid,
-                    )
+                return ast.ConnectOp(
+                    conn_type=conn_type,
+                    conn_assign=conn_assign,
+                    edge_dir=EdgeDir.IN,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3487,13 +3058,11 @@ class JacParser(Pass):
                 )
                 if conn_assign:
                     kid[3] = conn_assign
-                return
-
-
-
-
-                        kid=kid,
-                    )
+                return ast.ConnectOp(
+                    conn_type=conn_type,
+                    conn_assign=conn_assign,
+                    edge_dir=EdgeDir.ANY,
+                    kid=kid,
                 )
             else:
                 raise self.ice()
@@ -3505,11 +3074,11 @@ class JacParser(Pass):
                 | LPAREN TYPE_OP NULL_OK typed_filter_compare_list RPAREN
             """
             if isinstance(kid[2], ast.SubNodeList):
-                return
+                return ast.FilterCompr(compares=kid[2], f_type=None, kid=kid)
             elif isinstance(kid[3], ast.FilterCompr):
                 kid[3].add_kids_left(kid[:3])
                 kid[3].add_kids_right(kid[4:])
-                return
+                return kid[3]
             else:
                 raise self.ice()

@@ -3532,12 +3101,10 @@ class JacParser(Pass):
             expr = kid[0]
             new_kid = [expr]
             valid_kid = [i for i in new_kid if isinstance(i, ast.CompareExpr)]
-            return
-
-
-
-                    kid=new_kid,
-                )
+            return ast.SubNodeList[ast.CompareExpr](
+                items=valid_kid,
+                delim=Tok.COMMA,
+                kid=new_kid,
             )

         def typed_filter_compare_list(self, kid: list[ast.AstNode]) -> ast.FilterCompr:
@@ -3558,7 +3125,7 @@ class JacParser(Pass):
             if isinstance(expr, ast.Expr) and (
                 (isinstance(compares, ast.SubNodeList)) or compares is None
             ):
-                return
+                return ast.FilterCompr(compares=compares, f_type=expr, kid=kid)
             else:
                 raise self.ice()

@@ -3569,110 +3136,95 @@ class JacParser(Pass):
             """
             ret = self.compare(kid)
             if isinstance(ret, ast.CompareExpr):
-                return
+                return ret
             else:
                 raise self.ice()

-        def assign_compr(self,
+        def assign_compr(self, _: None) -> ast.AssignCompr:
             """Grammar rule.

             filter_compr: LPAREN EQ kw_expr_list RPAREN
             """
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            self.consume_token(Tok.LPAREN)
+            self.consume_token(Tok.EQ)
+            assigns = self.consume(ast.SubNodeList)
+            self.consume_token(Tok.RPAREN)
+            return ast.AssignCompr(assigns=assigns, kid=self.cur_nodes)

-        def match_stmt(self,
+        def match_stmt(self, _: None) -> ast.MatchStmt:
             """Grammar rule.

             match_stmt: KW_MATCH expr_list LBRACE match_case_block+ RBRACE
             """
-
-
-
-
-
-
-
-
-
-
-
+            self.consume_token(Tok.KW_MATCH)
+            target = self.consume(ast.Expr)
+            self.consume_token(Tok.LBRACE)
+            cases = [self.consume(ast.MatchCase)]
+            while case := self.match(ast.MatchCase):
+                cases.append(case)
+            self.consume_token(Tok.RBRACE)
+            return ast.MatchStmt(
+                target=target,
+                cases=cases,
+                kid=self.cur_nodes,
+            )

-        def match_case_block(self,
+        def match_case_block(self, _: None) -> ast.MatchCase:
             """Grammar rule.

             match_case_block: KW_CASE pattern_seq (KW_IF expression)? COLON statement_list
             """
-
-
-
-            if
-            guard
-            )
-
-
-
-
-
-
-
-
-
-                raise self.ice()
+            guard: ast.Expr | None = None
+            self.consume_token(Tok.KW_CASE)
+            pattern = self.consume(ast.MatchPattern)
+            if self.match_token(Tok.KW_IF):
+                guard = self.consume(ast.Expr)
+            self.consume_token(Tok.COLON)
+            stmts = [self.consume(ast.CodeBlockStmt)]
+            while stmt := self.match(ast.CodeBlockStmt):
+                stmts.append(stmt)
+            return ast.MatchCase(
+                pattern=pattern,
+                guard=guard,
+                body=stmts,
+                kid=self.cur_nodes,
+            )

-        def pattern_seq(self,
+        def pattern_seq(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             pattern_seq: (or_pattern | as_pattern)
             """
-
-                return self.nu(kid[0])
-            else:
-                raise self.ice()
+            return self.consume(ast.MatchPattern)

-        def or_pattern(self,
+        def or_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             or_pattern: (pattern BW_OR)* pattern
             """
-
-
-
-
-
-
-            patterns
-
-
-                    patterns=patterns,
-                    kid=kid,
-                )
-            )
+            patterns: list = [self.consume(ast.MatchPattern)]
+            while self.match_token(Tok.BW_OR):
+                patterns.append(self.consume(ast.MatchPattern))
+            if len(patterns) == 1:
+                return patterns[0]
+            return ast.MatchOr(
+                patterns=patterns,
+                kid=self.cur_nodes,
+            )

-        def as_pattern(self,
+        def as_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             as_pattern: pattern KW_AS NAME
             """
-
-
-            )
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            pattern = self.consume(ast.MatchPattern)
+            self.consume_token(Tok.KW_AS)
+            name = self.consume(ast.NameAtom)
+            return ast.MatchAs(
+                pattern=pattern,
+                name=name,
+                kid=self.cur_nodes,
+            )

         def pattern(self, kid: list[ast.AstNode]) -> ast.MatchPattern:
             """Grammar rule.
@@ -3683,141 +3235,109 @@ class JacParser(Pass):
                 | mapping_pattern
                 | class_pattern
             """
-
-                return self.nu(kid[0])
-            else:
-                raise self.ice()
+            return self.consume(ast.MatchPattern)

-        def literal_pattern(self,
+        def literal_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             literal_pattern: (INT | FLOAT | multistring)
             """
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            value = self.consume(ast.Expr)
+            return ast.MatchValue(
+                value=value,
+                kid=self.cur_nodes,
+            )

-        def singleton_pattern(self,
+        def singleton_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             singleton_pattern: (NULL | BOOL)
             """
-
-
-
-
-
-                )
-            )
-            else:
-                raise self.ice()
+            value = self.match(ast.Null) or self.consume(ast.Bool)
+            return ast.MatchSingleton(
+                value=value,
+                kid=self.cur_nodes,
+            )

-        def capture_pattern(self,
+        def capture_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             capture_pattern: NAME
             """
-
-
-
-
-            ):
-                return self.nu(
-                    ast.MatchWild(
-                        kid=kid,
-                    )
-                )
-            if isinstance(kid[0], ast.NameAtom):
-                return self.nu(
-                    ast.MatchAs(
-                        name=kid[0],
-                        pattern=None,
-                        kid=kid,
-                    )
+            name = self.consume(ast.Name)
+            if name.sym_name == "_":
+                return ast.MatchWild(
+                    kid=self.cur_nodes,
                 )
-
-
+            return ast.MatchAs(
+                name=name,
+                pattern=None,
+                kid=self.cur_nodes,
+            )

-        def sequence_pattern(self,
+        def sequence_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             sequence_pattern: LSQUARE list_inner_pattern (COMMA list_inner_pattern)* RSQUARE
                 | LPAREN list_inner_pattern (COMMA list_inner_pattern)* RPAREN
             """
-
-
-
-
-
-
+            self.consume_token(Tok.LSQUARE) or self.consume_token(Tok.LPAREN)
+            patterns = [self.consume(ast.MatchPattern)]
+            while self.match_token(Tok.COMMA):
+                patterns.append(self.consume(ast.MatchPattern))
+            self.consume_token(Tok.RSQUARE) or self.consume_token(Tok.RPAREN)
+            return ast.MatchSequence(
+                values=patterns,
+                kid=self.cur_nodes,
             )

-        def mapping_pattern(self,
+        def mapping_pattern(self, _: None) -> ast.MatchMapping:
             """Grammar rule.

             mapping_pattern: LBRACE (dict_inner_pattern (COMMA dict_inner_pattern)*)? RBRACE
             """
-
-
-
-
-
-                    values=patterns,
-                    kid=kid,
+            self.consume_token(Tok.LBRACE)
+            patterns = [self.match(ast.MatchKVPair) or self.consume(ast.MatchStar)]
+            while self.match_token(Tok.COMMA):
+                patterns.append(
+                    self.match(ast.MatchKVPair) or self.consume(ast.MatchStar)
                 )
+            self.consume_token(Tok.RBRACE)
+            return ast.MatchMapping(
+                values=patterns,
+                kid=self.cur_nodes,
             )

-        def list_inner_pattern(self,
+        def list_inner_pattern(self, _: None) -> ast.MatchPattern:
             """Grammar rule.

             list_inner_pattern: (pattern_seq | STAR_MUL NAME)
             """
-            if
-
-
-
-
-
-                        name=kid[-1],
-                        kid=kid,
-                    )
+            if self.match_token(Tok.STAR_MUL):
+                name = self.consume(ast.Name)
+                return ast.MatchStar(
+                    is_list=True,
+                    name=name,
+                    kid=self.cur_nodes,
                 )
-
-                raise self.ice()
+            return self.consume(ast.MatchPattern)

-        def dict_inner_pattern(
-            self, kid: list[ast.AstNode]
-        ) -> ast.MatchKVPair | ast.MatchStar:
+        def dict_inner_pattern(self, _: None) -> ast.MatchKVPair | ast.MatchStar:
             """Grammar rule.

             dict_inner_pattern: (pattern_seq COLON pattern_seq | STAR_POW NAME)
             """
-            if
-
-
-
-
-
-                        value=kid[2],
-                        kid=kid,
-                    )
-                )
-            elif isinstance(kid[-1], ast.Name):
-                return self.nu(
-                    ast.MatchStar(
-                        is_list=False,
-                        name=kid[-1],
-                        kid=kid,
-                    )
+            if self.match_token(Tok.STAR_POW):
+                name = self.consume(ast.Name)
+                return ast.MatchStar(
+                    is_list=False,
+                    name=name,
+                    kid=self.cur_nodes,
                 )
-
-
+            pattern = self.consume(ast.MatchPattern)
+            self.consume_token(Tok.COLON)
+            value = self.consume(ast.MatchPattern)
+            return ast.MatchKVPair(key=pattern, value=value, kid=self.cur_nodes)

         def class_pattern(self, kid: list[ast.AstNode]) -> ast.MatchArch:
             """Grammar rule.
@@ -3886,13 +3406,11 @@ class JacParser(Pass):
                 kid_nodes.append(kw)
                 kid_nodes.append(rapren)

-                return
-
-
-
-
-                        kid=kid_nodes,
-                    )
+                return ast.MatchArch(
+                    name=name,
+                    arg_patterns=arg,
+                    kw_patterns=kw,
+                    kid=kid_nodes,
                 )
             else:
                 raise self.ice()
@@ -3998,7 +3516,6 @@ class JacParser(Pass):
                 Tok.FSTR_BESC,
                 Tok.FSTR_PIECE,
                 Tok.FSTR_SQ_PIECE,
-                Tok.DOC_STRING,
             ]:
                 ret_type = ast.String
                 if token.type == Tok.FSTR_BESC:
@@ -4027,4 +3544,4 @@ class JacParser(Pass):
                 err.column = ret.loc.col_start
                 raise err
             self.terminals.append(ret)
-            return
+            return ret