jaclang 0.8.9__py3-none-any.whl → 0.8.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of jaclang might be problematic. Click here for more details.
- jaclang/cli/cli.py +147 -25
- jaclang/cli/cmdreg.py +144 -8
- jaclang/compiler/__init__.py +6 -1
- jaclang/compiler/codeinfo.py +16 -1
- jaclang/compiler/constant.py +33 -13
- jaclang/compiler/jac.lark +130 -31
- jaclang/compiler/larkparse/jac_parser.py +2 -2
- jaclang/compiler/parser.py +567 -176
- jaclang/compiler/passes/__init__.py +2 -1
- jaclang/compiler/passes/ast_gen/__init__.py +5 -0
- jaclang/compiler/passes/ast_gen/base_ast_gen_pass.py +54 -0
- jaclang/compiler/passes/ast_gen/jsx_processor.py +344 -0
- jaclang/compiler/passes/ecmascript/__init__.py +25 -0
- jaclang/compiler/passes/ecmascript/es_unparse.py +576 -0
- jaclang/compiler/passes/ecmascript/esast_gen_pass.py +2068 -0
- jaclang/compiler/passes/ecmascript/estree.py +972 -0
- jaclang/compiler/passes/ecmascript/tests/__init__.py +1 -0
- jaclang/compiler/passes/ecmascript/tests/fixtures/advanced_language_features.jac +170 -0
- jaclang/compiler/passes/ecmascript/tests/fixtures/class_separate_impl.impl.jac +30 -0
- jaclang/compiler/passes/ecmascript/tests/fixtures/class_separate_impl.jac +14 -0
- jaclang/compiler/passes/ecmascript/tests/fixtures/client_jsx.jac +89 -0
- jaclang/compiler/passes/ecmascript/tests/fixtures/core_language_features.jac +195 -0
- jaclang/compiler/passes/ecmascript/tests/test_esast_gen_pass.py +167 -0
- jaclang/compiler/passes/ecmascript/tests/test_js_generation.py +239 -0
- jaclang/compiler/passes/main/__init__.py +0 -3
- jaclang/compiler/passes/main/annex_pass.py +23 -1
- jaclang/compiler/passes/main/pyast_gen_pass.py +324 -234
- jaclang/compiler/passes/main/pyast_load_pass.py +46 -11
- jaclang/compiler/passes/main/pyjac_ast_link_pass.py +2 -0
- jaclang/compiler/passes/main/sym_tab_build_pass.py +18 -1
- jaclang/compiler/passes/main/tests/fixtures/autoimpl.cl.jac +7 -0
- jaclang/compiler/passes/main/tests/fixtures/checker_arity.jac +3 -0
- jaclang/compiler/passes/main/tests/fixtures/checker_class_construct.jac +33 -0
- jaclang/compiler/passes/main/tests/fixtures/defuse_modpath.jac +7 -0
- jaclang/compiler/passes/main/tests/fixtures/member_access_type_resolve.jac +2 -1
- jaclang/compiler/passes/main/tests/test_checker_pass.py +31 -2
- jaclang/compiler/passes/main/tests/test_def_use_pass.py +12 -0
- jaclang/compiler/passes/main/tests/test_import_pass.py +23 -4
- jaclang/compiler/passes/main/tests/test_pyast_gen_pass.py +25 -0
- jaclang/compiler/passes/main/type_checker_pass.py +7 -0
- jaclang/compiler/passes/tool/doc_ir_gen_pass.py +115 -0
- jaclang/compiler/passes/tool/fuse_comments_pass.py +1 -10
- jaclang/compiler/passes/tool/tests/test_jac_format_pass.py +4 -1
- jaclang/compiler/passes/transform.py +9 -1
- jaclang/compiler/passes/uni_pass.py +5 -7
- jaclang/compiler/program.py +22 -25
- jaclang/compiler/tests/test_client_codegen.py +113 -0
- jaclang/compiler/tests/test_importer.py +12 -10
- jaclang/compiler/tests/test_parser.py +249 -3
- jaclang/compiler/type_system/type_evaluator.jac +169 -50
- jaclang/compiler/type_system/type_utils.py +1 -1
- jaclang/compiler/type_system/types.py +6 -0
- jaclang/compiler/unitree.py +430 -84
- jaclang/langserve/engine.jac +224 -288
- jaclang/langserve/sem_manager.jac +12 -8
- jaclang/langserve/server.jac +48 -48
- jaclang/langserve/tests/fixtures/greet.py +17 -0
- jaclang/langserve/tests/fixtures/md_path.jac +22 -0
- jaclang/langserve/tests/fixtures/user.jac +15 -0
- jaclang/langserve/tests/test_server.py +66 -371
- jaclang/lib.py +1 -1
- jaclang/runtimelib/client_bundle.py +169 -0
- jaclang/runtimelib/client_runtime.jac +586 -0
- jaclang/runtimelib/constructs.py +2 -0
- jaclang/runtimelib/machine.py +259 -100
- jaclang/runtimelib/meta_importer.py +111 -22
- jaclang/runtimelib/mtp.py +15 -0
- jaclang/runtimelib/server.py +1089 -0
- jaclang/runtimelib/tests/fixtures/client_app.jac +18 -0
- jaclang/runtimelib/tests/fixtures/custom_access_validation.jac +1 -1
- jaclang/runtimelib/tests/fixtures/savable_object.jac +4 -5
- jaclang/runtimelib/tests/fixtures/serve_api.jac +75 -0
- jaclang/runtimelib/tests/test_client_bundle.py +55 -0
- jaclang/runtimelib/tests/test_client_render.py +63 -0
- jaclang/runtimelib/tests/test_serve.py +1069 -0
- jaclang/settings.py +0 -2
- jaclang/tests/fixtures/iife_functions.jac +142 -0
- jaclang/tests/fixtures/iife_functions_client.jac +143 -0
- jaclang/tests/fixtures/multistatement_lambda.jac +116 -0
- jaclang/tests/fixtures/multistatement_lambda_client.jac +113 -0
- jaclang/tests/fixtures/needs_import_dup.jac +6 -4
- jaclang/tests/fixtures/py_run.py +7 -5
- jaclang/tests/fixtures/pyfunc_fstr.py +2 -2
- jaclang/tests/fixtures/simple_lambda_test.jac +12 -0
- jaclang/tests/test_cli.py +1 -1
- jaclang/tests/test_language.py +10 -39
- jaclang/tests/test_reference.py +17 -2
- jaclang/utils/NonGPT.py +375 -0
- jaclang/utils/helpers.py +44 -16
- jaclang/utils/lang_tools.py +31 -4
- jaclang/utils/tests/test_lang_tools.py +1 -1
- jaclang/utils/treeprinter.py +8 -3
- {jaclang-0.8.9.dist-info → jaclang-0.8.10.dist-info}/METADATA +3 -3
- {jaclang-0.8.9.dist-info → jaclang-0.8.10.dist-info}/RECORD +96 -66
- jaclang/compiler/passes/main/binder_pass.py +0 -594
- jaclang/compiler/passes/main/tests/fixtures/sym_binder.jac +0 -47
- jaclang/compiler/passes/main/tests/test_binder_pass.py +0 -111
- jaclang/langserve/tests/session.jac +0 -294
- jaclang/langserve/tests/test_dev_server.py +0 -80
- jaclang/runtimelib/importer.py +0 -351
- jaclang/tests/test_typecheck.py +0 -542
- {jaclang-0.8.9.dist-info → jaclang-0.8.10.dist-info}/WHEEL +0 -0
- {jaclang-0.8.9.dist-info → jaclang-0.8.10.dist-info}/entry_points.txt +0 -0
jaclang/compiler/parser.py
CHANGED
|
@@ -3,17 +3,17 @@
|
|
|
3
3
|
from __future__ import annotations
|
|
4
4
|
|
|
5
5
|
import keyword
|
|
6
|
-
import logging
|
|
7
6
|
import os
|
|
8
7
|
import sys
|
|
9
|
-
from
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from threading import Event
|
|
10
|
+
from typing import Callable, Optional, Sequence, TYPE_CHECKING, TypeAlias, TypeVar, cast
|
|
10
11
|
|
|
11
12
|
import jaclang.compiler.unitree as uni
|
|
12
13
|
from jaclang.compiler import TOKEN_MAP, jac_lark as jl
|
|
13
14
|
from jaclang.compiler.constant import EdgeDir, Tokens as Tok
|
|
14
15
|
from jaclang.compiler.passes.main import Transform
|
|
15
16
|
from jaclang.utils.helpers import ANSIColors
|
|
16
|
-
from jaclang.vendor.lark import Lark, Transformer, Tree, logger
|
|
17
17
|
|
|
18
18
|
if TYPE_CHECKING:
|
|
19
19
|
from jaclang.compiler.program import JacProgram
|
|
@@ -22,25 +22,70 @@ T = TypeVar("T", bound=uni.UniNode)
|
|
|
22
22
|
TL = TypeVar("TL", bound=(uni.UniNode | list))
|
|
23
23
|
|
|
24
24
|
|
|
25
|
+
@dataclass
|
|
26
|
+
class LarkParseInput:
|
|
27
|
+
"""Input for Lark parser transform."""
|
|
28
|
+
|
|
29
|
+
ir_value: str
|
|
30
|
+
on_error: Callable[[jl.UnexpectedInput], bool]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass
|
|
34
|
+
class LarkParseOutput:
|
|
35
|
+
"""Output from Lark parser transform."""
|
|
36
|
+
|
|
37
|
+
tree: jl.Tree[jl.Tree[str]]
|
|
38
|
+
comments: list[jl.Token]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class LarkParseTransform(Transform[LarkParseInput, LarkParseOutput]):
|
|
42
|
+
"""Transform for Lark parsing step."""
|
|
43
|
+
|
|
44
|
+
comment_cache: list[jl.Token] = []
|
|
45
|
+
parser = jl.Lark_StandAlone(
|
|
46
|
+
lexer_callbacks={"COMMENT": lambda comment: LarkParseTransform.comment_cache.append(comment)} # type: ignore
|
|
47
|
+
)
|
|
48
|
+
|
|
49
|
+
def __init__(self, ir_in: LarkParseInput, prog: JacProgram) -> None:
|
|
50
|
+
"""Initialize Lark parser transform."""
|
|
51
|
+
Transform.__init__(self, ir_in=ir_in, prog=prog)
|
|
52
|
+
|
|
53
|
+
def transform(self, ir_in: LarkParseInput) -> LarkParseOutput:
|
|
54
|
+
"""Transform input IR by parsing with Lark."""
|
|
55
|
+
LarkParseTransform.comment_cache = []
|
|
56
|
+
tree = LarkParseTransform.parser.parse(ir_in.ir_value, on_error=ir_in.on_error)
|
|
57
|
+
return LarkParseOutput(
|
|
58
|
+
tree=tree,
|
|
59
|
+
comments=LarkParseTransform.comment_cache.copy(),
|
|
60
|
+
)
|
|
61
|
+
|
|
62
|
+
|
|
25
63
|
class JacParser(Transform[uni.Source, uni.Module]):
|
|
26
64
|
"""Jac Parser."""
|
|
27
65
|
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
66
|
+
def __init__(
|
|
67
|
+
self, root_ir: uni.Source, prog: JacProgram, cancel_token: Event | None = None
|
|
68
|
+
) -> None:
|
|
31
69
|
"""Initialize parser."""
|
|
32
70
|
self.mod_path = root_ir.loc.mod_path
|
|
33
71
|
self.node_list: list[uni.UniNode] = []
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
Transform.__init__(self, ir_in=root_ir, prog=prog)
|
|
72
|
+
self._node_ids: set[int] = set()
|
|
73
|
+
Transform.__init__(self, ir_in=root_ir, prog=prog, cancel_token=cancel_token)
|
|
37
74
|
|
|
38
75
|
def transform(self, ir_in: uni.Source) -> uni.Module:
|
|
39
76
|
"""Transform input IR."""
|
|
40
77
|
try:
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
78
|
+
# Create input for Lark parser transform
|
|
79
|
+
lark_input = LarkParseInput(
|
|
80
|
+
ir_value=ir_in.value,
|
|
81
|
+
on_error=self.error_callback,
|
|
82
|
+
)
|
|
83
|
+
# Use LarkParseTransform instead of direct parser call
|
|
84
|
+
lark_transform = LarkParseTransform(ir_in=lark_input, prog=self.prog)
|
|
85
|
+
parse_output = lark_transform.ir_out
|
|
86
|
+
# Transform parse tree to AST
|
|
87
|
+
mod = JacParser.TreeToAST(parser=self).transform(parse_output.tree)
|
|
88
|
+
ir_in.comments = [self.proc_comment(i, mod) for i in parse_output.comments]
|
|
44
89
|
if not isinstance(mod, uni.Module):
|
|
45
90
|
raise self.ice()
|
|
46
91
|
if len(self.errors_had) != 0:
|
|
@@ -190,37 +235,6 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
190
235
|
print(error_label, end=" ", file=sys.stderr)
|
|
191
236
|
print(alrt.pretty_print(colors=colors), file=sys.stderr)
|
|
192
237
|
|
|
193
|
-
@staticmethod
|
|
194
|
-
def _comment_callback(comment: jl.Token) -> None:
|
|
195
|
-
JacParser.comment_cache.append(comment)
|
|
196
|
-
|
|
197
|
-
@staticmethod
|
|
198
|
-
def parse(
|
|
199
|
-
ir: str, on_error: Callable[[jl.UnexpectedInput], bool]
|
|
200
|
-
) -> tuple[jl.Tree[jl.Tree[str]], list[jl.Token]]:
|
|
201
|
-
"""Parse input IR."""
|
|
202
|
-
JacParser.comment_cache = []
|
|
203
|
-
return (
|
|
204
|
-
JacParser.parser.parse(ir, on_error=on_error),
|
|
205
|
-
JacParser.comment_cache,
|
|
206
|
-
)
|
|
207
|
-
|
|
208
|
-
@staticmethod
|
|
209
|
-
def make_dev() -> None:
|
|
210
|
-
"""Make parser in dev mode."""
|
|
211
|
-
JacParser.parser = Lark.open(
|
|
212
|
-
"jac.lark",
|
|
213
|
-
parser="lalr",
|
|
214
|
-
rel_to=__file__,
|
|
215
|
-
debug=True,
|
|
216
|
-
lexer_callbacks={"COMMENT": JacParser._comment_callback},
|
|
217
|
-
)
|
|
218
|
-
JacParser.JacTransformer = Transformer[Tree[str], uni.UniNode] # type: ignore
|
|
219
|
-
logger.setLevel(logging.DEBUG)
|
|
220
|
-
|
|
221
|
-
comment_cache: list[jl.Token] = []
|
|
222
|
-
|
|
223
|
-
parser = jl.Lark_StandAlone(lexer_callbacks={"COMMENT": _comment_callback}) # type: ignore
|
|
224
238
|
JacTransformer: TypeAlias = jl.Transformer[jl.Tree[str], uni.UniNode]
|
|
225
239
|
|
|
226
240
|
class TreeToAST(JacTransformer):
|
|
@@ -245,7 +259,9 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
245
259
|
|
|
246
260
|
def _node_update(self, node: T) -> T:
|
|
247
261
|
self.parse_ref.cur_node = node
|
|
248
|
-
|
|
262
|
+
node_id = id(node)
|
|
263
|
+
if node_id not in self.parse_ref._node_ids:
|
|
264
|
+
self.parse_ref._node_ids.add(node_id)
|
|
249
265
|
self.parse_ref.node_list.append(node)
|
|
250
266
|
return node
|
|
251
267
|
|
|
@@ -253,6 +269,8 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
253
269
|
self, tree: jl.Tree, new_children: None | list[uni.UniNode] = None
|
|
254
270
|
) -> uni.UniNode:
|
|
255
271
|
self.cur_nodes = new_children or tree.children # type: ignore[assignment]
|
|
272
|
+
if self.parse_ref.is_canceled():
|
|
273
|
+
raise StopIteration
|
|
256
274
|
try:
|
|
257
275
|
return self._node_update(super()._call_userfunc(tree, new_children))
|
|
258
276
|
finally:
|
|
@@ -372,17 +390,29 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
372
390
|
| STRING (tl_stmt_with_doc | toplevel_stmt)*
|
|
373
391
|
"""
|
|
374
392
|
doc = self.match(uni.String)
|
|
375
|
-
|
|
393
|
+
# Collect all statements, flattening lists from cl { ... } blocks
|
|
394
|
+
body: list[uni.ElementStmt] = []
|
|
395
|
+
flat_kids: list[uni.UniNode] = []
|
|
396
|
+
|
|
397
|
+
for node in self.cur_nodes:
|
|
398
|
+
if isinstance(node, list):
|
|
399
|
+
# This is a list from cl { } block
|
|
400
|
+
body.extend(node)
|
|
401
|
+
flat_kids.extend(node)
|
|
402
|
+
elif isinstance(node, uni.ElementStmt):
|
|
403
|
+
body.append(node)
|
|
404
|
+
flat_kids.append(node)
|
|
405
|
+
else:
|
|
406
|
+
flat_kids.append(node)
|
|
407
|
+
|
|
376
408
|
mod = uni.Module(
|
|
377
409
|
name=self.parse_ref.mod_path.split(os.path.sep)[-1].rstrip(".jac"),
|
|
378
410
|
source=self.parse_ref.ir_in,
|
|
379
411
|
doc=doc,
|
|
380
412
|
body=body,
|
|
381
413
|
terminals=self.terminals,
|
|
382
|
-
kid=
|
|
383
|
-
|
|
384
|
-
or [uni.EmptyToken(uni.Source("", self.parse_ref.mod_path))]
|
|
385
|
-
),
|
|
414
|
+
kid=flat_kids
|
|
415
|
+
or [uni.EmptyToken(uni.Source("", self.parse_ref.mod_path))],
|
|
386
416
|
)
|
|
387
417
|
return mod
|
|
388
418
|
|
|
@@ -397,19 +427,48 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
397
427
|
element.add_kids_left([doc])
|
|
398
428
|
return element
|
|
399
429
|
|
|
400
|
-
def
|
|
430
|
+
def onelang_stmt(self, _: None) -> uni.ElementStmt:
|
|
401
431
|
"""Grammar rule.
|
|
402
432
|
|
|
403
|
-
|
|
433
|
+
onelang_stmt: import_stmt
|
|
404
434
|
| archetype
|
|
405
435
|
| ability
|
|
406
436
|
| global_var
|
|
407
437
|
| free_code
|
|
408
|
-
| py_code_block
|
|
409
438
|
| test
|
|
439
|
+
| impl_def
|
|
440
|
+
| sem_def
|
|
410
441
|
"""
|
|
411
442
|
return self.consume(uni.ElementStmt)
|
|
412
443
|
|
|
444
|
+
def toplevel_stmt(self, _: None) -> uni.ElementStmt | list[uni.ElementStmt]:
|
|
445
|
+
"""Grammar rule.
|
|
446
|
+
|
|
447
|
+
toplevel_stmt: KW_CLIENT? onelang_stmt
|
|
448
|
+
| KW_CLIENT LBRACE onelang_stmt* RBRACE
|
|
449
|
+
| py_code_block
|
|
450
|
+
"""
|
|
451
|
+
client_tok = self.match_token(Tok.KW_CLIENT)
|
|
452
|
+
if client_tok:
|
|
453
|
+
lbrace = self.match_token(Tok.LBRACE)
|
|
454
|
+
if lbrace:
|
|
455
|
+
# Collect all statements in the block
|
|
456
|
+
elements: list[uni.ElementStmt] = []
|
|
457
|
+
while elem := self.match(uni.ElementStmt):
|
|
458
|
+
if isinstance(elem, uni.ClientFacingNode):
|
|
459
|
+
elem.is_client_decl = True
|
|
460
|
+
elements.append(elem)
|
|
461
|
+
self.consume(uni.Token) # RBRACE
|
|
462
|
+
return elements
|
|
463
|
+
else:
|
|
464
|
+
element = self.consume(uni.ElementStmt)
|
|
465
|
+
if isinstance(element, uni.ClientFacingNode):
|
|
466
|
+
element.is_client_decl = True
|
|
467
|
+
element.add_kids_left([client_tok])
|
|
468
|
+
return element
|
|
469
|
+
else:
|
|
470
|
+
return self.consume(uni.ElementStmt)
|
|
471
|
+
|
|
413
472
|
def global_var(self, _: None) -> uni.GlobalVars:
|
|
414
473
|
"""Grammar rule.
|
|
415
474
|
|
|
@@ -866,14 +925,14 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
866
925
|
def ability_decl(self, _: None) -> uni.Ability:
|
|
867
926
|
"""Grammar rule.
|
|
868
927
|
|
|
869
|
-
ability_decl: KW_OVERRIDE? KW_STATIC? KW_CAN access_tag? named_ref
|
|
928
|
+
ability_decl: KW_OVERRIDE? KW_STATIC? KW_CAN access_tag? named_ref?
|
|
870
929
|
event_clause (block_tail | KW_ABSTRACT? SEMI)
|
|
871
930
|
"""
|
|
872
931
|
is_override = self.match_token(Tok.KW_OVERRIDE) is not None
|
|
873
932
|
is_static = self.match_token(Tok.KW_STATIC) is not None
|
|
874
933
|
self.consume_token(Tok.KW_CAN)
|
|
875
934
|
access = self.match(uni.SubTag)
|
|
876
|
-
name = self.
|
|
935
|
+
name = self.match(uni.NameAtom)
|
|
877
936
|
signature = self.consume(uni.EventSignature)
|
|
878
937
|
|
|
879
938
|
# Handle block_tail
|
|
@@ -1708,44 +1767,90 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
1708
1767
|
def lambda_expr(self, _: None) -> uni.LambdaExpr:
|
|
1709
1768
|
"""Grammar rule.
|
|
1710
1769
|
|
|
1711
|
-
lambda_expr: KW_LAMBDA func_decl_params? (RETURN_HINT expression)? COLON expression
|
|
1770
|
+
lambda_expr: KW_LAMBDA func_decl_params? (RETURN_HINT expression)? ( COLON expression | code_block )
|
|
1712
1771
|
"""
|
|
1713
1772
|
return_type: uni.Expr | None = None
|
|
1714
1773
|
return_hint_tok: uni.Token | None = None
|
|
1715
1774
|
sig_kid: list[uni.UniNode] = []
|
|
1716
1775
|
self.consume_token(Tok.KW_LAMBDA)
|
|
1717
|
-
|
|
1718
|
-
|
|
1719
|
-
|
|
1720
|
-
|
|
1721
|
-
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
|
|
1728
|
-
|
|
1729
|
-
|
|
1730
|
-
|
|
1731
|
-
|
|
1732
|
-
|
|
1733
|
-
|
|
1734
|
-
|
|
1735
|
-
|
|
1736
|
-
|
|
1737
|
-
|
|
1738
|
-
|
|
1776
|
+
param_nodes: list[uni.UniNode] | None = None
|
|
1777
|
+
signature = self.match(uni.FuncSignature)
|
|
1778
|
+
signature_created = False
|
|
1779
|
+
if not signature:
|
|
1780
|
+
if self.node_idx < len(self.cur_nodes) and isinstance(
|
|
1781
|
+
self.cur_nodes[self.node_idx], list
|
|
1782
|
+
):
|
|
1783
|
+
candidate: list[uni.UniNode] = self.cur_nodes[self.node_idx] # type: ignore[assignment]
|
|
1784
|
+
first = candidate[0] if candidate else None
|
|
1785
|
+
if not (
|
|
1786
|
+
isinstance(first, uni.Token) and first.name == Tok.LBRACE.name
|
|
1787
|
+
):
|
|
1788
|
+
param_nodes = self.consume(list)
|
|
1789
|
+
elif (
|
|
1790
|
+
self.node_idx < len(self.cur_nodes)
|
|
1791
|
+
and isinstance(self.cur_nodes[self.node_idx], uni.Token)
|
|
1792
|
+
and self.cur_nodes[self.node_idx].name == Tok.LPAREN.name
|
|
1793
|
+
):
|
|
1794
|
+
self.consume_token(Tok.LPAREN)
|
|
1795
|
+
param_nodes = self.match(list)
|
|
1796
|
+
self.consume_token(Tok.RPAREN)
|
|
1797
|
+
if return_hint_tok := self.match_token(Tok.RETURN_HINT):
|
|
1798
|
+
return_type = self.consume(uni.Expr)
|
|
1799
|
+
if param_nodes:
|
|
1800
|
+
sig_kid.extend(param_nodes)
|
|
1801
|
+
if return_hint_tok:
|
|
1802
|
+
sig_kid.append(return_hint_tok)
|
|
1803
|
+
if return_type:
|
|
1804
|
+
sig_kid.append(return_type)
|
|
1805
|
+
signature = (
|
|
1806
|
+
uni.FuncSignature(
|
|
1807
|
+
posonly_params=[],
|
|
1808
|
+
params=(
|
|
1809
|
+
self.extract_from_list(param_nodes, uni.ParamVar)
|
|
1810
|
+
if param_nodes
|
|
1811
|
+
else []
|
|
1812
|
+
),
|
|
1813
|
+
varargs=None,
|
|
1814
|
+
kwonlyargs=[],
|
|
1815
|
+
kwargs=None,
|
|
1816
|
+
return_type=return_type,
|
|
1817
|
+
kid=sig_kid,
|
|
1818
|
+
)
|
|
1819
|
+
if param_nodes or return_type
|
|
1820
|
+
else None
|
|
1739
1821
|
)
|
|
1740
|
-
|
|
1741
|
-
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
|
-
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1822
|
+
signature_created = signature is not None
|
|
1823
|
+
|
|
1824
|
+
# Check if body is a code block or expression
|
|
1825
|
+
block_nodes: list[uni.UniNode] | None = None
|
|
1826
|
+
if self.match_token(Tok.COLON):
|
|
1827
|
+
# Single-expression body
|
|
1828
|
+
body: uni.Expr | list[uni.CodeBlockStmt] = self.consume(uni.Expr)
|
|
1829
|
+
else:
|
|
1830
|
+
if self.node_idx < len(self.cur_nodes) and isinstance(
|
|
1831
|
+
self.cur_nodes[self.node_idx], list
|
|
1832
|
+
):
|
|
1833
|
+
block_nodes = self.consume(list)
|
|
1834
|
+
body = self.extract_from_list(block_nodes, uni.CodeBlockStmt)
|
|
1835
|
+
else:
|
|
1836
|
+
self.consume_token(Tok.LBRACE)
|
|
1837
|
+
body_stmts: list[uni.CodeBlockStmt] = []
|
|
1838
|
+
while not self.match_token(Tok.RBRACE):
|
|
1839
|
+
body_stmts.append(self.consume(uni.CodeBlockStmt))
|
|
1840
|
+
body = body_stmts
|
|
1841
|
+
|
|
1842
|
+
new_kid: list[uni.UniNode] = []
|
|
1843
|
+
for item in self.cur_nodes:
|
|
1844
|
+
if param_nodes is not None and item is param_nodes:
|
|
1845
|
+
continue
|
|
1846
|
+
if item is return_type or item is return_hint_tok:
|
|
1847
|
+
continue
|
|
1848
|
+
if block_nodes is not None and item is block_nodes:
|
|
1849
|
+
new_kid.extend(block_nodes)
|
|
1850
|
+
else:
|
|
1851
|
+
new_kid.append(item)
|
|
1852
|
+
if signature_created and signature:
|
|
1853
|
+
new_kid.insert(1, signature)
|
|
1749
1854
|
return uni.LambdaExpr(
|
|
1750
1855
|
signature=signature,
|
|
1751
1856
|
body=body,
|
|
@@ -2141,13 +2246,19 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
2141
2246
|
"""Grammar rule.
|
|
2142
2247
|
|
|
2143
2248
|
atom: named_ref
|
|
2144
|
-
| LPAREN (expression | yield_expr) RPAREN
|
|
2249
|
+
| LPAREN (expression | yield_expr | function_decl) RPAREN
|
|
2145
2250
|
| atom_collection
|
|
2146
2251
|
| atom_literal
|
|
2147
2252
|
| type_ref
|
|
2253
|
+
| jsx_element
|
|
2148
2254
|
"""
|
|
2149
2255
|
if self.match_token(Tok.LPAREN):
|
|
2150
|
-
|
|
2256
|
+
# Try to match expression first, then yield_expr, then function_decl
|
|
2257
|
+
value = self.match(uni.Expr)
|
|
2258
|
+
if value is None:
|
|
2259
|
+
value = self.match(uni.YieldExpr)
|
|
2260
|
+
if value is None:
|
|
2261
|
+
value = self.consume(uni.Ability)
|
|
2151
2262
|
self.consume_token(Tok.RPAREN)
|
|
2152
2263
|
return uni.AtomUnit(value=value, kid=self.cur_nodes)
|
|
2153
2264
|
return self.consume(uni.AtomExpr)
|
|
@@ -2212,108 +2323,142 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
2212
2323
|
def fstring(self, _: None) -> uni.FString:
|
|
2213
2324
|
"""Grammar rule.
|
|
2214
2325
|
|
|
2215
|
-
fstring:
|
|
2216
|
-
|
|
2217
|
-
| FSTR_TRIPLE_START fstr_triple_parts FSTR_TRIPLE_END
|
|
2218
|
-
| FSTR_SQ_TRIPLE_START fstr_sq_triple_parts FSTR_SQ_TRIPLE_END
|
|
2326
|
+
fstring: F_DQ_START fstr_dq_part* F_DQ_END
|
|
2327
|
+
| F_SQ_START fstr_sq_part* F_SQ_END
|
|
2219
2328
|
"""
|
|
2220
|
-
|
|
2221
|
-
|
|
2222
|
-
|
|
2223
|
-
|
|
2224
|
-
|
|
2225
|
-
|
|
2226
|
-
|
|
2227
|
-
|
|
2228
|
-
self.
|
|
2229
|
-
|
|
2230
|
-
|
|
2231
|
-
|
|
2232
|
-
|
|
2329
|
+
fstring_configs = [
|
|
2330
|
+
([Tok.F_DQ_START, Tok.RF_DQ_START], Tok.F_DQ_END),
|
|
2331
|
+
([Tok.F_SQ_START, Tok.RF_SQ_START], Tok.F_SQ_END),
|
|
2332
|
+
([Tok.F_TDQ_START, Tok.RF_TDQ_START], Tok.F_TDQ_END),
|
|
2333
|
+
([Tok.F_TSQ_START, Tok.RF_TSQ_START], Tok.F_TSQ_END),
|
|
2334
|
+
]
|
|
2335
|
+
|
|
2336
|
+
for start_toks, end_tok in fstring_configs:
|
|
2337
|
+
if fstr := self._process_fstring(start_toks, end_tok):
|
|
2338
|
+
return fstr
|
|
2339
|
+
|
|
2340
|
+
raise self.ice()
|
|
2341
|
+
|
|
2342
|
+
def _process_fstring(
|
|
2343
|
+
self, start_tok: list[Tok], end_tok: Tok
|
|
2344
|
+
) -> Optional[uni.FString]:
|
|
2345
|
+
"""Process fstring nodes."""
|
|
2346
|
+
tok_start = self.match_token(start_tok[0]) or self.match_token(start_tok[1])
|
|
2347
|
+
if not tok_start:
|
|
2348
|
+
return None
|
|
2349
|
+
parts = []
|
|
2350
|
+
while part := self.match(uni.String) or self.match(uni.FormattedValue):
|
|
2351
|
+
parts.append(part)
|
|
2352
|
+
tok_end = self.consume_token(end_tok)
|
|
2233
2353
|
return uni.FString(
|
|
2234
|
-
|
|
2235
|
-
|
|
2236
|
-
|
|
2237
|
-
else []
|
|
2238
|
-
),
|
|
2354
|
+
start=tok_start,
|
|
2355
|
+
parts=parts,
|
|
2356
|
+
end=tok_end,
|
|
2239
2357
|
kid=self.flat_cur_nodes,
|
|
2240
2358
|
)
|
|
2241
2359
|
|
|
2242
|
-
def
|
|
2360
|
+
def fstr_dq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2243
2361
|
"""Grammar rule.
|
|
2244
2362
|
|
|
2245
|
-
|
|
2363
|
+
fstr_dq_part: F_TEXT_DQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2246
2364
|
"""
|
|
2247
|
-
|
|
2248
|
-
(
|
|
2249
|
-
i
|
|
2250
|
-
if isinstance(i, uni.String)
|
|
2251
|
-
else (
|
|
2252
|
-
uni.ExprStmt(expr=i, in_fstring=True, kid=[i])
|
|
2253
|
-
if isinstance(i, uni.Expr)
|
|
2254
|
-
else i
|
|
2255
|
-
)
|
|
2256
|
-
)
|
|
2257
|
-
for i in self.cur_nodes
|
|
2258
|
-
]
|
|
2259
|
-
return valid_parts
|
|
2365
|
+
return self._process_f_expr(Tok.F_TEXT_DQ, self.cur_nodes)
|
|
2260
2366
|
|
|
2261
|
-
def
|
|
2367
|
+
def fstr_sq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2262
2368
|
"""Grammar rule.
|
|
2263
2369
|
|
|
2264
|
-
|
|
2370
|
+
fstr_sq_part: F_TEXT_SQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2265
2371
|
"""
|
|
2266
|
-
|
|
2267
|
-
(
|
|
2268
|
-
i
|
|
2269
|
-
if isinstance(i, uni.String)
|
|
2270
|
-
else (
|
|
2271
|
-
uni.ExprStmt(expr=i, in_fstring=True, kid=[i])
|
|
2272
|
-
if isinstance(i, uni.Expr)
|
|
2273
|
-
else i
|
|
2274
|
-
)
|
|
2275
|
-
)
|
|
2276
|
-
for i in self.cur_nodes
|
|
2277
|
-
]
|
|
2278
|
-
return valid_parts
|
|
2372
|
+
return self._process_f_expr(Tok.F_TEXT_SQ, self.cur_nodes)
|
|
2279
2373
|
|
|
2280
|
-
def
|
|
2374
|
+
def fstr_tdq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2281
2375
|
"""Grammar rule.
|
|
2282
2376
|
|
|
2283
|
-
|
|
2377
|
+
fstr_tdq_part: F_TEXT_DQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2284
2378
|
"""
|
|
2285
|
-
|
|
2286
|
-
(
|
|
2287
|
-
i
|
|
2288
|
-
if isinstance(i, uni.String)
|
|
2289
|
-
else (
|
|
2290
|
-
uni.ExprStmt(expr=i, in_fstring=True, kid=[i])
|
|
2291
|
-
if isinstance(i, uni.Expr)
|
|
2292
|
-
else i
|
|
2293
|
-
)
|
|
2294
|
-
)
|
|
2295
|
-
for i in self.cur_nodes
|
|
2296
|
-
]
|
|
2297
|
-
return valid_parts
|
|
2379
|
+
return self._process_f_expr(Tok.F_TEXT_TDQ, self.cur_nodes)
|
|
2298
2380
|
|
|
2299
|
-
def
|
|
2381
|
+
def fstr_tsq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2300
2382
|
"""Grammar rule.
|
|
2301
2383
|
|
|
2302
|
-
|
|
2384
|
+
fstr_sq_part: F_TEXT_SQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2303
2385
|
"""
|
|
2304
|
-
|
|
2305
|
-
|
|
2306
|
-
|
|
2307
|
-
|
|
2308
|
-
|
|
2309
|
-
|
|
2310
|
-
|
|
2311
|
-
|
|
2312
|
-
|
|
2386
|
+
return self._process_f_expr(Tok.F_TEXT_TSQ, self.cur_nodes)
|
|
2387
|
+
|
|
2388
|
+
def rfstr_dq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2389
|
+
"""Grammar rule.
|
|
2390
|
+
|
|
2391
|
+
fstr_dq_part: F_TEXT_DQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2392
|
+
"""
|
|
2393
|
+
return self._process_f_expr(Tok.RF_TEXT_DQ, self.cur_nodes)
|
|
2394
|
+
|
|
2395
|
+
def rfstr_sq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2396
|
+
"""Grammar rule.
|
|
2397
|
+
|
|
2398
|
+
fstr_sq_part: F_TEXT_SQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2399
|
+
"""
|
|
2400
|
+
return self._process_f_expr(Tok.RF_TEXT_SQ, self.cur_nodes)
|
|
2401
|
+
|
|
2402
|
+
def rfstr_tdq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2403
|
+
"""Grammar rule.
|
|
2404
|
+
|
|
2405
|
+
fstr_tdq_part: F_TEXT_DQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2406
|
+
"""
|
|
2407
|
+
return self._process_f_expr(Tok.RF_TEXT_TDQ, self.cur_nodes)
|
|
2408
|
+
|
|
2409
|
+
def rfstr_tsq_part(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2410
|
+
"""Grammar rule.
|
|
2411
|
+
|
|
2412
|
+
fstr_sq_part: F_TEXT_SQ | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2413
|
+
"""
|
|
2414
|
+
return self._process_f_expr(Tok.RF_TEXT_TSQ, self.cur_nodes)
|
|
2415
|
+
|
|
2416
|
+
def fformat(self, _: None) -> uni.Token | uni.FormattedValue:
|
|
2417
|
+
"""Grammar rule.
|
|
2418
|
+
|
|
2419
|
+
fformat: F_FORMAT_TEXT | D_LBRACE | D_RBRACE | LBRACE expression CONV? (COLON fformat*)? RBRACE
|
|
2420
|
+
"""
|
|
2421
|
+
return self._process_f_expr(Tok.F_FORMAT_TEXT, self.cur_nodes)
|
|
2422
|
+
|
|
2423
|
+
def _process_f_expr(
|
|
2424
|
+
self, token: Tok, nodes: list[uni.UniNode]
|
|
2425
|
+
) -> uni.Token | uni.FormattedValue:
|
|
2426
|
+
"""Process fexpression nodes."""
|
|
2427
|
+
if (
|
|
2428
|
+
tok := self.match_token(token)
|
|
2429
|
+
or self.match_token(Tok.D_LBRACE)
|
|
2430
|
+
or self.match_token(Tok.D_RBRACE)
|
|
2431
|
+
):
|
|
2432
|
+
return tok
|
|
2433
|
+
else:
|
|
2434
|
+
conversion = -1
|
|
2435
|
+
format_spec = None
|
|
2436
|
+
self.consume_token(Tok.LBRACE)
|
|
2437
|
+
expr = self.consume(uni.Expr)
|
|
2438
|
+
if conv_tok := self.match_token(Tok.CONV):
|
|
2439
|
+
conversion = ord(conv_tok.value[1:])
|
|
2440
|
+
if self.match_token(Tok.COLON):
|
|
2441
|
+
parts = []
|
|
2442
|
+
while part := self.match(uni.String) or self.match(
|
|
2443
|
+
uni.FormattedValue
|
|
2444
|
+
):
|
|
2445
|
+
parts.append(part)
|
|
2446
|
+
if len(parts) == 1 and isinstance(parts[0], uni.String):
|
|
2447
|
+
format_spec = parts[0]
|
|
2448
|
+
elif parts:
|
|
2449
|
+
format_spec = uni.FString(
|
|
2450
|
+
start=None,
|
|
2451
|
+
parts=parts,
|
|
2452
|
+
end=None,
|
|
2453
|
+
kid=parts,
|
|
2454
|
+
)
|
|
2455
|
+
self.consume_token(Tok.RBRACE)
|
|
2456
|
+
return uni.FormattedValue(
|
|
2457
|
+
format_part=expr,
|
|
2458
|
+
conversion=conversion,
|
|
2459
|
+
format_spec=format_spec,
|
|
2460
|
+
kid=self.cur_nodes,
|
|
2313
2461
|
)
|
|
2314
|
-
for i in self.cur_nodes
|
|
2315
|
-
]
|
|
2316
|
-
return valid_parts
|
|
2317
2462
|
|
|
2318
2463
|
def list_val(self, _: None) -> uni.ListVal:
|
|
2319
2464
|
"""Grammar rule.
|
|
@@ -2583,6 +2728,244 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
2583
2728
|
kid=self.cur_nodes,
|
|
2584
2729
|
)
|
|
2585
2730
|
|
|
2731
|
+
def jsx_element(self, _: None) -> uni.JsxElement:
|
|
2732
|
+
"""Grammar rule.
|
|
2733
|
+
|
|
2734
|
+
jsx_element: jsx_self_closing
|
|
2735
|
+
| jsx_fragment
|
|
2736
|
+
| jsx_opening_closing
|
|
2737
|
+
"""
|
|
2738
|
+
return self.consume(uni.JsxElement)
|
|
2739
|
+
|
|
2740
|
+
def jsx_self_closing(self, _: None) -> uni.JsxElement:
|
|
2741
|
+
"""Grammar rule.
|
|
2742
|
+
|
|
2743
|
+
jsx_self_closing: JSX_OPEN_START jsx_element_name jsx_attributes? JSX_SELF_CLOSE
|
|
2744
|
+
"""
|
|
2745
|
+
self.consume_token(Tok.JSX_OPEN_START)
|
|
2746
|
+
name = self.consume(uni.JsxElementName)
|
|
2747
|
+
# jsx_attributes is optional and returns a list when present
|
|
2748
|
+
attrs_list = self.match(
|
|
2749
|
+
list
|
|
2750
|
+
) # Will match jsx_attributes which returns a list
|
|
2751
|
+
attrs = attrs_list if attrs_list else []
|
|
2752
|
+
self.consume_token(Tok.JSX_SELF_CLOSE)
|
|
2753
|
+
|
|
2754
|
+
return uni.JsxElement(
|
|
2755
|
+
name=name,
|
|
2756
|
+
attributes=attrs,
|
|
2757
|
+
children=None,
|
|
2758
|
+
is_self_closing=True,
|
|
2759
|
+
is_fragment=False,
|
|
2760
|
+
kid=self.flat_cur_nodes,
|
|
2761
|
+
)
|
|
2762
|
+
|
|
2763
|
+
def jsx_opening_closing(self, _: None) -> uni.JsxElement:
|
|
2764
|
+
"""Grammar rule.
|
|
2765
|
+
|
|
2766
|
+
jsx_opening_closing: jsx_opening_element jsx_children? jsx_closing_element
|
|
2767
|
+
"""
|
|
2768
|
+
opening = self.consume(uni.JsxElement) # From jsx_opening_element
|
|
2769
|
+
# jsx_children is optional and returns a list when present
|
|
2770
|
+
children_list = self.match(
|
|
2771
|
+
list
|
|
2772
|
+
) # Will match jsx_children which returns a list
|
|
2773
|
+
children = children_list if children_list else []
|
|
2774
|
+
self.consume(uni.JsxElement) # From jsx_closing_element (closing tag)
|
|
2775
|
+
|
|
2776
|
+
# Merge opening and closing into single element
|
|
2777
|
+
return uni.JsxElement(
|
|
2778
|
+
name=opening.name,
|
|
2779
|
+
attributes=opening.attributes,
|
|
2780
|
+
children=children if children else None,
|
|
2781
|
+
is_self_closing=False,
|
|
2782
|
+
is_fragment=False,
|
|
2783
|
+
kid=self.flat_cur_nodes,
|
|
2784
|
+
)
|
|
2785
|
+
|
|
2786
|
+
def jsx_fragment(self, _: None) -> uni.JsxElement:
    """Grammar rule.

    jsx_fragment: JSX_FRAG_OPEN jsx_children? JSX_FRAG_CLOSE
    """
    self.consume_token(Tok.JSX_FRAG_OPEN)
    # jsx_children is optional; when present it reduces to a list.
    kids = self.match(list)
    self.consume_token(Tok.JSX_FRAG_CLOSE)
    # Fragments have no tag name and no attributes.
    return uni.JsxElement(
        name=None,
        attributes=None,
        children=kids if kids else None,
        is_self_closing=False,
        is_fragment=True,
        kid=self.flat_cur_nodes,
    )
|
|
2808
|
+
def jsx_opening_element(self, _: None) -> uni.JsxElement:
    """Grammar rule.

    jsx_opening_element: JSX_OPEN_START jsx_element_name jsx_attributes? JSX_TAG_END
    """
    self.consume_token(Tok.JSX_OPEN_START)
    tag_name = self.consume(uni.JsxElementName)
    # jsx_attributes is optional; when present it reduces to a list.
    attrs = self.match(list) or []
    self.consume_token(Tok.JSX_TAG_END)
    # Partial element: jsx_opening_closing attaches the children later.
    return uni.JsxElement(
        name=tag_name,
        attributes=attrs,
        children=None,
        is_self_closing=False,
        is_fragment=False,
        kid=self.flat_cur_nodes,
    )
|
|
2832
|
+
def jsx_closing_element(self, _: None) -> uni.JsxElement:
    """Grammar rule.

    jsx_closing_element: JSX_CLOSE_START jsx_element_name JSX_TAG_END
    """
    self.consume_token(Tok.JSX_CLOSE_START)
    tag_name = self.consume(uni.JsxElementName)
    self.consume_token(Tok.JSX_TAG_END)
    # Stub element carrying only the closing-tag info; discarded by
    # jsx_opening_closing after the tag is consumed.
    return uni.JsxElement(
        name=tag_name,
        attributes=None,
        children=None,
        is_self_closing=False,
        is_fragment=False,
        kid=self.cur_nodes,
    )
|
|
2850
|
+
def jsx_element_name(self, _: None) -> uni.JsxElementName:
    """Grammar rule.

    jsx_element_name: JSX_NAME (DOT JSX_NAME)*
    """
    # Collect dotted segments, e.g. Foo.Bar.Baz -> [Foo, Bar, Baz].
    segments = [self.consume_token(Tok.JSX_NAME)]
    while self.match_token(Tok.DOT):
        segments.append(self.consume_token(Tok.JSX_NAME))
    return uni.JsxElementName(
        parts=segments,
        kid=self.cur_nodes,
    )
|
|
2863
|
+
def jsx_attributes(self, _: None) -> list[uni.JsxAttribute]:
    """Grammar rule.

    jsx_attributes: jsx_attribute+
    """
    # One or more attributes; consume_many gathers the full run.
    collected = self.consume_many(uni.JsxAttribute)
    return collected
|
|
2870
|
+
def jsx_attribute(self, _: None) -> uni.JsxAttribute:
    """Grammar rule.

    jsx_attribute: jsx_spread_attribute | jsx_normal_attribute
    """
    # Both alternatives are JsxAttribute subclasses; pass through whichever
    # the child rule produced.
    attr = self.consume(uni.JsxAttribute)
    return attr
|
|
2877
|
+
def jsx_spread_attribute(self, _: None) -> uni.JsxSpreadAttribute:
    """Grammar rule.

    jsx_spread_attribute: LBRACE ELLIPSIS expression RBRACE
    """
    # {...expr} spread syntax.
    self.consume_token(Tok.LBRACE)
    self.consume_token(Tok.ELLIPSIS)
    spread_expr = self.consume(uni.Expr)
    self.consume_token(Tok.RBRACE)
    return uni.JsxSpreadAttribute(
        expr=spread_expr,
        kid=self.cur_nodes,
    )
|
|
2891
|
+
def jsx_normal_attribute(self, _: None) -> uni.JsxNormalAttribute:
    """Grammar rule.

    jsx_normal_attribute: JSX_NAME (EQ jsx_attr_value)?
    """
    attr_name = self.consume_token(Tok.JSX_NAME)
    # Bare attributes (no "=value") keep value=None.
    attr_value = self.consume(uni.Expr) if self.match_token(Tok.EQ) else None
    return uni.JsxNormalAttribute(
        name=attr_name,
        value=attr_value,
        kid=self.cur_nodes,
    )
|
|
2906
|
+
def jsx_attr_value(self, _: None) -> uni.String | uni.Expr:
    """Grammar rule.

    jsx_attr_value: STRING | LBRACE expression RBRACE
    """
    # Plain string literal form first; otherwise a braced expression.
    literal = self.match(uni.String)
    if literal:
        return literal
    self.consume_token(Tok.LBRACE)
    braced_expr = self.consume(uni.Expr)
    self.consume_token(Tok.RBRACE)
    return braced_expr
|
|
2918
|
+
def jsx_children(self, _: None) -> list[uni.JsxChild]:
    """Grammar rule.

    jsx_children: jsx_child+
    """
    # Sweep forward through cur_nodes, collecting the contiguous run of
    # child nodes; stop at the first node of any other kind.
    collected: list = []
    while self.node_idx < len(self.cur_nodes):
        candidate = self.cur_nodes[self.node_idx]
        if not isinstance(candidate, (uni.JsxChild, uni.JsxElement)):
            break
        collected.append(candidate)  # type: ignore[arg-type]
        self.node_idx += 1
    return collected
|
|
2936
|
+
def jsx_child(self, _: None) -> uni.JsxChild:
    """Grammar rule.

    jsx_child: jsx_element | jsx_expression
    """
    # Nested element first; otherwise the child must be a JsxChild node.
    element = self.match(uni.JsxElement)
    if element:
        return element  # type: ignore[return-value]
    return self.consume(uni.JsxChild)
|
|
2945
|
+
def jsx_expression(self, _: None) -> uni.JsxExpression:
    """Grammar rule.

    jsx_expression: LBRACE expression RBRACE
    """
    # {expr} interpolation inside JSX children.
    self.consume_token(Tok.LBRACE)
    inner_expr = self.consume(uni.Expr)
    self.consume_token(Tok.RBRACE)
    return uni.JsxExpression(
        expr=inner_expr,
        kid=self.cur_nodes,
    )
|
|
2958
|
+
def jsx_text(self, _: None) -> uni.JsxText:
    """Grammar rule.

    jsx_text: JSX_TEXT
    """
    # Raw text run between tags, kept as a single token.
    text_tok = self.consume_token(Tok.JSX_TEXT)
    return uni.JsxText(
        value=text_tok,
        kid=self.cur_nodes,
    )
|
|
2586
2969
|
def edge_ref_chain(self, _: None) -> uni.EdgeRefTrailer:
|
|
2587
2970
|
"""Grammar rule.
|
|
2588
2971
|
|
|
@@ -3163,15 +3546,23 @@ class JacParser(Transform[uni.Source, uni.Module]):
|
|
|
3163
3546
|
ret_type = uni.Int
|
|
3164
3547
|
elif token.type in [
|
|
3165
3548
|
Tok.STRING,
|
|
3166
|
-
Tok.
|
|
3167
|
-
Tok.
|
|
3168
|
-
Tok.
|
|
3169
|
-
Tok.
|
|
3170
|
-
Tok.
|
|
3549
|
+
Tok.D_LBRACE,
|
|
3550
|
+
Tok.D_RBRACE,
|
|
3551
|
+
Tok.F_TEXT_DQ,
|
|
3552
|
+
Tok.F_TEXT_SQ,
|
|
3553
|
+
Tok.F_TEXT_TDQ,
|
|
3554
|
+
Tok.F_TEXT_TSQ,
|
|
3555
|
+
Tok.RF_TEXT_DQ,
|
|
3556
|
+
Tok.RF_TEXT_SQ,
|
|
3557
|
+
Tok.RF_TEXT_TDQ,
|
|
3558
|
+
Tok.RF_TEXT_TSQ,
|
|
3559
|
+
Tok.F_FORMAT_TEXT,
|
|
3171
3560
|
]:
|
|
3172
3561
|
ret_type = uni.String
|
|
3173
|
-
if token.type == Tok.
|
|
3174
|
-
token.value =
|
|
3562
|
+
if token.type == Tok.D_LBRACE:
|
|
3563
|
+
token.value = "{"
|
|
3564
|
+
elif token.type == Tok.D_RBRACE:
|
|
3565
|
+
token.value = "}"
|
|
3175
3566
|
elif token.type == Tok.BOOL:
|
|
3176
3567
|
ret_type = uni.Bool
|
|
3177
3568
|
elif token.type == Tok.PYNLINE and isinstance(token.value, str):
|