jaclang 0.8.1__py3-none-any.whl → 0.8.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (77)
  1. jaclang/__init__.py +6 -0
  2. jaclang/cli/cli.py +21 -50
  3. jaclang/compiler/codeinfo.py +0 -1
  4. jaclang/compiler/jac.lark +12 -10
  5. jaclang/compiler/larkparse/jac_parser.py +2 -2
  6. jaclang/compiler/parser.py +18 -10
  7. jaclang/compiler/passes/main/__init__.py +0 -14
  8. jaclang/compiler/passes/main/annex_pass.py +2 -8
  9. jaclang/compiler/passes/main/cfg_build_pass.py +38 -12
  10. jaclang/compiler/passes/main/import_pass.py +3 -11
  11. jaclang/compiler/passes/main/pyast_gen_pass.py +243 -592
  12. jaclang/compiler/passes/main/sym_tab_link_pass.py +2 -5
  13. jaclang/compiler/passes/main/tests/test_cfg_build_pass.py +2 -8
  14. jaclang/compiler/passes/main/tests/test_decl_impl_match_pass.py +7 -8
  15. jaclang/compiler/passes/main/tests/test_import_pass.py +5 -18
  16. jaclang/compiler/passes/main/tests/test_pyast_gen_pass.py +2 -6
  17. jaclang/compiler/passes/main/tests/test_sub_node_pass.py +1 -3
  18. jaclang/compiler/passes/main/tests/test_sym_tab_link_pass.py +20 -17
  19. jaclang/compiler/passes/tool/doc_ir_gen_pass.py +237 -105
  20. jaclang/compiler/passes/tool/jac_formatter_pass.py +2 -0
  21. jaclang/compiler/passes/tool/tests/fixtures/archetype_frmt.jac +14 -0
  22. jaclang/compiler/passes/tool/tests/fixtures/general_format_checks/triple_quoted_string.jac +5 -4
  23. jaclang/compiler/passes/tool/tests/fixtures/import_fmt.jac +6 -0
  24. jaclang/compiler/passes/tool/tests/fixtures/simple_walk_fmt.jac +3 -3
  25. jaclang/compiler/passes/tool/tests/fixtures/tagbreak.jac +9 -0
  26. jaclang/compiler/passes/tool/tests/test_jac_format_pass.py +18 -3
  27. jaclang/compiler/passes/tool/tests/test_unparse_validate.py +2 -2
  28. jaclang/compiler/program.py +21 -60
  29. jaclang/compiler/tests/fixtures/pkg_import_lib_py/__init__.py +2 -8
  30. jaclang/compiler/tests/fixtures/pkg_import_lib_py/sub/__init__.py +1 -5
  31. jaclang/compiler/tests/test_importer.py +10 -13
  32. jaclang/compiler/unitree.py +32 -16
  33. jaclang/langserve/__init__.jac +1 -1
  34. jaclang/langserve/engine.jac +113 -108
  35. jaclang/langserve/server.jac +17 -2
  36. jaclang/langserve/tests/server_test/test_lang_serve.py +138 -46
  37. jaclang/langserve/tests/server_test/utils.py +35 -9
  38. jaclang/langserve/tests/test_sem_tokens.py +1 -1
  39. jaclang/langserve/tests/test_server.py +3 -7
  40. jaclang/runtimelib/archetype.py +127 -5
  41. jaclang/runtimelib/importer.py +51 -94
  42. jaclang/runtimelib/machine.py +391 -268
  43. jaclang/runtimelib/meta_importer.py +86 -0
  44. jaclang/runtimelib/tests/fixtures/graph_purger.jac +24 -26
  45. jaclang/runtimelib/tests/fixtures/other_root_access.jac +25 -16
  46. jaclang/runtimelib/tests/test_jaseci.py +3 -1
  47. jaclang/tests/fixtures/arch_rel_import_creation.jac +23 -23
  48. jaclang/tests/fixtures/async_ability.jac +43 -10
  49. jaclang/tests/fixtures/async_function.jac +18 -0
  50. jaclang/tests/fixtures/async_walker.jac +17 -12
  51. jaclang/tests/fixtures/create_dynamic_archetype.jac +25 -28
  52. jaclang/tests/fixtures/deep/deeper/deep_outer_import.jac +7 -4
  53. jaclang/tests/fixtures/deep/deeper/snd_lev.jac +2 -2
  54. jaclang/tests/fixtures/deep/deeper/snd_lev_dup.jac +6 -0
  55. jaclang/tests/fixtures/deep/one_lev.jac +2 -2
  56. jaclang/tests/fixtures/deep/one_lev_dup.jac +4 -3
  57. jaclang/tests/fixtures/dynamic_archetype.jac +19 -12
  58. jaclang/tests/fixtures/foo.jac +14 -22
  59. jaclang/tests/fixtures/jac_from_py.py +1 -1
  60. jaclang/tests/fixtures/jp_importer.jac +6 -6
  61. jaclang/tests/fixtures/jp_importer_auto.jac +5 -3
  62. jaclang/tests/fixtures/unicode_strings.jac +24 -0
  63. jaclang/tests/fixtures/walker_update.jac +5 -7
  64. jaclang/tests/test_language.py +138 -140
  65. jaclang/tests/test_reference.py +9 -4
  66. jaclang/tests/test_typecheck.py +13 -26
  67. jaclang/utils/lang_tools.py +7 -5
  68. jaclang/utils/module_resolver.py +23 -0
  69. {jaclang-0.8.1.dist-info → jaclang-0.8.2.dist-info}/METADATA +1 -1
  70. {jaclang-0.8.1.dist-info → jaclang-0.8.2.dist-info}/RECORD +72 -70
  71. jaclang/compiler/passes/main/tests/fixtures/main_err.jac +0 -6
  72. jaclang/compiler/passes/main/tests/fixtures/second_err.jac +0 -4
  73. jaclang/compiler/passes/tool/tests/fixtures/corelib.jac +0 -644
  74. jaclang/compiler/passes/tool/tests/test_doc_ir_gen_pass.py +0 -29
  75. jaclang/tests/fixtures/deep/deeper/__init__.jac +0 -1
  76. {jaclang-0.8.1.dist-info → jaclang-0.8.2.dist-info}/WHEEL +0 -0
  77. {jaclang-0.8.1.dist-info → jaclang-0.8.2.dist-info}/entry_points.txt +0 -0
jaclang/compiler/passes/tool/doc_ir_gen_pass.py
@@ -78,26 +78,6 @@ class DocIRGenPass(UniPass):
 
         return self.concat(result)
 
-    def _strip_trailing_ws(self, part: doc.DocType) -> doc.DocType:
-        """Recursively strip trailing whitespace from a Doc node."""
-        if isinstance(part, doc.Concat) and part.parts:
-            while (
-                part.parts
-                and isinstance(part.parts[-1], doc.Text)
-                and getattr(part.parts[-1], "text", "") == " "
-            ):
-                part.parts.pop()
-            if part.parts:
-                part.parts[-1] = self._strip_trailing_ws(part.parts[-1])
-        elif isinstance(part, (doc.Group, doc.Indent, doc.Align)):
-            part.contents = self._strip_trailing_ws(part.contents)
-        return part
-
-    def finalize(self, parts: List[doc.DocType], group: bool = True) -> doc.DocType:
-        """Concat parts and remove trailing whitespace before grouping."""
-        result = self._strip_trailing_ws(self.concat(parts))
-        return self.group(result) if group else result
-
     def is_one_line(self, node: uni.UniNode) -> bool:
         """Check if the node is a one line node."""
         kid = [i for i in node.kid if not isinstance(i, uni.CommentToken)]
@@ -139,9 +119,15 @@ class DocIRGenPass(UniPass):
         """Exit import node."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            if isinstance(i, uni.Token) and i.name == Tok.SEMI:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
                 parts.pop()
                 parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.RBRACE:
+                parts.append(i.gen.doc_ir)
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
@@ -188,6 +174,13 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             elif isinstance(i, uni.Token) and i.name == Tok.LBRACE:
                 parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.LPAREN:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.RPAREN:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
             elif isinstance(node.body, Sequence) and i in node.body:
                 if not in_body:
                     body_parts.append(self.hard_line())
@@ -214,7 +207,7 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
 
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ability(self, node: uni.Ability) -> None:
         """Generate DocIR for abilities."""
@@ -250,19 +243,21 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_func_signature(self, node: uni.FuncSignature) -> None:
         """Generate DocIR for function signatures."""
         parts: list[doc.DocType] = []
         indent_parts: list[doc.DocType] = []
         in_params = False
+        has_parens = False
         for i in node.kid:
             if isinstance(i, uni.Token) and i.name == Tok.LPAREN and node.params:
                 in_params = True
                 parts.append(i.gen.doc_ir)
             elif isinstance(i, uni.Token) and i.name == Tok.RPAREN and node.params:
                 in_params = False
+                has_parens = True
                 parts.append(
                     self.indent(self.concat([self.tight_line(), *indent_parts]))
                 )
@@ -280,9 +275,16 @@ class DocIRGenPass(UniPass):
                 else:
                     indent_parts.append(i.gen.doc_ir)
             else:
+                if (
+                    isinstance(i, uni.Token)
+                    and i.name == Tok.RETURN_HINT
+                    and not has_parens
+                ):
+                    parts.append(self.space())
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_param_var(self, node: uni.ParamVar) -> None:
         """Generate DocIR for parameter variables."""
@@ -294,27 +296,52 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_type_ref(self, node: uni.TypeRef) -> None:
         """Generate DocIR for type references."""
         parts: list[doc.DocType] = []
         for i in node.kid:
             parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assignment(self, node: uni.Assignment) -> None:
         """Generate DocIR for assignments."""
-        parts: list[doc.DocType] = []
-        for i in node.kid:
-            if i == node.type_tag or (isinstance(i, uni.Token) and i.name == Tok.SEMI):
-                parts.pop()
-                parts.append(i.gen.doc_ir)
-                parts.append(self.space())
+        lhs_parts: list[doc.DocType] = []
+        rhs_parts: list[doc.DocType] = []
+        eq_tok: Optional[doc.DocType] = None
+        seen_eq = False
+
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.KW_LET:
+                lhs_parts.append(i.gen.doc_ir)
+                lhs_parts.append(self.space())
+            elif isinstance(i, uni.Token) and i.name == Tok.EQ and not seen_eq:
+                eq_tok = i.gen.doc_ir
+                seen_eq = True
+            elif seen_eq:
+                rhs_parts.append(i.gen.doc_ir)
             else:
-                parts.append(i.gen.doc_ir)
-                parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+                if i == node.aug_op:
+                    lhs_parts.append(self.space())
+                lhs_parts.append(i.gen.doc_ir)
+                if i == node.aug_op:
+                    lhs_parts.append(self.space())
+
+        if eq_tok is not None:
+            rhs_concat = self.concat(rhs_parts)
+            node.gen.doc_ir = self.group(
+                self.concat(
+                    [
+                        *lhs_parts,
+                        self.space(),
+                        eq_tok,
+                        self.indent(self.concat([self.line(), rhs_concat])),
+                    ]
+                )
+            )
+        else:
+            node.gen.doc_ir = self.group(self.concat(lhs_parts + rhs_parts))
 
     def exit_if_stmt(self, node: uni.IfStmt) -> None:
         """Generate DocIR for if statements."""
@@ -343,7 +370,8 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_else_if(self, node: uni.ElseIf) -> None:
         """Generate DocIR for else if statements."""
@@ -372,7 +400,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_else_stmt(self, node: uni.ElseStmt) -> None:
         """Generate DocIR for else statements."""
@@ -401,7 +429,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_binary_expr(self, node: uni.BinaryExpr) -> None:
         """Generate DocIR for binary expressions."""
@@ -409,7 +437,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_expr_stmt(self, node: uni.ExprStmt) -> None:
         """Generate DocIR for expression statements."""
@@ -424,7 +453,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_return_stmt(self, node: uni.ReturnStmt) -> None:
         """Generate DocIR for return statements."""
@@ -436,7 +465,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_func_call(self, node: uni.FuncCall) -> None:
         """Generate DocIR for function calls."""
@@ -475,8 +504,24 @@ class DocIRGenPass(UniPass):
         """Generate DocIR for list values."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.group(self.concat(parts))
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            else:
+                parts.append(i.gen.doc_ir)
+        not_broke = self.concat(parts)
+        parts = []
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.LSQUARE:
+                parts.append(self.hard_line())
+                parts.append(i.gen.doc_ir)
+            else:
+                parts.append(i.gen.doc_ir)
+        broke = self.concat(parts)
+        node.gen.doc_ir = self.group(self.if_break(broke, not_broke))
 
     def exit_dict_val(self, node: uni.DictVal) -> None:
         """Generate DocIR for dictionary values."""
@@ -488,7 +533,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_k_v_pair(self, node: uni.KVPair) -> None:
         """Generate DocIR for key-value pairs."""
@@ -496,7 +541,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_has_var(self, node: uni.HasVar) -> None:
         """Generate DocIR for has variable declarations."""
@@ -527,7 +573,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_while_stmt(self, node: uni.WhileStmt) -> None:
         """Generate DocIR for while statements."""
@@ -535,7 +581,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_in_for_stmt(self, node: uni.InForStmt) -> None:
         """Generate DocIR for for-in statements."""
@@ -543,7 +589,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_iter_for_stmt(self, node: uni.IterForStmt) -> None:
         """Generate DocIR for iterative for statements."""
@@ -551,7 +597,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_try_stmt(self, node: uni.TryStmt) -> None:
         """Generate DocIR for try statements."""
@@ -559,7 +605,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_except(self, node: uni.Except) -> None:
         """Generate DocIR for except clauses."""
@@ -567,7 +613,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_finally_stmt(self, node: uni.FinallyStmt) -> None:
         """Generate DocIR for finally statements."""
@@ -575,14 +621,30 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_tuple_val(self, node: uni.TupleVal) -> None:
         """Generate DocIR for tuple values."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.group(self.concat(parts))
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            else:
+                parts.append(i.gen.doc_ir)
+        not_broke = self.concat(parts)
+        parts = []
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.LPAREN:
+                parts.append(self.hard_line())
+                parts.append(i.gen.doc_ir)
+            else:
+                parts.append(i.gen.doc_ir)
+        broke = self.concat(parts)
+        node.gen.doc_ir = self.group(self.if_break(broke, not_broke))
 
     def exit_multi_string(self, node: uni.MultiString) -> None:
         """Generate DocIR for multiline strings."""
@@ -605,7 +667,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_list_compr(self, node: uni.ListCompr) -> None:
         """Generate DocIR for list comprehensions."""
@@ -613,7 +676,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_inner_compr(self, node: uni.InnerCompr) -> None:
         """Generate DocIR for inner comprehension clauses."""
@@ -621,7 +685,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_f_string(self, node: uni.FString) -> None:
         """Generate DocIR for formatted strings."""
@@ -644,7 +709,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-
+        parts.pop()
         node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_bool_expr(self, node: uni.BoolExpr) -> None:
@@ -657,7 +722,6 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.line())  # Potential break
-
         node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_unary_expr(self, node: uni.UnaryExpr) -> None:
@@ -723,7 +787,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_gen_compr(self, node: uni.GenCompr) -> None:
         """Generate DocIR for generator comprehensions."""
@@ -731,7 +796,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_set_compr(self, node: uni.SetCompr) -> None:
         """Generate DocIR for set comprehensions."""
@@ -739,7 +805,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_dict_compr(self, node: uni.DictCompr) -> None:
         """Generate DocIR for dictionary comprehensions."""
@@ -750,14 +817,15 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_k_w_pair(self, node: uni.KWPair) -> None:
         """Generate DocIR for keyword arguments."""
         parts: list[doc.DocType] = []
         for i in node.kid:
             parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_await_expr(self, node: uni.AwaitExpr) -> None:
         """Generate DocIR for await expressions."""
@@ -765,7 +833,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_yield_expr(self, node: uni.YieldExpr) -> None:
         """Generate DocIR for yield expressions."""
@@ -773,7 +842,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ctrl_stmt(self, node: uni.CtrlStmt) -> None:
         """Generate DocIR for control statements (break, continue, skip)."""
@@ -788,7 +858,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_disengage_stmt(self, node: uni.DisengageStmt) -> None:
         """Generate DocIR for disengage statements."""
@@ -803,7 +874,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assert_stmt(self, node: uni.AssertStmt) -> None:
         """Generate DocIR for assert statements."""
@@ -811,7 +883,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_raise_stmt(self, node: uni.RaiseStmt) -> None:
         """Generate DocIR for raise statements."""
@@ -819,7 +892,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_global_vars(self, node: uni.GlobalVars) -> None:
         """Generate DocIR for global variables."""
@@ -835,7 +909,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_module_code(self, node: uni.ModuleCode) -> None:
         """Generate DocIR for module code."""
@@ -870,7 +944,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_global_stmt(self, node: uni.GlobalStmt) -> None:
         """Generate DocIR for global statements."""
@@ -885,7 +959,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_visit_stmt(self, node: uni.VisitStmt) -> None:
         """Generate DocIR for visit statements."""
@@ -898,7 +972,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ignore_stmt(self, node: uni.IgnoreStmt) -> None:
         """Generate DocIR for ignore statements."""
@@ -906,7 +980,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_connect_op(self, node: uni.ConnectOp) -> None:
         """Generate DocIR for connect operator."""
@@ -914,7 +988,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_disconnect_op(self, node: uni.DisconnectOp) -> None:
         """Generate DocIR for disconnect operator."""
@@ -922,7 +997,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_compare_expr(self, node: uni.CompareExpr) -> None:
         """Generate DocIR for comparison expressions."""
@@ -930,7 +1006,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_atom_unit(self, node: uni.AtomUnit) -> None:
         """Generate DocIR for atom units (parenthesized expressions)."""
@@ -952,7 +1029,8 @@ class DocIRGenPass(UniPass):
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
             prev_item = i
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_expr_as_item(self, node: uni.ExprAsItem) -> None:
         """Generate DocIR for expression as item nodes."""
@@ -960,7 +1038,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_filter_compr(self, node: uni.FilterCompr) -> None:
         """Generate DocIR for filter comprehensions."""
@@ -968,7 +1046,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assign_compr(self, node: uni.AssignCompr) -> None:
         """Generate DocIR for assignment comprehensions."""
@@ -976,7 +1054,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_py_inline_code(self, node: uni.PyInlineCode) -> None:
         """Generate DocIR for Python inline code blocks."""
@@ -993,7 +1071,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_test(self, node: uni.Test) -> None:
         """Generate DocIR for test nodes."""
@@ -1009,7 +1087,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_check_stmt(self, node: uni.CheckStmt) -> None:
         """Generate DocIR for check statements."""
@@ -1019,7 +1097,7 @@ class DocIRGenPass(UniPass):
                 parts.pop()
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_stmt(self, node: uni.MatchStmt) -> None:
         """Generate DocIR for match statements."""
@@ -1039,7 +1117,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_case(self, node: uni.MatchCase) -> None:
         """Generate DocIR for match cases."""
@@ -1056,7 +1134,7 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
         parts.append(self.indent(self.concat([self.hard_line()] + indent_parts)))
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_value(self, node: uni.MatchValue) -> None:
         """Generate DocIR for match value patterns."""
@@ -1064,7 +1142,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_singleton(self, node: uni.MatchSingleton) -> None:
         """Generate DocIR for match singleton patterns."""
@@ -1072,7 +1151,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_sequence(self, node: uni.MatchSequence) -> None:
         """Generate DocIR for match sequence patterns."""
@@ -1080,7 +1160,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_mapping(self, node: uni.MatchMapping) -> None:
         """Generate DocIR for match mapping patterns."""
@@ -1088,7 +1169,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_or(self, node: uni.MatchOr) -> None:
         """Generate DocIR for match OR patterns."""
@@ -1096,7 +1178,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_as(self, node: uni.MatchAs) -> None:
         """Generate DocIR for match AS patterns."""
@@ -1104,7 +1187,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_wild(self, node: uni.MatchWild) -> None:
         """Generate DocIR for match wildcard patterns."""
@@ -1112,7 +1196,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_star(self, node: uni.MatchStar) -> None:
         """Generate DocIR for match star patterns (e.g., *args, **kwargs)."""
@@ -1120,7 +1205,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_k_v_pair(self, node: uni.MatchKVPair) -> None:
         """Generate DocIR for match key-value pairs."""
@@ -1131,7 +1217,8 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_arch(self, node: uni.MatchArch) -> None:
         """Generate DocIR for match architecture patterns."""
@@ -1142,7 +1229,7 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_enum(self, node: uni.Enum) -> None:
         """Generate DocIR for enum declarations."""
@@ -1160,30 +1247,74 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_sub_tag(self, node: uni.SubTag) -> None:
         """Generate DocIR for sub-tag nodes."""
-        parts: list[doc.DocType] = []
+        before_colon: list[doc.DocType] = []
+        after_colon: list[doc.DocType] = []
+        seen_colon = False
+
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-            parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+            if isinstance(i, uni.Token) and i.name == Tok.COLON and not seen_colon:
+                colon_tok = i.gen.doc_ir
+                seen_colon = True
+            elif seen_colon:
+                after_colon.append(i.gen.doc_ir)
+            else:
+                before_colon.append(i.gen.doc_ir)
+
+        if seen_colon:
+            flat = self.concat([*before_colon, colon_tok, self.space(), *after_colon])
+            broke = self.concat(
+                [
+                    *before_colon,
+                    colon_tok,
+                    self.indent(self.concat([self.line(), *after_colon])),
+                ]
+            )
+            node.gen.doc_ir = self.group(self.if_break(broke, flat))
+        else:
+            node.gen.doc_ir = self.concat(before_colon + after_colon)
 
     def exit_impl_def(self, node: uni.ImplDef) -> None:
         """Generate DocIR for implementation definitions."""
         parts: list[doc.DocType] = []
+        body_parts: list[doc.DocType] = []
+        in_body = False
         for i in node.kid:
             if i == node.doc or (node.decorators and i in node.decorators):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.hard_line())
-            elif i == node.target:
+            elif i in node.target:
+                parts.append(i.gen.doc_ir)
+            elif (
+                in_body
+                or isinstance(node.body, Sequence)
+                and node.body
+                and i == node.body[0]
+            ):
+                if not in_body:
+                    parts.pop()
+                    body_parts.append(self.hard_line())
+                if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                    body_parts.pop()
+                body_parts.append(i.gen.doc_ir)
+                body_parts.append(self.hard_line())
+                in_body = True
+                if in_body and isinstance(node.body, Sequence) and i == node.body[-1]:
+                    in_body = False
+                    body_parts.pop()
+                    parts.append(self.indent(self.concat(body_parts)))
+                    parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
+                parts.pop()
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_event_signature(self, node: uni.EventSignature) -> None:
         """Generate DocIR for event signatures."""
@@ -1191,7 +1322,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_typed_ctx_block(self, node: uni.TypedCtxBlock) -> None:
         """Generate DocIR for typed context blocks."""