jaclang 0.8.1__py3-none-any.whl → 0.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of jaclang might be problematic.

Files changed (84)
  1. jaclang/__init__.py +6 -0
  2. jaclang/cli/cli.py +21 -50
  3. jaclang/compiler/codeinfo.py +0 -1
  4. jaclang/compiler/constant.py +2 -0
  5. jaclang/compiler/jac.lark +17 -10
  6. jaclang/compiler/larkparse/jac_parser.py +2 -2
  7. jaclang/compiler/parser.py +34 -10
  8. jaclang/compiler/passes/main/__init__.py +2 -14
  9. jaclang/compiler/passes/main/annex_pass.py +2 -8
  10. jaclang/compiler/passes/main/cfg_build_pass.py +38 -12
  11. jaclang/compiler/passes/main/import_pass.py +3 -11
  12. jaclang/compiler/passes/main/pyast_gen_pass.py +246 -592
  13. jaclang/compiler/passes/main/sem_def_match_pass.py +67 -0
  14. jaclang/compiler/passes/main/sym_tab_build_pass.py +8 -0
  15. jaclang/compiler/passes/main/sym_tab_link_pass.py +2 -5
  16. jaclang/compiler/passes/main/tests/fixtures/sem_def_match.impl.jac +12 -0
  17. jaclang/compiler/passes/main/tests/fixtures/sem_def_match.jac +31 -0
  18. jaclang/compiler/passes/main/tests/test_cfg_build_pass.py +2 -8
  19. jaclang/compiler/passes/main/tests/test_decl_impl_match_pass.py +7 -8
  20. jaclang/compiler/passes/main/tests/test_import_pass.py +5 -18
  21. jaclang/compiler/passes/main/tests/test_pyast_gen_pass.py +2 -6
  22. jaclang/compiler/passes/main/tests/test_sem_def_match_pass.py +38 -0
  23. jaclang/compiler/passes/main/tests/test_sub_node_pass.py +1 -3
  24. jaclang/compiler/passes/main/tests/test_sym_tab_link_pass.py +20 -17
  25. jaclang/compiler/passes/tool/doc_ir_gen_pass.py +259 -106
  26. jaclang/compiler/passes/tool/jac_formatter_pass.py +2 -0
  27. jaclang/compiler/passes/tool/tests/fixtures/archetype_frmt.jac +14 -0
  28. jaclang/compiler/passes/tool/tests/fixtures/general_format_checks/triple_quoted_string.jac +5 -4
  29. jaclang/compiler/passes/tool/tests/fixtures/has_frmt.jac +13 -0
  30. jaclang/compiler/passes/tool/tests/fixtures/import_fmt.jac +6 -0
  31. jaclang/compiler/passes/tool/tests/fixtures/simple_walk_fmt.jac +3 -3
  32. jaclang/compiler/passes/tool/tests/fixtures/tagbreak.jac +9 -0
  33. jaclang/compiler/passes/tool/tests/test_jac_format_pass.py +25 -3
  34. jaclang/compiler/passes/tool/tests/test_unparse_validate.py +2 -2
  35. jaclang/compiler/program.py +23 -60
  36. jaclang/compiler/tests/fixtures/pkg_import_lib_py/__init__.py +2 -8
  37. jaclang/compiler/tests/fixtures/pkg_import_lib_py/sub/__init__.py +1 -5
  38. jaclang/compiler/tests/test_importer.py +10 -13
  39. jaclang/compiler/unitree.py +88 -16
  40. jaclang/langserve/__init__.jac +1 -1
  41. jaclang/langserve/engine.jac +113 -108
  42. jaclang/langserve/server.jac +17 -2
  43. jaclang/langserve/tests/server_test/test_lang_serve.py +138 -46
  44. jaclang/langserve/tests/server_test/utils.py +35 -9
  45. jaclang/langserve/tests/test_sem_tokens.py +1 -1
  46. jaclang/langserve/tests/test_server.py +3 -7
  47. jaclang/runtimelib/archetype.py +127 -5
  48. jaclang/runtimelib/importer.py +51 -94
  49. jaclang/runtimelib/machine.py +391 -268
  50. jaclang/runtimelib/meta_importer.py +86 -0
  51. jaclang/runtimelib/tests/fixtures/graph_purger.jac +24 -26
  52. jaclang/runtimelib/tests/fixtures/other_root_access.jac +25 -16
  53. jaclang/runtimelib/tests/test_jaseci.py +3 -1
  54. jaclang/tests/fixtures/arch_rel_import_creation.jac +23 -23
  55. jaclang/tests/fixtures/async_ability.jac +43 -10
  56. jaclang/tests/fixtures/async_function.jac +18 -0
  57. jaclang/tests/fixtures/async_walker.jac +17 -12
  58. jaclang/tests/fixtures/create_dynamic_archetype.jac +25 -28
  59. jaclang/tests/fixtures/deep/deeper/deep_outer_import.jac +7 -4
  60. jaclang/tests/fixtures/deep/deeper/snd_lev.jac +2 -2
  61. jaclang/tests/fixtures/deep/deeper/snd_lev_dup.jac +6 -0
  62. jaclang/tests/fixtures/deep/one_lev.jac +2 -2
  63. jaclang/tests/fixtures/deep/one_lev_dup.jac +4 -3
  64. jaclang/tests/fixtures/dynamic_archetype.jac +19 -12
  65. jaclang/tests/fixtures/foo.jac +14 -22
  66. jaclang/tests/fixtures/jac_from_py.py +1 -1
  67. jaclang/tests/fixtures/jp_importer.jac +6 -6
  68. jaclang/tests/fixtures/jp_importer_auto.jac +5 -3
  69. jaclang/tests/fixtures/unicode_strings.jac +24 -0
  70. jaclang/tests/fixtures/walker_update.jac +5 -7
  71. jaclang/tests/test_language.py +138 -140
  72. jaclang/tests/test_reference.py +9 -4
  73. jaclang/tests/test_typecheck.py +13 -26
  74. jaclang/utils/lang_tools.py +7 -5
  75. jaclang/utils/module_resolver.py +23 -0
  76. {jaclang-0.8.1.dist-info → jaclang-0.8.3.dist-info}/METADATA +1 -1
  77. {jaclang-0.8.1.dist-info → jaclang-0.8.3.dist-info}/RECORD +79 -72
  78. jaclang/compiler/passes/main/tests/fixtures/main_err.jac +0 -6
  79. jaclang/compiler/passes/main/tests/fixtures/second_err.jac +0 -4
  80. jaclang/compiler/passes/tool/tests/fixtures/corelib.jac +0 -644
  81. jaclang/compiler/passes/tool/tests/test_doc_ir_gen_pass.py +0 -29
  82. jaclang/tests/fixtures/deep/deeper/__init__.jac +0 -1
  83. {jaclang-0.8.1.dist-info → jaclang-0.8.3.dist-info}/WHEEL +0 -0
  84. {jaclang-0.8.1.dist-info → jaclang-0.8.3.dist-info}/entry_points.txt +0 -0
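
The hunks that follow are the expanded diff for jaclang/compiler/passes/tool/doc_ir_gen_pass.py (entry 25 above, +259 -106). The pass builds a prettier-style document IR bottom-up: each exit_* handler walks node.kid, appends every child's i.gen.doc_ir along with spacing primitives (space, line, hard_line, indent), and stores self.group(self.concat(parts)) on node.gen.doc_ir. For rough orientation only, here is a minimal, self-contained sketch of that style of document IR with a width-driven renderer. The Text, Line, Concat and Group classes and the greedy render function are simplified stand-ins invented for this sketch, not the doc.* types or rendering logic that jaclang ships.

from dataclasses import dataclass
from typing import List, Union

@dataclass
class Text:
    text: str                      # literal output

@dataclass
class Line:
    pass                           # soft break: a space when flat, a newline when broken

@dataclass
class Concat:
    parts: List["Doc"]

@dataclass
class Group:
    contents: "Doc"                # prefer the flat layout if it fits the width

Doc = Union[Text, Line, Concat, Group]

def flat(d: Doc) -> str:
    """Render with every soft break collapsed to a single space."""
    if isinstance(d, Text):
        return d.text
    if isinstance(d, Line):
        return " "
    if isinstance(d, Concat):
        return "".join(flat(p) for p in d.parts)
    return flat(d.contents)

def render(d: Doc, width: int = 40, indent: str = "    ") -> str:
    """Greedy renderer: keep a group flat if it fits, otherwise break its lines."""
    if isinstance(d, Group):
        candidate = flat(d)
        return candidate if len(candidate) <= width else render(d.contents, width, indent)
    if isinstance(d, Concat):
        return "".join(render(p, width, indent) for p in d.parts)
    if isinstance(d, Line):
        return "\n" + indent
    return d.text

items = Group(Concat([Text("[1,"), Line(), Text("2,"), Line(), Text("3]")]))
print(render(items, width=40))   # fits: [1, 2, 3]
print(render(items, width=4))    # breaks: one element per indented line
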
@@ -78,26 +78,6 @@ class DocIRGenPass(UniPass):
 
         return self.concat(result)
 
-    def _strip_trailing_ws(self, part: doc.DocType) -> doc.DocType:
-        """Recursively strip trailing whitespace from a Doc node."""
-        if isinstance(part, doc.Concat) and part.parts:
-            while (
-                part.parts
-                and isinstance(part.parts[-1], doc.Text)
-                and getattr(part.parts[-1], "text", "") == " "
-            ):
-                part.parts.pop()
-            if part.parts:
-                part.parts[-1] = self._strip_trailing_ws(part.parts[-1])
-        elif isinstance(part, (doc.Group, doc.Indent, doc.Align)):
-            part.contents = self._strip_trailing_ws(part.contents)
-        return part
-
-    def finalize(self, parts: List[doc.DocType], group: bool = True) -> doc.DocType:
-        """Concat parts and remove trailing whitespace before grouping."""
-        result = self._strip_trailing_ws(self.concat(parts))
-        return self.group(result) if group else result
-
     def is_one_line(self, node: uni.UniNode) -> bool:
         """Check if the node is a one line node."""
         kid = [i for i in node.kid if not isinstance(i, uni.CommentToken)]
@@ -139,9 +119,15 @@ class DocIRGenPass(UniPass):
         """Exit import node."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            if isinstance(i, uni.Token) and i.name == Tok.SEMI:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
                 parts.pop()
                 parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.RBRACE:
+                parts.append(i.gen.doc_ir)
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
@@ -188,6 +174,13 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             elif isinstance(i, uni.Token) and i.name == Tok.LBRACE:
                 parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.LPAREN:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.RPAREN:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
             elif isinstance(node.body, Sequence) and i in node.body:
                 if not in_body:
                     body_parts.append(self.hard_line())
@@ -214,7 +207,7 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
 
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ability(self, node: uni.Ability) -> None:
         """Generate DocIR for abilities."""
@@ -250,19 +243,21 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_func_signature(self, node: uni.FuncSignature) -> None:
         """Generate DocIR for function signatures."""
         parts: list[doc.DocType] = []
         indent_parts: list[doc.DocType] = []
         in_params = False
+        has_parens = False
         for i in node.kid:
             if isinstance(i, uni.Token) and i.name == Tok.LPAREN and node.params:
                 in_params = True
                 parts.append(i.gen.doc_ir)
             elif isinstance(i, uni.Token) and i.name == Tok.RPAREN and node.params:
                 in_params = False
+                has_parens = True
                 parts.append(
                     self.indent(self.concat([self.tight_line(), *indent_parts]))
                 )
@@ -280,9 +275,16 @@ class DocIRGenPass(UniPass):
                 else:
                     indent_parts.append(i.gen.doc_ir)
             else:
+                if (
+                    isinstance(i, uni.Token)
+                    and i.name == Tok.RETURN_HINT
+                    and not has_parens
+                ):
+                    parts.append(self.space())
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_param_var(self, node: uni.ParamVar) -> None:
         """Generate DocIR for parameter variables."""
@@ -294,27 +296,52 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_type_ref(self, node: uni.TypeRef) -> None:
         """Generate DocIR for type references."""
         parts: list[doc.DocType] = []
         for i in node.kid:
             parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assignment(self, node: uni.Assignment) -> None:
         """Generate DocIR for assignments."""
-        parts: list[doc.DocType] = []
-        for i in node.kid:
-            if i == node.type_tag or (isinstance(i, uni.Token) and i.name == Tok.SEMI):
-                parts.pop()
-                parts.append(i.gen.doc_ir)
-                parts.append(self.space())
+        lhs_parts: list[doc.DocType] = []
+        rhs_parts: list[doc.DocType] = []
+        eq_tok: Optional[doc.DocType] = None
+        seen_eq = False
+
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.KW_LET:
+                lhs_parts.append(i.gen.doc_ir)
+                lhs_parts.append(self.space())
+            elif isinstance(i, uni.Token) and i.name == Tok.EQ and not seen_eq:
+                eq_tok = i.gen.doc_ir
+                seen_eq = True
+            elif seen_eq:
+                rhs_parts.append(i.gen.doc_ir)
             else:
-                parts.append(i.gen.doc_ir)
-                parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+                if i == node.aug_op:
+                    lhs_parts.append(self.space())
+                lhs_parts.append(i.gen.doc_ir)
+                if i == node.aug_op:
+                    lhs_parts.append(self.space())
+
+        if eq_tok is not None:
+            rhs_concat = self.concat(rhs_parts)
+            node.gen.doc_ir = self.group(
+                self.concat(
+                    [
+                        *lhs_parts,
+                        self.space(),
+                        eq_tok,
+                        self.indent(self.concat([self.line(), rhs_concat])),
+                    ]
+                )
+            )
+        else:
+            node.gen.doc_ir = self.group(self.concat(lhs_parts + rhs_parts))
 
     def exit_if_stmt(self, node: uni.IfStmt) -> None:
         """Generate DocIR for if statements."""
@@ -343,7 +370,8 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_else_if(self, node: uni.ElseIf) -> None:
         """Generate DocIR for else if statements."""
@@ -372,7 +400,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_else_stmt(self, node: uni.ElseStmt) -> None:
         """Generate DocIR for else statements."""
@@ -401,7 +429,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_binary_expr(self, node: uni.BinaryExpr) -> None:
         """Generate DocIR for binary expressions."""
@@ -409,7 +437,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_expr_stmt(self, node: uni.ExprStmt) -> None:
         """Generate DocIR for expression statements."""
@@ -424,7 +453,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_return_stmt(self, node: uni.ReturnStmt) -> None:
         """Generate DocIR for return statements."""
@@ -436,7 +465,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_func_call(self, node: uni.FuncCall) -> None:
         """Generate DocIR for function calls."""
@@ -475,8 +504,24 @@ class DocIRGenPass(UniPass):
         """Generate DocIR for list values."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.group(self.concat(parts))
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            else:
+                parts.append(i.gen.doc_ir)
+        not_broke = self.concat(parts)
+        parts = []
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.LSQUARE:
+                parts.append(self.hard_line())
+                parts.append(i.gen.doc_ir)
+            else:
+                parts.append(i.gen.doc_ir)
+        broke = self.concat(parts)
+        node.gen.doc_ir = self.group(self.if_break(broke, not_broke))
 
     def exit_dict_val(self, node: uni.DictVal) -> None:
         """Generate DocIR for dictionary values."""
@@ -485,10 +530,12 @@ class DocIRGenPass(UniPass):
             if isinstance(i, uni.Token) and i.name == Tok.LBRACE:
                 parts.append(self.tight_line())
                 parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.RBRACE:
+                parts.append(i.gen.doc_ir)
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_k_v_pair(self, node: uni.KVPair) -> None:
         """Generate DocIR for key-value pairs."""
@@ -496,7 +543,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_has_var(self, node: uni.HasVar) -> None:
         """Generate DocIR for has variable declarations."""
@@ -511,6 +559,7 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
             else:
                 parts.append(i.gen.doc_ir)
+        # parts.pop()
         node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_arch_has(self, node: uni.ArchHas) -> None:
@@ -523,11 +572,14 @@ class DocIRGenPass(UniPass):
             elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
                 parts.pop()
                 parts.append(i.gen.doc_ir)
-                parts.append(self.space())
+            elif isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+                parts.append(self.indent(self.hard_line()))
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_while_stmt(self, node: uni.WhileStmt) -> None:
         """Generate DocIR for while statements."""
@@ -535,7 +587,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
            parts.append(i.gen.doc_ir)
            parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_in_for_stmt(self, node: uni.InForStmt) -> None:
         """Generate DocIR for for-in statements."""
@@ -543,7 +595,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_iter_for_stmt(self, node: uni.IterForStmt) -> None:
         """Generate DocIR for iterative for statements."""
@@ -551,7 +603,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_try_stmt(self, node: uni.TryStmt) -> None:
         """Generate DocIR for try statements."""
@@ -559,7 +611,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_except(self, node: uni.Except) -> None:
         """Generate DocIR for except clauses."""
@@ -567,7 +619,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_finally_stmt(self, node: uni.FinallyStmt) -> None:
         """Generate DocIR for finally statements."""
@@ -575,14 +627,30 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_tuple_val(self, node: uni.TupleVal) -> None:
         """Generate DocIR for tuple values."""
         parts: list[doc.DocType] = []
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.group(self.concat(parts))
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            else:
+                parts.append(i.gen.doc_ir)
+        not_broke = self.concat(parts)
+        parts = []
+        for i in node.kid:
+            if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.LPAREN:
+                parts.append(self.hard_line())
+                parts.append(i.gen.doc_ir)
+            else:
+                parts.append(i.gen.doc_ir)
+        broke = self.concat(parts)
+        node.gen.doc_ir = self.group(self.if_break(broke, not_broke))
 
     def exit_multi_string(self, node: uni.MultiString) -> None:
         """Generate DocIR for multiline strings."""
@@ -605,7 +673,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_list_compr(self, node: uni.ListCompr) -> None:
         """Generate DocIR for list comprehensions."""
@@ -613,7 +682,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_inner_compr(self, node: uni.InnerCompr) -> None:
         """Generate DocIR for inner comprehension clauses."""
@@ -621,7 +691,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_f_string(self, node: uni.FString) -> None:
         """Generate DocIR for formatted strings."""
@@ -644,7 +715,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-
+        parts.pop()
         node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_bool_expr(self, node: uni.BoolExpr) -> None:
@@ -657,7 +728,6 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.line())  # Potential break
-
         node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_unary_expr(self, node: uni.UnaryExpr) -> None:
@@ -723,7 +793,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_gen_compr(self, node: uni.GenCompr) -> None:
         """Generate DocIR for generator comprehensions."""
@@ -731,7 +802,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_set_compr(self, node: uni.SetCompr) -> None:
         """Generate DocIR for set comprehensions."""
@@ -739,7 +811,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_dict_compr(self, node: uni.DictCompr) -> None:
         """Generate DocIR for dictionary comprehensions."""
@@ -750,14 +823,15 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_k_w_pair(self, node: uni.KWPair) -> None:
         """Generate DocIR for keyword arguments."""
         parts: list[doc.DocType] = []
         for i in node.kid:
             parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_await_expr(self, node: uni.AwaitExpr) -> None:
         """Generate DocIR for await expressions."""
@@ -765,7 +839,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_yield_expr(self, node: uni.YieldExpr) -> None:
         """Generate DocIR for yield expressions."""
@@ -773,7 +848,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ctrl_stmt(self, node: uni.CtrlStmt) -> None:
         """Generate DocIR for control statements (break, continue, skip)."""
@@ -788,7 +864,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_disengage_stmt(self, node: uni.DisengageStmt) -> None:
         """Generate DocIR for disengage statements."""
@@ -803,7 +880,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assert_stmt(self, node: uni.AssertStmt) -> None:
         """Generate DocIR for assert statements."""
@@ -811,7 +889,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_raise_stmt(self, node: uni.RaiseStmt) -> None:
         """Generate DocIR for raise statements."""
@@ -819,7 +898,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_global_vars(self, node: uni.GlobalVars) -> None:
         """Generate DocIR for global variables."""
@@ -835,7 +915,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_module_code(self, node: uni.ModuleCode) -> None:
         """Generate DocIR for module code."""
@@ -870,7 +950,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_global_stmt(self, node: uni.GlobalStmt) -> None:
         """Generate DocIR for global statements."""
@@ -885,7 +965,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_visit_stmt(self, node: uni.VisitStmt) -> None:
         """Generate DocIR for visit statements."""
@@ -898,7 +978,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_ignore_stmt(self, node: uni.IgnoreStmt) -> None:
         """Generate DocIR for ignore statements."""
@@ -906,7 +986,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_connect_op(self, node: uni.ConnectOp) -> None:
         """Generate DocIR for connect operator."""
@@ -914,7 +994,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_disconnect_op(self, node: uni.DisconnectOp) -> None:
         """Generate DocIR for disconnect operator."""
@@ -922,7 +1003,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_compare_expr(self, node: uni.CompareExpr) -> None:
         """Generate DocIR for comparison expressions."""
@@ -930,7 +1012,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_atom_unit(self, node: uni.AtomUnit) -> None:
         """Generate DocIR for atom units (parenthesized expressions)."""
@@ -952,7 +1035,8 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
             prev_item = i
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_expr_as_item(self, node: uni.ExprAsItem) -> None:
         """Generate DocIR for expression as item nodes."""
@@ -960,7 +1044,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_filter_compr(self, node: uni.FilterCompr) -> None:
         """Generate DocIR for filter comprehensions."""
@@ -968,7 +1052,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_assign_compr(self, node: uni.AssignCompr) -> None:
         """Generate DocIR for assignment comprehensions."""
@@ -976,7 +1060,7 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_py_inline_code(self, node: uni.PyInlineCode) -> None:
         """Generate DocIR for Python inline code blocks."""
@@ -993,7 +1077,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_test(self, node: uni.Test) -> None:
         """Generate DocIR for test nodes."""
@@ -1009,7 +1093,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_check_stmt(self, node: uni.CheckStmt) -> None:
         """Generate DocIR for check statements."""
@@ -1019,7 +1103,7 @@ class DocIRGenPass(UniPass):
                 parts.pop()
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_stmt(self, node: uni.MatchStmt) -> None:
         """Generate DocIR for match statements."""
@@ -1039,7 +1123,7 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_case(self, node: uni.MatchCase) -> None:
         """Generate DocIR for match cases."""
@@ -1056,7 +1140,7 @@ class DocIRGenPass(UniPass):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
         parts.append(self.indent(self.concat([self.hard_line()] + indent_parts)))
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_value(self, node: uni.MatchValue) -> None:
         """Generate DocIR for match value patterns."""
@@ -1064,7 +1148,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_singleton(self, node: uni.MatchSingleton) -> None:
         """Generate DocIR for match singleton patterns."""
@@ -1072,7 +1157,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_sequence(self, node: uni.MatchSequence) -> None:
         """Generate DocIR for match sequence patterns."""
@@ -1080,7 +1166,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_mapping(self, node: uni.MatchMapping) -> None:
         """Generate DocIR for match mapping patterns."""
@@ -1088,7 +1175,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_or(self, node: uni.MatchOr) -> None:
         """Generate DocIR for match OR patterns."""
@@ -1096,7 +1184,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_as(self, node: uni.MatchAs) -> None:
         """Generate DocIR for match AS patterns."""
@@ -1104,7 +1193,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
            parts.append(i.gen.doc_ir)
            parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_wild(self, node: uni.MatchWild) -> None:
         """Generate DocIR for match wildcard patterns."""
@@ -1112,7 +1202,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_star(self, node: uni.MatchStar) -> None:
         """Generate DocIR for match star patterns (e.g., *args, **kwargs)."""
@@ -1120,7 +1211,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_k_v_pair(self, node: uni.MatchKVPair) -> None:
         """Generate DocIR for match key-value pairs."""
@@ -1131,7 +1223,8 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_match_arch(self, node: uni.MatchArch) -> None:
         """Generate DocIR for match architecture patterns."""
@@ -1142,7 +1235,7 @@ class DocIRGenPass(UniPass):
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_enum(self, node: uni.Enum) -> None:
         """Generate DocIR for enum declarations."""
@@ -1160,30 +1253,89 @@ class DocIRGenPass(UniPass):
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_sub_tag(self, node: uni.SubTag) -> None:
         """Generate DocIR for sub-tag nodes."""
-        parts: list[doc.DocType] = []
+        before_colon: list[doc.DocType] = []
+        after_colon: list[doc.DocType] = []
+        seen_colon = False
+
         for i in node.kid:
-            parts.append(i.gen.doc_ir)
-            parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+            if isinstance(i, uni.Token) and i.name == Tok.COLON and not seen_colon:
+                colon_tok = i.gen.doc_ir
+                seen_colon = True
+            elif seen_colon:
+                after_colon.append(i.gen.doc_ir)
+            else:
+                before_colon.append(i.gen.doc_ir)
+
+        if seen_colon:
+            flat = self.concat([*before_colon, colon_tok, self.space(), *after_colon])
+            broke = self.concat(
+                [
+                    *before_colon,
+                    colon_tok,
+                    self.indent(self.concat([self.line(), *after_colon])),
+                ]
+            )
+            node.gen.doc_ir = self.group(self.if_break(broke, flat))
+        else:
+            node.gen.doc_ir = self.concat(before_colon + after_colon)
 
     def exit_impl_def(self, node: uni.ImplDef) -> None:
         """Generate DocIR for implementation definitions."""
         parts: list[doc.DocType] = []
+        body_parts: list[doc.DocType] = []
+        in_body = False
         for i in node.kid:
             if i == node.doc or (node.decorators and i in node.decorators):
                 parts.append(i.gen.doc_ir)
                 parts.append(self.hard_line())
-            elif i == node.target:
+            elif i in node.target:
+                parts.append(i.gen.doc_ir)
+            elif (
+                in_body
+                or isinstance(node.body, Sequence)
+                and node.body
+                and i == node.body[0]
+            ):
+                if not in_body:
+                    parts.pop()
+                    body_parts.append(self.hard_line())
+                if isinstance(i, uni.Token) and i.name == Tok.COMMA:
+                    body_parts.pop()
+                body_parts.append(i.gen.doc_ir)
+                body_parts.append(self.hard_line())
+                in_body = True
+                if in_body and isinstance(node.body, Sequence) and i == node.body[-1]:
+                    in_body = False
+                    body_parts.pop()
+                    parts.append(self.indent(self.concat(body_parts)))
+                    parts.append(self.hard_line())
+            elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
+                parts.pop()
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+            else:
+                parts.append(i.gen.doc_ir)
+                parts.append(self.space())
+        node.gen.doc_ir = self.group(self.concat(parts))
+
+    def exit_sem_def(self, node: uni.SemDef) -> None:
+        """Generate DocIR for semantic definitions."""
+        parts: list[doc.DocType] = []
+        for i in node.kid:
+            if i in node.target:
+                parts.append(i.gen.doc_ir)
+            elif isinstance(i, uni.Token) and i.name == Tok.SEMI:
+                parts.pop()
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
             else:
                 parts.append(i.gen.doc_ir)
                 parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_event_signature(self, node: uni.EventSignature) -> None:
         """Generate DocIR for event signatures."""
@@ -1191,7 +1343,8 @@ class DocIRGenPass(UniPass):
         for i in node.kid:
             parts.append(i.gen.doc_ir)
             parts.append(self.space())
-        node.gen.doc_ir = self.finalize(parts)
+        parts.pop()
+        node.gen.doc_ir = self.group(self.concat(parts))
 
     def exit_typed_ctx_block(self, node: uni.TypedCtxBlock) -> None:
         """Generate DocIR for typed context blocks."""