@logixjs/core 0.0.1 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/dist/{Action-mqVvtEHt.d.ts → Action-DYl88bwj.d.ts} +1 -1
- package/dist/{Action-BkRHy2vg.d.cts → Action-DkxsI_DK.d.cts} +1 -1
- package/dist/Action.cjs.map +1 -1
- package/dist/Action.d.cts +1 -1
- package/dist/Action.d.ts +1 -1
- package/dist/Action.js +2 -2
- package/dist/{Actions-AsQ07yTP.d.cts → Actions-Dicm7jdc.d.cts} +2 -2
- package/dist/{Actions-AsQ07yTP.d.ts → Actions-Dicm7jdc.d.ts} +2 -2
- package/dist/Actions.cjs.map +1 -1
- package/dist/Actions.d.cts +1 -1
- package/dist/Actions.d.ts +1 -1
- package/dist/Actions.js +1 -1
- package/dist/{Bound-BN1DQ_lM.d.ts → Bound-1OJLzVIS.d.ts} +2 -2
- package/dist/{Bound-BPIfH9SS.d.cts → Bound-BMLrtQ1V.d.cts} +2 -2
- package/dist/Bound.cjs +1737 -399
- package/dist/Bound.cjs.map +1 -1
- package/dist/Bound.d.cts +5 -5
- package/dist/Bound.d.ts +5 -5
- package/dist/Bound.js +19 -17
- package/dist/{Debug-B5q5Bkzx.d.ts → Debug-DKrWP5H1.d.ts} +40 -22
- package/dist/{Debug-Bq8Sqjcr.d.cts → Debug-hIT44XsY.d.cts} +40 -22
- package/dist/Debug.cjs +1348 -318
- package/dist/Debug.cjs.map +1 -1
- package/dist/Debug.d.cts +12 -11
- package/dist/Debug.d.ts +12 -11
- package/dist/Debug.js +20 -11
- package/dist/EffectOp.cjs +15 -6
- package/dist/EffectOp.cjs.map +1 -1
- package/dist/EffectOp.js +3 -3
- package/dist/Env.cjs +772 -6
- package/dist/Env.cjs.map +1 -1
- package/dist/Env.js +5 -2
- package/dist/ExternalStore-DqJKKRJ4.d.ts +61 -0
- package/dist/ExternalStore-JC-gAgEI.d.cts +61 -0
- package/dist/ExternalStore.cjs +774 -0
- package/dist/ExternalStore.cjs.map +1 -0
- package/dist/ExternalStore.d.cts +8 -0
- package/dist/ExternalStore.d.ts +8 -0
- package/dist/ExternalStore.js +19 -0
- package/dist/ExternalStore.js.map +1 -0
- package/dist/{Flow-BhpjE22E.d.ts → Flow-CZmXRDqp.d.cts} +13 -4
- package/dist/{Flow-1fZT8MpX.d.cts → Flow-DIVDxz7R.d.ts} +13 -4
- package/dist/Flow.cjs +765 -148
- package/dist/Flow.cjs.map +1 -1
- package/dist/Flow.d.cts +6 -6
- package/dist/Flow.d.ts +6 -6
- package/dist/Flow.js +9 -8
- package/dist/{Handle-D_cLW1Z3.d.ts → Handle-Bo6cAFut.d.ts} +1 -1
- package/dist/{Handle-D8D1zPb_.d.cts → Handle-CfDvSqN7.d.cts} +1 -1
- package/dist/Handle.d.cts +5 -5
- package/dist/Handle.d.ts +5 -5
- package/dist/{Kernel-8kC-jOda.d.cts → Kernel-CuXBF9S_.d.cts} +16 -7
- package/dist/{Kernel-CnGE1Fyk.d.ts → Kernel-D9guNwRL.d.ts} +16 -7
- package/dist/Kernel.cjs +814 -26
- package/dist/Kernel.cjs.map +1 -1
- package/dist/Kernel.d.cts +13 -12
- package/dist/Kernel.d.ts +13 -12
- package/dist/Kernel.js +7 -4
- package/dist/{Link-Db7975nU.d.ts → Link-CUM0yUCH.d.ts} +10 -3
- package/dist/{Link-fX8x1eCK.d.cts → Link-NAfR6uGD.d.cts} +10 -3
- package/dist/Link.cjs +1294 -121
- package/dist/Link.cjs.map +1 -1
- package/dist/Link.d.cts +5 -5
- package/dist/Link.d.ts +5 -5
- package/dist/Link.js +37 -29
- package/dist/{Logic-DRh4sDZj.d.cts → Logic-09VQpIj3.d.cts} +7 -4
- package/dist/{Logic-BRjEMr-W.d.ts → Logic-DKg7ghGy.d.ts} +7 -4
- package/dist/Logic.cjs +2 -1
- package/dist/Logic.cjs.map +1 -1
- package/dist/Logic.d.cts +5 -5
- package/dist/Logic.d.ts +5 -5
- package/dist/Logic.js +1 -1
- package/dist/{MatchBuilder-CJk5oCkR.d.cts → MatchBuilder-CsW5jgrL.d.ts} +1 -1
- package/dist/{MatchBuilder-0QOc-nlU.d.ts → MatchBuilder-Dksk07F4.d.cts} +1 -1
- package/dist/MatchBuilder.cjs +2 -2
- package/dist/MatchBuilder.cjs.map +1 -1
- package/dist/MatchBuilder.d.cts +6 -6
- package/dist/MatchBuilder.d.ts +6 -6
- package/dist/MatchBuilder.js +2 -2
- package/dist/Middleware-D8tUDLv_.d.cts +100 -0
- package/dist/Middleware-DS7CbTTN.d.ts +100 -0
- package/dist/Middleware.cjs +678 -58
- package/dist/Middleware.cjs.map +1 -1
- package/dist/Middleware.d.cts +2 -86
- package/dist/Middleware.d.ts +2 -86
- package/dist/Middleware.js +15 -12
- package/dist/{Module-DnzluX2J.d.ts → Module-B_Cntyms.d.ts} +54 -25
- package/dist/{Module-B_0xRDMR.d.cts → Module-CmNOVXzf.d.cts} +54 -25
- package/dist/Module.cjs +9331 -3317
- package/dist/Module.cjs.map +1 -1
- package/dist/Module.d.cts +7 -6
- package/dist/Module.d.ts +7 -6
- package/dist/Module.js +39 -31
- package/dist/ModuleTag-CGho_InD.d.ts +113 -0
- package/dist/ModuleTag-CITb8L_G.d.cts +113 -0
- package/dist/ModuleTag.cjs +7248 -2847
- package/dist/ModuleTag.cjs.map +1 -1
- package/dist/ModuleTag.d.cts +6 -6
- package/dist/ModuleTag.d.ts +6 -6
- package/dist/ModuleTag.js +35 -29
- package/dist/Observability-Bdhnx2Dv.d.ts +385 -0
- package/dist/Observability-DXGAFBIT.d.cts +385 -0
- package/dist/Observability.cjs +5093 -1556
- package/dist/Observability.cjs.map +1 -1
- package/dist/Observability.d.cts +6 -7
- package/dist/Observability.d.ts +6 -7
- package/dist/Observability.js +28 -23
- package/dist/{Platform-CHX8o-U4.d.ts → Platform-B4s8tg6C.d.cts} +4 -5
- package/dist/{Platform-C49Pv956.d.cts → Platform-BV_0MW7g.d.cts} +5 -2
- package/dist/{Platform-C49Pv956.d.ts → Platform-BV_0MW7g.d.ts} +5 -2
- package/dist/{Platform-CVlv0xLQ.d.cts → Platform-W0Mefy_e.d.ts} +4 -5
- package/dist/Platform.cjs +2 -1
- package/dist/Platform.cjs.map +1 -1
- package/dist/Platform.d.cts +2 -3
- package/dist/Platform.d.ts +2 -3
- package/dist/Platform.js +2 -2
- package/dist/{Process-CM9xbMdP.d.ts → Process-CO8G7HO9.d.cts} +30 -5
- package/dist/{Process-mL8fHDSB.d.cts → Process-Cyf6VNDR.d.ts} +30 -5
- package/dist/Process.cjs +1288 -120
- package/dist/Process.cjs.map +1 -1
- package/dist/Process.d.cts +6 -6
- package/dist/Process.d.ts +6 -6
- package/dist/Process.js +34 -26
- package/dist/ReadQuery-C_or5nLC.d.ts +128 -0
- package/dist/ReadQuery-DXLzCE0E.d.cts +614 -0
- package/dist/ReadQuery-DXLzCE0E.d.ts +614 -0
- package/dist/ReadQuery-Yve1lmUo.d.cts +128 -0
- package/dist/ReadQuery.cjs +290 -5
- package/dist/ReadQuery.cjs.map +1 -1
- package/dist/ReadQuery.d.cts +3 -2
- package/dist/ReadQuery.d.ts +3 -2
- package/dist/ReadQuery.js +23 -5
- package/dist/{Reflection-CQnKwPXj.d.ts → Reflection-B2Xi1e4Q.d.ts} +89 -7
- package/dist/{Reflection-Kabo1mlU.d.cts → Reflection-DNB4V4_e.d.cts} +89 -7
- package/dist/Reflection.cjs +3227 -1617
- package/dist/Reflection.cjs.map +1 -1
- package/dist/Reflection.d.cts +17 -15
- package/dist/Reflection.d.ts +17 -15
- package/dist/Reflection.js +33 -25
- package/dist/{Resource-Dy1xD_DG.d.cts → Resource-pKvQQ4x5.d.cts} +3 -3
- package/dist/{Resource-Dy1xD_DG.d.ts → Resource-pKvQQ4x5.d.ts} +3 -3
- package/dist/Resource.cjs +781 -15
- package/dist/Resource.cjs.map +1 -1
- package/dist/Resource.d.cts +1 -1
- package/dist/Resource.d.ts +1 -1
- package/dist/Resource.js +6 -3
- package/dist/{Root-7ADUMk4t.d.cts → Root-CCVuFHB6.d.cts} +3 -3
- package/dist/{Root-7ADUMk4t.d.ts → Root-CCVuFHB6.d.ts} +3 -3
- package/dist/Root.cjs +786 -20
- package/dist/Root.cjs.map +1 -1
- package/dist/Root.d.cts +2 -2
- package/dist/Root.d.ts +2 -2
- package/dist/Root.js +7 -3
- package/dist/{Runtime-CtyzZG4i.d.ts → Runtime-CRmvwK4I.d.ts} +70 -14
- package/dist/{Runtime-B-aL-f29.d.cts → Runtime-C_wJM9mN.d.cts} +70 -14
- package/dist/Runtime.cjs +4942 -1601
- package/dist/Runtime.cjs.map +1 -1
- package/dist/Runtime.d.cts +17 -15
- package/dist/Runtime.d.ts +17 -15
- package/dist/Runtime.js +44 -32
- package/dist/{ScopeRegistry-D1owDNSm.d.cts → ScopeRegistry-BhYzqWri.d.cts} +6 -6
- package/dist/{ScopeRegistry-D1owDNSm.d.ts → ScopeRegistry-BhYzqWri.d.ts} +6 -6
- package/dist/ScopeRegistry.cjs +776 -10
- package/dist/ScopeRegistry.cjs.map +1 -1
- package/dist/ScopeRegistry.d.cts +1 -1
- package/dist/ScopeRegistry.d.ts +1 -1
- package/dist/ScopeRegistry.js +6 -3
- package/dist/{State-CU50R26M.d.cts → State-rNFsFPTl.d.cts} +2 -2
- package/dist/{State-CU50R26M.d.ts → State-rNFsFPTl.d.ts} +2 -2
- package/dist/State.cjs.map +1 -1
- package/dist/State.d.cts +1 -1
- package/dist/State.d.ts +1 -1
- package/dist/State.js +1 -1
- package/dist/{StateTrait-BGsZghTz.d.ts → StateTrait-CijdwNb6.d.ts} +25 -8
- package/dist/{StateTrait-OWhbj12c.d.cts → StateTrait-Dltto6PU.d.cts} +25 -8
- package/dist/StateTrait.cjs +1890 -528
- package/dist/StateTrait.cjs.map +1 -1
- package/dist/StateTrait.d.cts +9 -7
- package/dist/StateTrait.d.ts +9 -7
- package/dist/StateTrait.js +18 -14
- package/dist/{TraitLifecycle-CwV5WPFX.d.cts → TraitLifecycle-BKzDqzLu.d.cts} +2 -2
- package/dist/{TraitLifecycle-LdIWmKlg.d.ts → TraitLifecycle-Cvo94uDB.d.ts} +2 -2
- package/dist/TraitLifecycle.cjs +630 -67
- package/dist/TraitLifecycle.cjs.map +1 -1
- package/dist/TraitLifecycle.d.cts +6 -6
- package/dist/TraitLifecycle.d.ts +6 -6
- package/dist/TraitLifecycle.js +8 -7
- package/dist/Workflow-C_OWr4dV.d.ts +415 -0
- package/dist/Workflow-DmydkHO8.d.cts +415 -0
- package/dist/Workflow.cjs +3150 -0
- package/dist/Workflow.cjs.map +1 -0
- package/dist/Workflow.d.cts +7 -0
- package/dist/Workflow.d.ts +7 -0
- package/dist/Workflow.js +58 -0
- package/dist/Workflow.js.map +1 -0
- package/dist/{action-DiMDD_0v.d.cts → action-BQxjPFEw.d.cts} +5 -5
- package/dist/{action-DiMDD_0v.d.ts → action-BQxjPFEw.d.ts} +5 -5
- package/dist/chunk-2XRLXDWR.js +276 -0
- package/dist/chunk-2XRLXDWR.js.map +1 -0
- package/dist/chunk-3L6QGFMM.js +701 -0
- package/dist/chunk-3L6QGFMM.js.map +1 -0
- package/dist/{chunk-GMPEOUP2.js → chunk-4MZ7BT3R.js} +2 -2
- package/dist/chunk-4MZ7BT3R.js.map +1 -0
- package/dist/{chunk-3IYZ5IGG.js → chunk-5WKUGEBY.js} +2 -2
- package/dist/{chunk-3RMKLXHX.js → chunk-63ZQ5RIN.js} +2 -2
- package/dist/{chunk-M3WTHJHJ.js → chunk-67DIEA53.js} +385 -148
- package/dist/chunk-67DIEA53.js.map +1 -0
- package/dist/{chunk-YS3AZQ2G.js → chunk-6HFAW2MH.js} +1 -1
- package/dist/chunk-6HFAW2MH.js.map +1 -0
- package/dist/{chunk-EY4NZKDR.js → chunk-6Y2TKCNY.js} +2 -2
- package/dist/{chunk-76WT3HOR.js → chunk-6YZOXFPQ.js} +25 -24
- package/dist/chunk-6YZOXFPQ.js.map +1 -0
- package/dist/{chunk-G5ZBFPNU.js → chunk-A2RQOJC7.js} +2 -2
- package/dist/{chunk-AUIR5O6W.js → chunk-AFSB6NKM.js} +13 -19
- package/dist/chunk-AFSB6NKM.js.map +1 -0
- package/dist/{chunk-JCXGZRMU.js → chunk-AO4JEOKD.js} +22 -23
- package/dist/chunk-AO4JEOKD.js.map +1 -0
- package/dist/{chunk-TAAPQVZN.js → chunk-AYELIQXR.js} +2 -2
- package/dist/{chunk-QMM6O4CD.js → chunk-BLHZW7DG.js} +15 -3
- package/dist/{chunk-QMM6O4CD.js.map → chunk-BLHZW7DG.js.map} +1 -1
- package/dist/{chunk-TQOBJYDP.js → chunk-CD4N74YC.js} +1 -1
- package/dist/chunk-CD4N74YC.js.map +1 -0
- package/dist/{chunk-ANLBCBDC.js → chunk-CGE2HBTH.js} +11 -11
- package/dist/chunk-CGE2HBTH.js.map +1 -0
- package/dist/{chunk-OFADUJWJ.js → chunk-CYYSQMLO.js} +5 -5
- package/dist/chunk-CYYSQMLO.js.map +1 -0
- package/dist/{chunk-66ALHVEX.js → chunk-EB46EYI7.js} +3 -3
- package/dist/{chunk-NZJKFF45.js → chunk-EKCDHWRK.js} +4 -4
- package/dist/chunk-EKCDHWRK.js.map +1 -0
- package/dist/{chunk-BABLDP24.js → chunk-EPQFNJU3.js} +152 -7
- package/dist/chunk-EPQFNJU3.js.map +1 -0
- package/dist/{chunk-OGWBVHB3.js → chunk-ESR6HGOY.js} +73 -14
- package/dist/chunk-ESR6HGOY.js.map +1 -0
- package/dist/{chunk-NBD3KUOZ.js → chunk-F6RP62H3.js} +150 -98
- package/dist/chunk-F6RP62H3.js.map +1 -0
- package/dist/chunk-FBYW3QDI.js +252 -0
- package/dist/chunk-FBYW3QDI.js.map +1 -0
- package/dist/{chunk-IPF7E66P.js → chunk-FYAODKVP.js} +2 -2
- package/dist/chunk-GNEN7NKO.js +908 -0
- package/dist/chunk-GNEN7NKO.js.map +1 -0
- package/dist/chunk-GWSM4KLB.js +763 -0
- package/dist/chunk-GWSM4KLB.js.map +1 -0
- package/dist/{chunk-4SO6JMZL.js → chunk-HDMXCUZL.js} +1 -1
- package/dist/chunk-HDMXCUZL.js.map +1 -0
- package/dist/{chunk-ZFY7U2FR.js → chunk-HJM5Y5NU.js} +43 -3
- package/dist/chunk-HJM5Y5NU.js.map +1 -0
- package/dist/{chunk-ZGDVUPTM.js → chunk-IOZ3VKPK.js} +129 -68
- package/dist/chunk-IOZ3VKPK.js.map +1 -0
- package/dist/{chunk-PYOE4VSI.js → chunk-IVXSVHO4.js} +303 -247
- package/dist/chunk-IVXSVHO4.js.map +1 -0
- package/dist/chunk-J3CWXIPV.js +242 -0
- package/dist/chunk-J3CWXIPV.js.map +1 -0
- package/dist/chunk-K6JQW266.js +42 -0
- package/dist/chunk-K6JQW266.js.map +1 -0
- package/dist/chunk-KMZYQF6Q.js +202 -0
- package/dist/chunk-KMZYQF6Q.js.map +1 -0
- package/dist/{chunk-JWOYLO27.js → chunk-LPPZDFTD.js} +22 -12
- package/dist/chunk-LPPZDFTD.js.map +1 -0
- package/dist/{chunk-PAYXCY6A.js → chunk-MYB2B5WX.js} +997 -576
- package/dist/chunk-MYB2B5WX.js.map +1 -0
- package/dist/chunk-MYKNINNN.js +228 -0
- package/dist/chunk-MYKNINNN.js.map +1 -0
- package/dist/chunk-NSQIRMVF.js +27 -0
- package/dist/{chunk-QCHIQWAJ.js.map → chunk-NSQIRMVF.js.map} +1 -1
- package/dist/chunk-NUDBM4MM.js +30 -0
- package/dist/chunk-NUDBM4MM.js.map +1 -0
- package/dist/chunk-NZMWWDAY.js +23 -0
- package/dist/chunk-NZMWWDAY.js.map +1 -0
- package/dist/{chunk-RNFE3ML2.js → chunk-OCUV2Y25.js} +4 -3
- package/dist/chunk-OCUV2Y25.js.map +1 -0
- package/dist/chunk-P4ZJOQA7.js +271 -0
- package/dist/chunk-P4ZJOQA7.js.map +1 -0
- package/dist/chunk-P6C5EZ3D.js +342 -0
- package/dist/chunk-P6C5EZ3D.js.map +1 -0
- package/dist/{chunk-CW6T36TN.js → chunk-PBD7BJUN.js} +62 -4
- package/dist/chunk-PBD7BJUN.js.map +1 -0
- package/dist/chunk-PBIUCQY3.js +696 -0
- package/dist/chunk-PBIUCQY3.js.map +1 -0
- package/dist/chunk-PD6YECQH.js +845 -0
- package/dist/chunk-PD6YECQH.js.map +1 -0
- package/dist/{chunk-M7IYCTJV.js → chunk-R4LFQGP3.js} +2 -2
- package/dist/chunk-RHJIGDUE.js +21 -0
- package/dist/chunk-RHJIGDUE.js.map +1 -0
- package/dist/{chunk-KP7MUZNX.js → chunk-RLXO27MW.js} +30 -8
- package/dist/chunk-RLXO27MW.js.map +1 -0
- package/dist/{chunk-DFNM3WX2.js → chunk-S44BEV4B.js} +168 -45
- package/dist/chunk-S44BEV4B.js.map +1 -0
- package/dist/chunk-S4S5N4BJ.js +1461 -0
- package/dist/chunk-S4S5N4BJ.js.map +1 -0
- package/dist/{chunk-BZ2SHDN2.js → chunk-SGTRAXXX.js} +3 -3
- package/dist/chunk-SGTRAXXX.js.map +1 -0
- package/dist/{chunk-M2RGJPXX.js → chunk-SJAE5PB5.js} +3 -3
- package/dist/{chunk-JGIWG6SR.js → chunk-SNPNHU3H.js} +3937 -1776
- package/dist/chunk-SNPNHU3H.js.map +1 -0
- package/dist/{chunk-IHVBV5C2.js → chunk-SOOBFXRR.js} +94 -71
- package/dist/chunk-SOOBFXRR.js.map +1 -0
- package/dist/{chunk-ZDTRWK5F.js → chunk-TAHFWKS6.js} +2 -2
- package/dist/chunk-UEFFTVPY.js +9 -0
- package/dist/chunk-UEFFTVPY.js.map +1 -0
- package/dist/{chunk-24VULZ7A.js → chunk-UR5BXLBP.js} +3 -3
- package/dist/chunk-UR5BXLBP.js.map +1 -0
- package/dist/{chunk-DMBALCE2.js → chunk-V2SBGVDO.js} +471 -186
- package/dist/chunk-V2SBGVDO.js.map +1 -0
- package/dist/chunk-VJLWD47W.js +23 -0
- package/dist/chunk-VJLWD47W.js.map +1 -0
- package/dist/{chunk-4CQAV7YB.js → chunk-W647DX5Z.js} +2 -2
- package/dist/{chunk-THATMZXD.js → chunk-WFIIU3YZ.js} +2 -2
- package/dist/{chunk-THATMZXD.js.map → chunk-WFIIU3YZ.js.map} +1 -1
- package/dist/chunk-YZDJMAKL.js +82 -0
- package/dist/chunk-YZDJMAKL.js.map +1 -0
- package/dist/{chunk-3TMODYZV.js → chunk-Z5XH6VHY.js} +5 -5
- package/dist/chunk-Z5XH6VHY.js.map +1 -0
- package/dist/{chunk-BE3HW4FY.js → chunk-ZBBMZMA6.js} +377 -170
- package/dist/chunk-ZBBMZMA6.js.map +1 -0
- package/dist/index.cjs +21224 -11714
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +195 -49
- package/dist/index.d.ts +195 -49
- package/dist/index.js +150 -74
- package/dist/index.js.map +1 -1
- package/dist/{ir-BMP7yxJJ.d.cts → ir-BSosEwc8.d.cts} +1 -1
- package/dist/{ir-DUOz6H-5.d.ts → ir-D-uqwL_4.d.ts} +1 -1
- package/dist/{module-B8CBqIZ_.d.cts → module-Ds4tarcI.d.cts} +230 -140
- package/dist/{module-k7m3txak.d.ts → module-Zd1Gn-Nj.d.ts} +230 -140
- package/package.json +20 -4
- package/dist/ModuleTag-C8FHY_sY.d.ts +0 -93
- package/dist/ModuleTag-EGbgBMpZ.d.cts +0 -93
- package/dist/Observability-COqEvp2C.d.cts +0 -713
- package/dist/Observability-cY4kLn0S.d.ts +0 -713
- package/dist/ReadQuery-BlMwhe-F.d.ts +0 -30
- package/dist/ReadQuery-CL5XlXts.d.cts +0 -30
- package/dist/ReadQuery-SinbStGF.d.cts +0 -38
- package/dist/ReadQuery-SinbStGF.d.ts +0 -38
- package/dist/chunk-24VULZ7A.js.map +0 -1
- package/dist/chunk-3QMIVH35.js +0 -43
- package/dist/chunk-3QMIVH35.js.map +0 -1
- package/dist/chunk-3TMODYZV.js.map +0 -1
- package/dist/chunk-4SO6JMZL.js.map +0 -1
- package/dist/chunk-76WT3HOR.js.map +0 -1
- package/dist/chunk-ANLBCBDC.js.map +0 -1
- package/dist/chunk-AUIR5O6W.js.map +0 -1
- package/dist/chunk-BABLDP24.js.map +0 -1
- package/dist/chunk-BE3HW4FY.js.map +0 -1
- package/dist/chunk-BZ2SHDN2.js.map +0 -1
- package/dist/chunk-CW6T36TN.js.map +0 -1
- package/dist/chunk-DFNM3WX2.js.map +0 -1
- package/dist/chunk-DMBALCE2.js.map +0 -1
- package/dist/chunk-EGK3KN7B.js +0 -406
- package/dist/chunk-EGK3KN7B.js.map +0 -1
- package/dist/chunk-GMPEOUP2.js.map +0 -1
- package/dist/chunk-IHVBV5C2.js.map +0 -1
- package/dist/chunk-JCXGZRMU.js.map +0 -1
- package/dist/chunk-JGIWG6SR.js.map +0 -1
- package/dist/chunk-JWOYLO27.js.map +0 -1
- package/dist/chunk-KIXAU3GM.js +0 -137
- package/dist/chunk-KIXAU3GM.js.map +0 -1
- package/dist/chunk-KL5ACTCT.js +0 -8
- package/dist/chunk-KL5ACTCT.js.map +0 -1
- package/dist/chunk-KP7MUZNX.js.map +0 -1
- package/dist/chunk-M3BFQ7HK.js +0 -13
- package/dist/chunk-M3BFQ7HK.js.map +0 -1
- package/dist/chunk-M3WTHJHJ.js.map +0 -1
- package/dist/chunk-NBD3KUOZ.js.map +0 -1
- package/dist/chunk-NQZ2OSGR.js +0 -151
- package/dist/chunk-NQZ2OSGR.js.map +0 -1
- package/dist/chunk-NZJKFF45.js.map +0 -1
- package/dist/chunk-OFADUJWJ.js.map +0 -1
- package/dist/chunk-OGWBVHB3.js.map +0 -1
- package/dist/chunk-PAYXCY6A.js.map +0 -1
- package/dist/chunk-PYOE4VSI.js.map +0 -1
- package/dist/chunk-QCHIQWAJ.js +0 -21
- package/dist/chunk-RNFE3ML2.js.map +0 -1
- package/dist/chunk-TKZ7MEIA.js +0 -27
- package/dist/chunk-TKZ7MEIA.js.map +0 -1
- package/dist/chunk-TQOBJYDP.js.map +0 -1
- package/dist/chunk-VZB726PE.js +0 -93
- package/dist/chunk-VZB726PE.js.map +0 -1
- package/dist/chunk-W3TEWHLO.js +0 -568
- package/dist/chunk-W3TEWHLO.js.map +0 -1
- package/dist/chunk-YS3AZQ2G.js.map +0 -1
- package/dist/chunk-ZFLHVFUC.js +0 -192
- package/dist/chunk-ZFLHVFUC.js.map +0 -1
- package/dist/chunk-ZFY7U2FR.js.map +0 -1
- package/dist/chunk-ZGDVUPTM.js.map +0 -1
- package/dist/protocol-g_1897M2.d.cts +0 -127
- package/dist/protocol-g_1897M2.d.ts +0 -127
- /package/dist/{chunk-3IYZ5IGG.js.map → chunk-5WKUGEBY.js.map} +0 -0
- /package/dist/{chunk-3RMKLXHX.js.map → chunk-63ZQ5RIN.js.map} +0 -0
- /package/dist/{chunk-EY4NZKDR.js.map → chunk-6Y2TKCNY.js.map} +0 -0
- /package/dist/{chunk-G5ZBFPNU.js.map → chunk-A2RQOJC7.js.map} +0 -0
- /package/dist/{chunk-TAAPQVZN.js.map → chunk-AYELIQXR.js.map} +0 -0
- /package/dist/{chunk-66ALHVEX.js.map → chunk-EB46EYI7.js.map} +0 -0
- /package/dist/{chunk-IPF7E66P.js.map → chunk-FYAODKVP.js.map} +0 -0
- /package/dist/{chunk-M7IYCTJV.js.map → chunk-R4LFQGP3.js.map} +0 -0
- /package/dist/{chunk-M2RGJPXX.js.map → chunk-SJAE5PB5.js.map} +0 -0
- /package/dist/{chunk-ZDTRWK5F.js.map → chunk-TAHFWKS6.js.map} +0 -0
- /package/dist/{chunk-4CQAV7YB.js.map → chunk-W647DX5Z.js.map} +0 -0
package/dist/Process.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/Process.ts","../src/internal/runtime/ModuleFactory.ts","../src/internal/runtime/core/ModuleRuntime.ts","../src/internal/runtime/core/Lifecycle.ts","../src/internal/runtime/core/errorSummary.ts","../src/internal/runtime/core/DebugSink.ts","../src/internal/observability/jsonValue.ts","../src/internal/runtime/core/EffectOpCore.ts","../src/internal/runtime/core/ConvergeStaticIrCollector.ts","../src/internal/runtime/core/StateTransaction.ts","../src/internal/runtime/core/RuntimeKernel.ts","../src/internal/runtime/core/env.ts","../src/internal/runtime/core/KernelRef.ts","../src/internal/runtime/core/RuntimeServiceBuiltins.ts","../src/internal/observability/evidenceCollector.ts","../src/internal/observability/runSession.ts","../src/internal/runtime/core/ModuleRuntime.operation.ts","../src/internal/effect-op.ts","../src/internal/runtime/core/ModuleRuntime.dispatch.ts","../src/internal/runtime/core/ReducerDiagnostics.ts","../src/internal/runtime/core/ModuleRuntime.effects.ts","../src/internal/runtime/core/ModuleRuntime.transaction.ts","../src/internal/runtime/core/TaskRunner.ts","../src/internal/state-trait/converge-in-transaction.ts","../src/internal/state-trait/converge-diagnostics.ts","../src/internal/state-trait/exec-vm-mode.ts","../src/internal/state-trait/converge-step.ts","../src/internal/state-trait/validate.ts","../src/internal/state-trait/source.ts","../src/internal/resource.ts","../src/internal/runtime/core/ReplayLog.ts","../src/internal/runtime/core/ModuleRuntime.concurrencyPolicy.ts","../src/internal/runtime/core/ModuleRuntime.txnLanePolicy.ts","../src/internal/runtime/core/ModuleRuntime.traitConvergeConfig.ts","../src/internal/state-trait/build.ts","../src/internal/runtime/core/ModuleRuntime.internalHooks.ts","../src/internal/runtime/core/RootContext.ts","../src/internal/runtime/core/process/ProcessRuntime.ts","../src/internal/runtime/core/process/concurrency.ts","../src/internal/runtime/core/process/events.ts","../src/internal/r
untime/core/process/meta.ts","../src/internal/runtime/core/process/selectorSchema.ts","../src/internal/runtime/core/SelectorGraph.ts","../src/internal/runtime/core/ModuleRuntime.txnQueue.ts","../src/internal/runtime/core/ModuleRuntime.logics.ts","../src/internal/runtime/core/LifecycleDiagnostics.ts","../src/internal/runtime/core/LogicDiagnostics.ts","../src/internal/runtime/core/Platform.ts","../src/internal/runtime/core/ConcurrencyDiagnostics.ts","../src/internal/runtime/core/BoundApiRuntime.ts","../src/internal/action.ts","../src/internal/runtime/core/mutativePatches.ts","../src/internal/runtime/core/FlowRuntime.ts","../src/internal/runtime/core/MatchBuilder.ts","../src/internal/root.ts"],"sourcesContent":["import { Effect } from 'effect'\nimport * as ModuleFactory from './internal/runtime/ModuleFactory.js'\nimport type * as Protocol from './internal/runtime/core/process/protocol.js'\nimport * as Meta from './internal/runtime/core/process/meta.js'\nimport type { AnyModuleShape, ModuleHandle, ModuleLike, ModuleTag } from './internal/module.js'\n\nexport type DiagnosticsLevel = Protocol.DiagnosticsLevel\nexport type ProcessScope = Protocol.ProcessScope\nexport type ProcessIdentity = Protocol.ProcessIdentity\nexport type ProcessInstanceIdentity = Protocol.ProcessInstanceIdentity\nexport type ProcessTriggerSpec = Protocol.ProcessTriggerSpec\nexport type ProcessTrigger = Protocol.ProcessTrigger\nexport type ProcessConcurrencyPolicy = Protocol.ProcessConcurrencyPolicy\nexport type ProcessErrorPolicy = Protocol.ProcessErrorPolicy\nexport type ProcessDefinition = Protocol.ProcessDefinition\nexport type ProcessInstallation = Protocol.ProcessInstallation\nexport type SerializableErrorSummary = Protocol.SerializableErrorSummary\nexport type ProcessInstanceStatus = Protocol.ProcessInstanceStatus\nexport type ProcessEvent = Protocol.ProcessEvent\nexport type ProcessControlRequest = Protocol.ProcessControlRequest\nexport type ProcessPlatformEvent = 
Protocol.ProcessPlatformEvent\n\nexport type ProcessEffect<E = never, R = never> = Meta.ProcessEffect<E, R>\n\nexport type ProcessMakeDefinition =\n | string\n | {\n readonly processId: string\n readonly name?: string\n readonly description?: string\n readonly requires?: ReadonlyArray<string>\n readonly triggers?: ReadonlyArray<ProcessTriggerSpec>\n readonly concurrency?: ProcessConcurrencyPolicy\n readonly errorPolicy?: ProcessErrorPolicy\n readonly diagnosticsLevel?: DiagnosticsLevel\n }\n\nconst DEFAULT_TRIGGERS: ReadonlyArray<ProcessTriggerSpec> = [{ kind: 'platformEvent', platformEvent: 'runtime:boot' }]\n\nconst normalizeDefinition = (input: ProcessMakeDefinition): ProcessDefinition => {\n const base = typeof input === 'string' ? ({ processId: input } satisfies { readonly processId: string }) : input\n\n const processId = base.processId\n if (typeof processId !== 'string' || processId.length === 0) {\n throw new Error('[Process.make] processId must be a non-empty string')\n }\n\n const triggers = Array.isArray(base.triggers) && base.triggers.length > 0 ? base.triggers : DEFAULT_TRIGGERS\n\n const concurrency = base.concurrency ?? ({ mode: 'latest' } satisfies ProcessConcurrencyPolicy)\n const errorPolicy = base.errorPolicy ?? ({ mode: 'failStop' } satisfies ProcessErrorPolicy)\n const diagnosticsLevel = base.diagnosticsLevel ?? 
'off'\n\n return {\n processId,\n name: base.name,\n description: base.description,\n requires: base.requires,\n triggers,\n concurrency,\n errorPolicy,\n diagnosticsLevel,\n }\n}\n\nexport const make = <E, R>(definition: ProcessMakeDefinition, effect: Effect.Effect<void, E, R>): ProcessEffect<E, R> =>\n Meta.attachMeta(effect, {\n definition: normalizeDefinition(definition),\n kind: 'process',\n })\n\nexport const getDefinition = (effect: Effect.Effect<void, any, any>): ProcessDefinition | undefined =>\n Meta.getDefinition(effect)\n\nexport const getMeta = (effect: Effect.Effect<void, any, any>): Meta.ProcessMeta | undefined => Meta.getMeta(effect)\n\nexport const attachMeta = <E, R>(effect: Effect.Effect<void, E, R>, meta: Meta.ProcessMeta): ProcessEffect<E, R> =>\n Meta.attachMeta(effect, meta)\n\ntype LinkModuleToken<Id extends string, Sh extends AnyModuleShape> = ModuleTag<Id, Sh> | ModuleLike<Id, Sh, object>\n\ntype LinkModuleIdOf<M> = M extends { readonly id: infer Id } ? Id : never\ntype LinkModuleShapeOf<M> =\n M extends ModuleLike<string, infer Sh, object> ? Sh : M extends ModuleTag<string, infer Sh> ? Sh : never\ntype LinkModuleTagOf<M> =\n M extends ModuleLike<string, infer Sh, object>\n ? ModuleTag<string, Sh>\n : M extends ModuleTag<string, AnyModuleShape>\n ? 
M\n : never\n\nconst isModuleLike = (value: unknown): value is ModuleLike<string, AnyModuleShape, object> =>\n Boolean(value) &&\n typeof value === 'object' &&\n ((value as { readonly _kind?: unknown })._kind === 'ModuleDef' ||\n (value as { readonly _kind?: unknown })._kind === 'Module') &&\n 'tag' in (value as object)\n\nconst unwrapModuleTag = <Id extends string, Sh extends AnyModuleShape>(\n module: LinkModuleToken<Id, Sh>,\n): ModuleTag<Id, Sh> => {\n if (isModuleLike(module)) {\n return module.tag\n }\n return module as ModuleTag<Id, Sh>\n}\n\nexport interface LinkConfig<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> {\n readonly id?: string\n readonly modules: Ms\n}\n\nexport type LinkHandles<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> = {\n [M in Ms[number] as LinkModuleIdOf<M>]: ModuleHandle<LinkModuleShapeOf<M>>\n}\n\ntype ModulesRecord<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> = {\n [M in Ms[number] as LinkModuleIdOf<M>]: LinkModuleTagOf<M>\n}\n\nexport function link<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[], E = never, R = never>(\n config: LinkConfig<Ms>,\n logic: ($: LinkHandles<Ms>) => Effect.Effect<void, E, R>,\n): ProcessEffect<E, R> {\n const linkId =\n config.id ??\n [...config.modules]\n .map((m) => m.id)\n .sort()\n .join('~')\n\n const modulesRecord = Object.create(null) as ModulesRecord<Ms>\n const requires = new Set<string>()\n\n for (const module of config.modules) {\n const tag = unwrapModuleTag(module)\n requires.add(String(tag.id))\n ;(modulesRecord as Record<string, ModuleTag<string, AnyModuleShape>>)[tag.id] = tag as unknown as ModuleTag<\n string,\n AnyModuleShape\n >\n }\n\n const effect = ModuleFactory.Link(\n modulesRecord as unknown as Record<string, ModuleTag<string, AnyModuleShape>>,\n logic as unknown as ($: Record<string, ModuleHandle<AnyModuleShape>>) => Effect.Effect<void, E, R>,\n )\n\n const definition: ProcessDefinition = {\n processId: linkId,\n 
requires: Array.from(requires),\n triggers: [...DEFAULT_TRIGGERS, { kind: 'platformEvent', platformEvent: `link:${linkId}` }],\n concurrency: { mode: 'latest' },\n errorPolicy: { mode: 'failStop' },\n diagnosticsLevel: 'off',\n }\n\n return Meta.attachMeta(effect, {\n definition,\n kind: 'link',\n }) as ProcessEffect<E, R>\n}\n","import { Context, Effect, Layer, Option, Schema } from 'effect'\nimport * as ModuleRuntimeImpl from './ModuleRuntime.js'\nimport * as BoundApiRuntime from './BoundApiRuntime.js'\nimport * as LogicDiagnostics from './core/LogicDiagnostics.js'\nimport * as LogicPlanMarker from './core/LogicPlanMarker.js'\nimport type * as Action from '../action.js'\nimport type { FieldPath } from '../field-path.js'\nimport type {\n AnyModuleShape,\n AnySchema,\n ActionsFromMap,\n ModuleTag as LogixModuleTag,\n ModuleShape,\n ReducersFromMap,\n StateOf,\n ActionOf,\n ModuleHandle,\n ModuleLogic,\n ModuleImpl,\n ModuleImplementStateTransactionOptions,\n} from './core/module.js'\n\n/**\n * v3: Link (formerly Orchestrator)\n * A glue layer for cross-module collaboration.\n *\n * - Does not own its own State.\n * - Can access multiple Modules' readonly handles.\n * - Can define Logic only; cannot define State/Action.\n */\nexport function Link<Modules extends Record<string, LogixModuleTag<any, AnyModuleShape>>, E = never, R = never>(\n modules: Modules,\n logic: ($: { [K in keyof Modules]: ModuleHandle<Modules[K]['shape']> }) => Effect.Effect<void, E, R>,\n): Effect.Effect<void, E, R> {\n return Effect.gen(function* () {\n const handles: Record<string, ModuleHandle<AnyModuleShape>> = {}\n\n for (const [key, module] of Object.entries(modules)) {\n const runtime = yield* module\n\n handles[key] = {\n read: (selector: any) => Effect.map(runtime.getState, selector),\n changes: runtime.changes,\n dispatch: runtime.dispatch,\n actions$: runtime.actions$,\n actions: new Proxy(\n {},\n {\n get: (_target, prop) => (payload: any) => runtime.dispatch({ _tag: prop as string, 
payload }),\n },\n ),\n }\n }\n\n return yield* logic(\n handles as {\n [K in keyof Modules]: ModuleHandle<Modules[K]['shape']>\n },\n )\n })\n}\n\n/**\n * Module factory implementation: construct a ModuleTag from an id and Schema definitions.\n */\nexport function Module<Id extends string, SSchema extends AnySchema, AMap extends Record<string, Action.AnyActionToken>>(\n id: Id,\n def: {\n readonly state: SSchema\n readonly actions: AMap\n readonly reducers?: ReducersFromMap<SSchema, AMap>\n },\n): LogixModuleTag<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>> {\n const shape: ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap> = {\n stateSchema: def.state,\n actionSchema: Schema.Union(\n ...Object.entries(def.actions).map(([tag, token]) => {\n const payload = (token as Action.AnyActionToken).schema as AnySchema\n return Schema.Struct(\n payload === Schema.Void\n ? {\n _tag: Schema.Literal(tag),\n payload: Schema.optional(payload),\n }\n : {\n _tag: Schema.Literal(tag),\n payload,\n },\n )\n }),\n ) as unknown as Schema.Schema<ActionsFromMap<AMap>>,\n actionMap: def.actions,\n }\n\n type ShapeState = StateOf<typeof shape>\n type ShapeAction = ActionOf<typeof shape>\n\n // Normalize tag-keyed reducers into `_tag -> (state, action, sink?) 
=> state` for the runtime.\n const reducers =\n def.reducers &&\n (Object.fromEntries(\n Object.entries(def.reducers).map(([tag, reducer]) => [\n tag,\n (state: ShapeState, action: ShapeAction, sink?: (path: string | FieldPath) => void) =>\n // Relies on the runtime `_tag` convention: only actions matching the current tag are routed to this reducer.\n (reducer as any)(\n state,\n action as {\n readonly _tag: string\n readonly payload: unknown\n },\n sink,\n ) as ShapeState,\n ]),\n ) as Record<\n string,\n (state: ShapeState, action: ShapeAction, sink?: (path: string | FieldPath) => void) => ShapeState\n >)\n\n class ModuleTag extends Context.Tag(`@logixjs/Module/${id}`)<\n ModuleTag,\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>\n >() {}\n\n const tag = ModuleTag\n\n const moduleTag = Object.assign(tag, {\n _kind: 'ModuleTag' as const,\n id,\n shape,\n stateSchema: shape.stateSchema,\n actionSchema: shape.actionSchema,\n actions: shape.actionMap,\n reducers: def.reducers,\n /**\n * Build a Logic program for the current Module:\n * - Read its ModuleRuntime from Context at runtime.\n * - Build a BoundApi from the runtime.\n * - Pass the BoundApi to the caller to build business logic.\n */\n logic: <R = unknown, E = never>(\n build: (api: import('./core/module.js').BoundApi<typeof shape, R>) => ModuleLogic<typeof shape, R, E>,\n ): ModuleLogic<typeof shape, R, E> => {\n const logicEffect = Effect.gen(function* () {\n const runtime = yield* tag\n const logicUnit = yield* Effect.serviceOption(LogicDiagnostics.LogicUnitServiceTag).pipe(\n Effect.map(Option.getOrUndefined),\n )\n const phaseService = yield* Effect.serviceOption(LogicDiagnostics.LogicPhaseServiceTag).pipe(\n Effect.map(Option.getOrUndefined),\n )\n const api = BoundApiRuntime.make<typeof shape, R>(shape, runtime, {\n getPhase: () => phaseService?.current ?? 
'run',\n phaseService,\n moduleId: id,\n logicUnit,\n })\n\n let built: unknown\n try {\n built = build(api)\n } catch (err) {\n // Convert synchronously thrown LogicPhaseError into Effect.fail so runSync won't treat it as an \"async pending fiber\".\n if ((err as any)?._tag === 'LogicPhaseError') {\n return yield* Effect.fail(err as any)\n }\n throw err\n }\n\n if (LogicPlanMarker.isLogicPlanEffect(built)) {\n return yield* built as Effect.Effect<any, any, any>\n }\n\n const isLogicPlan = (value: unknown): value is import('./core/module.js').LogicPlan<typeof shape, R, E> =>\n Boolean(value && typeof value === 'object' && 'setup' in (value as any) && 'run' in (value as any))\n\n const plan = isLogicPlan(built)\n ? built\n : ({\n setup: Effect.void,\n run: built as Effect.Effect<any, any, any>,\n } satisfies import('./core/module.js').LogicPlan<typeof shape, R, E>)\n\n return plan\n })\n\n LogicPlanMarker.markAsLogicPlanEffect(logicEffect)\n return logicEffect\n },\n\n /**\n * live: given an initial state and a set of logics, construct a scoped ModuleRuntime Layer.\n *\n * Env conventions:\n * - R represents extra environment required by the logics (services / platform, etc.).\n * - ModuleRuntime itself only depends on Scope.Scope and is managed by Layer.scoped.\n */\n live: <R = never, E = never>(\n initial: StateOf<typeof shape>,\n ...logics: Array<ModuleLogic<typeof shape, R, E>>\n ): Layer.Layer<import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>, E, R> =>\n Layer.scoped(\n tag,\n ModuleRuntimeImpl.make<StateOf<typeof shape>, ActionOf<typeof shape>, R>(initial, {\n tag,\n logics: logics as ReadonlyArray<Effect.Effect<any, any, any>>,\n moduleId: id,\n reducers,\n }),\n ) as unknown as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n E,\n R\n >,\n\n /**\n * implement: build a ModuleImpl blueprint from Module definition + initial state + a set of logics.\n *\n * - R 
represents the Env required by the logics.\n * - The returned ModuleImpl.layer carries R as its input environment.\n * - withLayer/withLayers can progressively narrow R to a more concrete Env (even never).\n */\n implement: <R = never>(config: {\n initial: StateOf<typeof shape>\n logics?: Array<ModuleLogic<typeof shape, R, never>>\n imports?: ReadonlyArray<Layer.Layer<any, any, any> | ModuleImpl<any, AnyModuleShape, any>>\n /**\n * processes: a set of long-lived flows bound to this Module implementation (including Link).\n *\n * - These Effects will be forked by the runtime container (e.g. Runtime.make).\n * - Types use relaxed E/R to enable composing cross-module orchestration logic.\n * - Business code typically builds these flows via Link.make.\n */\n processes?: ReadonlyArray<Effect.Effect<void, any, any>>\n /**\n * stateTransaction: module-level StateTransaction config.\n *\n * - If instrumentation is not provided, fall back to Runtime-level config (if any) or NODE_ENV defaults.\n * - If instrumentation is provided, it takes precedence over Runtime-level config and defaults.\n */\n stateTransaction?: ModuleImplementStateTransactionOptions\n }): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, R> => {\n const importedModules = (config.imports ?? 
[]).flatMap((item) => {\n if ((item as ModuleImpl<any, AnyModuleShape, any>)._tag === 'ModuleImpl') {\n return [\n (item as ModuleImpl<any, AnyModuleShape, any>).module as unknown as Context.Tag<\n any,\n import('./core/module.js').ModuleRuntime<any, any>\n >,\n ]\n }\n return []\n })\n\n const baseLayer = Layer.scoped(\n tag,\n ModuleRuntimeImpl.make<StateOf<typeof shape>, ActionOf<typeof shape>, R>(config.initial, {\n tag,\n logics: (config.logics || []) as ReadonlyArray<Effect.Effect<any, any, any>>,\n processes: (config.processes || []) as ReadonlyArray<Effect.Effect<void, any, any>>,\n moduleId: id,\n imports: importedModules,\n reducers,\n stateTransaction: config.stateTransaction,\n }),\n ) as unknown as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n\n const processes = config.processes ?? []\n\n const makeImplWithLayer = (\n layer: Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> => ({\n _tag: 'ModuleImpl',\n module: moduleTag as unknown as LogixModuleTag<\n Id,\n ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>\n >,\n layer,\n processes,\n stateTransaction: config.stateTransaction,\n withLayer: (\n extra: Layer.Layer<any, never, any>,\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> => {\n const provided = (\n layer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n ).pipe(Layer.provide(extra as Layer.Layer<any, never, any>))\n\n const merged = Layer.mergeAll(provided, extra as Layer.Layer<any, never, any>) as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n\n return makeImplWithLayer(merged)\n },\n withLayers: 
(\n ...extras: ReadonlyArray<Layer.Layer<any, never, any>>\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> =>\n extras.reduce<ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any>>(\n (implAcc, extra) => implAcc.withLayer(extra),\n makeImplWithLayer(\n layer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n ),\n ),\n })\n\n // Start from baseLayer and layer-in imports (Layer or other ModuleImpl.layer) sequentially.\n const initialImpl = makeImplWithLayer(\n baseLayer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n )\n\n const imports = config.imports ?? []\n\n const finalImpl = imports.reduce<\n ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any>\n >((implAcc, item) => {\n const layer =\n (item as ModuleImpl<any, AnyModuleShape, any>)._tag === 'ModuleImpl'\n ? 
(item as ModuleImpl<any, AnyModuleShape, any>).layer\n : (item as Layer.Layer<any, any, any>)\n\n return implAcc.withLayer(layer as Layer.Layer<any, never, any>)\n }, initialImpl)\n\n return finalImpl\n },\n })\n\n return moduleTag as LogixModuleTag<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>>\n}\n","import {\n Effect,\n Stream,\n SubscriptionRef,\n PubSub,\n Scope,\n Context,\n Ref,\n FiberRef,\n Option,\n Queue,\n Duration,\n Chunk,\n} from 'effect'\nimport type { LogicPlan, ModuleRuntime as PublicModuleRuntime, StateChangeWithMeta } from './module.js'\nimport * as Lifecycle from './Lifecycle.js'\nimport * as Debug from './DebugSink.js'\nimport { currentConvergeStaticIrCollectors } from './ConvergeStaticIrCollector.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport type * as ModuleTraits from './ModuleTraits.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport * as RuntimeKernel from './RuntimeKernel.js'\nimport * as FullCutoverGate from './FullCutoverGate.js'\nimport * as KernelRef from './KernelRef.js'\nimport * as RuntimeServiceBuiltins from './RuntimeServiceBuiltins.js'\nimport {\n getDefaultStateTxnInstrumentation,\n isDevEnv,\n ReadQueryStrictGateConfigTag,\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n} from './env.js'\nimport type {\n StateTransactionInstrumentation,\n StateTransactionOverrides,\n TraitConvergeTimeSlicingPatch,\n TxnLanesPatch,\n} from './env.js'\nimport { normalizeNonEmptyString } from './normalize.js'\nimport { EvidenceCollectorTag } from '../../observability/evidenceCollector.js'\nimport * as EffectOp from '../../effect-op.js'\nimport { makeRunOperation } from './ModuleRuntime.operation.js'\nimport { makeDispatchOps } from './ModuleRuntime.dispatch.js'\nimport { makeEffectsRegistry } from './ModuleRuntime.effects.js'\nimport { makeTransactionOps } from './ModuleRuntime.transaction.js'\nimport { makeResolveConcurrencyPolicy } from 
'./ModuleRuntime.concurrencyPolicy.js'\nimport { makeResolveTxnLanePolicy } from './ModuleRuntime.txnLanePolicy.js'\nimport {\n makeResolveTraitConvergeConfig,\n type ResolvedTraitConvergeConfig,\n} from './ModuleRuntime.traitConvergeConfig.js'\nimport type { DirtyAllReason } from '../../field-path.js'\nimport * as RowId from '../../state-trait/rowid.js'\nimport * as StateTraitBuild from '../../state-trait/build.js'\nimport { exportConvergeStaticIr } from '../../state-trait/converge-ir.js'\nimport { makeConvergeExecIr } from '../../state-trait/converge-exec-ir.js'\nimport * as StateTraitConverge from '../../state-trait/converge.js'\nimport * as StateTraitValidate from '../../state-trait/validate.js'\nimport { installInternalHooks, type TraitState } from './ModuleRuntime.internalHooks.js'\nimport { RootContextTag, type RootContext } from './RootContext.js'\nimport * as ProcessRuntime from './process/ProcessRuntime.js'\nimport * as ReadQuery from './ReadQuery.js'\nimport * as SelectorGraph from './SelectorGraph.js'\nimport {\n getRegisteredRuntime,\n getRuntimeByModuleAndInstance,\n registerRuntime,\n registerRuntimeByInstanceKey,\n unregisterRuntime,\n unregisterRuntimeByInstanceKey,\n} from './ModuleRuntime.registry.js'\nimport { makeEnqueueTransaction } from './ModuleRuntime.txnQueue.js'\nimport { runModuleLogics } from './ModuleRuntime.logics.js'\nimport * as ConcurrencyDiagnostics from './ConcurrencyDiagnostics.js'\n\nexport { registerRuntime, unregisterRuntime, getRegisteredRuntime, getRuntimeByModuleAndInstance }\n\nexport interface ModuleRuntimeOptions<S, A, R = never> {\n readonly tag?: Context.Tag<any, PublicModuleRuntime<S, A>>\n /**\n * List of \"child modules\" resolvable within the current instance scope (imports-scope):\n * - Used only to build a minimal imports injector (ModuleToken -> ModuleRuntime).\n * - Do not capture the whole Context into ModuleRuntime (avoid accidentally retaining root/base services).\n */\n readonly imports?: 
ReadonlyArray<Context.Tag<any, PublicModuleRuntime<any, any>>>\n readonly logics?: ReadonlyArray<Effect.Effect<any, any, R> | LogicPlan<any, R, any>>\n readonly processes?: ReadonlyArray<Effect.Effect<void, any, any>>\n readonly moduleId?: string\n /** Stable instance anchor (single source of truth); defaults to a monotonic sequence. Never default to randomness/time. */\n readonly instanceId?: string\n readonly createState?: Effect.Effect<SubscriptionRef.SubscriptionRef<S>, never, Scope.Scope>\n readonly createActionHub?: Effect.Effect<PubSub.PubSub<A>, never, Scope.Scope>\n /**\n * Primary reducer map: `_tag -> (state, action) => nextState`.\n *\n * - If provided, dispatch will synchronously apply the reducer before publishing the Action.\n * - If a `_tag` has no reducer, behavior matches the current watcher-only mode.\n */\n readonly reducers?: Readonly<\n Record<string, (state: S, action: A, sink?: (path: StateTransaction.StatePatchPath) => void) => S>\n >\n /**\n * Module-level StateTransaction config:\n * - If instrumentation is provided, it takes precedence over the Runtime-level config and NODE_ENV defaults.\n * - Otherwise, fall back to the Runtime-level config (if any) or getDefaultStateTxnInstrumentation().\n */\n readonly stateTransaction?: {\n readonly instrumentation?: StateTransactionInstrumentation\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeMode?: 'auto' | 'full' | 'dirty'\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly txnLanes?: TxnLanesPatch\n }\n}\n\nlet nextInstanceSeq = 0\n\nconst makeDefaultInstanceId = (): string => {\n nextInstanceSeq += 1\n return `i${nextInstanceSeq}`\n}\n\nexport const make = <S, A, R = never>(\n initialState: S,\n options: ModuleRuntimeOptions<S, A, R> = {},\n): Effect.Effect<PublicModuleRuntime<S, A>, never, Scope.Scope | R> => {\n const program = Effect.gen(function* () {\n const stateRef = options.createState ? 
yield* options.createState : yield* SubscriptionRef.make(initialState)\n\n const commitHub = yield* PubSub.unbounded<StateChangeWithMeta<S>>()\n const actionCommitHub = yield* PubSub.unbounded<StateChangeWithMeta<A>>()\n let commitHubSubscriberCount = 0\n\n const fromCommitHub = Stream.unwrapScoped(\n Effect.gen(function* () {\n commitHubSubscriberCount += 1\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n commitHubSubscriberCount = Math.max(0, commitHubSubscriberCount - 1)\n }),\n )\n return Stream.fromPubSub(commitHub)\n }),\n )\n\n const moduleId = options.moduleId ?? 'unknown'\n const instanceId = normalizeNonEmptyString(options.instanceId) ?? makeDefaultInstanceId()\n const runtimeLabel = yield* FiberRef.get(Debug.currentRuntimeLabel)\n const lifecycle = yield* Lifecycle.makeLifecycleManager({\n moduleId,\n instanceId,\n runtimeLabel,\n })\n const concurrencyDiagnostics = yield* ConcurrencyDiagnostics.make({\n moduleId: options.moduleId,\n instanceId,\n })\n\n // Resolve StateTransaction instrumentation:\n // - Prefer ModuleRuntimeOptions.stateTransaction.instrumentation.\n // - Otherwise read the default from the Runtime-level StateTransactionConfig service.\n // - Finally fall back to NODE_ENV-based defaults.\n const runtimeConfigOpt = yield* Effect.serviceOption(StateTransactionConfigTag)\n const runtimeInstrumentation: StateTransactionInstrumentation | undefined = Option.isSome(runtimeConfigOpt)\n ? runtimeConfigOpt.value.instrumentation\n : undefined\n\n const instrumentation: StateTransactionInstrumentation =\n options.stateTransaction?.instrumentation ?? runtimeInstrumentation ?? 
getDefaultStateTxnInstrumentation()\n\n const resolveTraitConvergeConfig = makeResolveTraitConvergeConfig({\n moduleId: options.moduleId,\n stateTransaction: options.stateTransaction,\n })\n\n const resolveConcurrencyPolicy = makeResolveConcurrencyPolicy({\n moduleId: options.moduleId,\n diagnostics: concurrencyDiagnostics,\n })\n\n const resolveTxnLanePolicy = makeResolveTxnLanePolicy({\n moduleId: options.moduleId,\n stateTransaction: options.stateTransaction,\n })\n\n const actionHub = options.createActionHub\n ? yield* options.createActionHub\n : yield* Effect.gen(function* () {\n const policy = yield* resolveConcurrencyPolicy()\n return yield* PubSub.bounded<A>(policy.losslessBackpressureCapacity)\n })\n\n const convergePlanCacheCapacity = 128\n const traitState: TraitState = {\n program: undefined,\n convergePlanCache: undefined,\n convergeGeneration: {\n generation: 0,\n generationBumpCount: 0,\n },\n pendingCacheMissReason: undefined,\n lastConvergeIrKeys: undefined,\n listConfigs: [],\n }\n const rowIdStore = new RowId.RowIdStore(instanceId)\n const selectorGraph = SelectorGraph.make<S>({\n moduleId,\n instanceId,\n getFieldPathIdRegistry: () => {\n const convergeIr: any = (traitState.program as any)?.convergeIr\n if (!convergeIr || convergeIr.configError) return undefined\n return convergeIr.fieldPathIdRegistry\n },\n })\n\n // StateTransaction context:\n // - Maintain a single active transaction per ModuleRuntime;\n // - Aggregate state writes from all entrypoints on this instance (dispatch / Traits / source-refresh, etc.);\n // - New entrypoints (e.g. 
service writebacks / devtools operations) must also go through the same context + queue.\n const txnContext = StateTransaction.makeContext<S>({\n moduleId,\n instanceId,\n instrumentation,\n getFieldPathIdRegistry: () => {\n const convergeIr: any = (traitState.program as any)?.convergeIr\n if (!convergeIr || convergeIr.configError) return undefined\n return convergeIr.fieldPathIdRegistry\n },\n })\n\n const recordStatePatch: RuntimeInternals['txn']['recordStatePatch'] = (\n path,\n reason,\n from,\n to,\n traitNodeId,\n stepId,\n ): void => {\n StateTransaction.recordPatch(txnContext, path, reason, from, to, traitNodeId, stepId)\n }\n\n const updateDraft: RuntimeInternals['txn']['updateDraft'] = (nextState): void => {\n if (!txnContext.current) return\n StateTransaction.updateDraft(txnContext, nextState as S)\n }\n\n const traitConvergeTimeSlicingSignal = yield* Queue.unbounded<void>()\n const traitConvergeTimeSlicingState: {\n readonly signal: Queue.Queue<void>\n readonly backlogDirtyPaths: Set<StateTransaction.StatePatchPath>\n backlogDirtyAllReason?: DirtyAllReason\n firstPendingAtMs: number | undefined\n lastTouchedAtMs: number | undefined\n latestConvergeConfig: ResolvedTraitConvergeConfig | undefined\n capturedContext:\n | {\n readonly runtimeLabel: string | undefined\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n readonly debugSinks: ReadonlyArray<Debug.Sink>\n readonly overrides: StateTransactionOverrides | undefined\n }\n | undefined\n } = {\n signal: traitConvergeTimeSlicingSignal,\n backlogDirtyPaths: new Set(),\n backlogDirtyAllReason: undefined,\n firstPendingAtMs: undefined,\n lastTouchedAtMs: undefined,\n latestConvergeConfig: undefined,\n capturedContext: undefined,\n }\n\n const moduleTraitsState: {\n frozen: boolean\n contributions: Array<ModuleTraits.TraitContribution>\n snapshot: ModuleTraits.ModuleTraitsSnapshot | undefined\n } = {\n frozen: false,\n contributions: [],\n snapshot: undefined,\n }\n\n /**\n * Transaction history:\n * - 
Keeps the latest N StateTransaction records per ModuleRuntime.\n * - Used only for dev/test devtools features (e.g. time-travel, txn summary views).\n * - Capacity is bounded to avoid unbounded memory growth in long-running apps.\n */\n const maxTxnHistory = 500\n const txnHistory: Array<StateTransaction.StateTransaction<S>> = []\n const txnById = new Map<string, StateTransaction.StateTransaction<S>>()\n\n /**\n * Transaction queue:\n * - Executes each logic entrypoint (dispatch / source-refresh / future extensions) serially in FIFO order.\n * - Guarantees at most one transaction at a time per instance; different instances can still run in parallel.\n */\n const kernelImplementationRef = yield* KernelRef.resolveKernelImplementationRef()\n const runtimeServicesOverrides = yield* RuntimeKernel.resolveRuntimeServicesOverrides({\n moduleId: options.moduleId,\n })\n\n const runtimeServicesRegistryOpt = yield* Effect.serviceOption(RuntimeKernel.RuntimeServicesRegistryTag)\n const runtimeServicesRegistry = Option.isSome(runtimeServicesRegistryOpt)\n ? 
runtimeServicesRegistryOpt.value\n : undefined\n\n const resolveRuntimeServiceImpls = <Service>(\n serviceId: string,\n builtin: ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>>,\n ): ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>> => {\n const extraRaw = runtimeServicesRegistry?.implsByServiceId[serviceId]\n if (!extraRaw || extraRaw.length === 0) return builtin\n\n const extra = extraRaw as ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>>\n const seen = new Set<string>()\n const out: Array<RuntimeKernel.RuntimeServiceImpl<Service>> = []\n\n for (const impl of builtin) {\n seen.add(impl.implId)\n out.push(impl)\n }\n\n for (const impl of extra) {\n if (!impl || typeof impl.implId !== 'string' || impl.implId.length === 0) continue\n if (seen.has(impl.implId)) continue\n seen.add(impl.implId)\n out.push(impl)\n }\n\n return out\n }\n\n const makeTxnQueueBuiltin = makeEnqueueTransaction({\n moduleId: options.moduleId,\n instanceId,\n resolveConcurrencyPolicy,\n diagnostics: concurrencyDiagnostics,\n })\n\n const enqueueTxnSel = RuntimeKernel.selectRuntimeService(\n 'txnQueue',\n resolveRuntimeServiceImpls('txnQueue', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeTxnQueueBuiltin,\n },\n {\n implId: 'trace',\n implVersion: 'v1',\n make: makeTxnQueueBuiltin,\n notes: 'no-op wrapper (used for override isolation tests)',\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const enqueueTransaction = yield* enqueueTxnSel.impl.make.pipe(\n Effect.provideService(RuntimeServiceBuiltins.RuntimeServiceBuiltinsTag, {\n getBuiltinMake: (serviceId) =>\n serviceId === 'txnQueue'\n ? 
(makeTxnQueueBuiltin as Effect.Effect<unknown, never, any>)\n : Effect.dieMessage(`[Logix] builtin make not available: ${serviceId}`),\n } satisfies RuntimeServiceBuiltins.RuntimeServiceBuiltins),\n )\n\n const makeOperationRunnerBuiltin = Effect.succeed(\n makeRunOperation({\n optionsModuleId: options.moduleId,\n instanceId,\n txnContext,\n }),\n )\n\n const runOperationSel = RuntimeKernel.selectRuntimeService(\n 'operationRunner',\n resolveRuntimeServiceImpls('operationRunner', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeOperationRunnerBuiltin,\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const runOperation = yield* runOperationSel.impl.make.pipe(\n Effect.provideService(RuntimeServiceBuiltins.RuntimeServiceBuiltinsTag, {\n getBuiltinMake: (serviceId) =>\n serviceId === 'operationRunner'\n ? (makeOperationRunnerBuiltin as Effect.Effect<unknown, never, any>)\n : Effect.dieMessage(`[Logix] builtin make not available: ${serviceId}`),\n } satisfies RuntimeServiceBuiltins.RuntimeServiceBuiltins),\n )\n\n yield* runOperation(\n 'lifecycle',\n 'module:init',\n { meta: { moduleId, instanceId } },\n Debug.record({\n type: 'module:init',\n moduleId,\n instanceId,\n }),\n )\n\n // Initial state snapshot:\n // - Emit a state:update event to write the initial state into the Debug stream.\n // - Helps Devtools show \"Current State\" even before any business interaction.\n // - Provides frame 0 for the timeline so later events can build time-travel views on top of it.\n const initialSnapshot = yield* SubscriptionRef.get(stateRef)\n yield* runOperation(\n 'state',\n 'state:init',\n { meta: { moduleId, instanceId } },\n Debug.record({\n type: 'state:update',\n moduleId,\n state: initialSnapshot,\n instanceId,\n txnSeq: 0,\n }),\n )\n\n const makeTransactionBuiltin = Effect.sync(() =>\n makeTransactionOps<S>({\n moduleId,\n optionsModuleId: options.moduleId,\n instanceId,\n stateRef,\n commitHub,\n shouldPublishCommitHub: () => commitHubSubscriberCount > 0,\n 
recordStatePatch,\n onCommit: ({ state, meta, dirtySet, diagnosticsLevel }) =>\n selectorGraph.onCommit(state, meta, dirtySet, diagnosticsLevel),\n enqueueTransaction,\n runOperation,\n txnContext,\n traitConvergeTimeSlicing: traitConvergeTimeSlicingState,\n traitRuntime: {\n getProgram: () => traitState.program,\n getConvergePlanCache: () => traitState.convergePlanCache,\n getConvergeGeneration: () => traitState.convergeGeneration,\n getPendingCacheMissReason: () => traitState.pendingCacheMissReason,\n setPendingCacheMissReason: (next) => {\n traitState.pendingCacheMissReason = next\n },\n rowIdStore,\n getListConfigs: () => traitState.listConfigs,\n },\n resolveTraitConvergeConfig,\n isDevEnv,\n maxTxnHistory,\n txnHistory,\n txnById,\n }),\n )\n\n const transactionSel = RuntimeKernel.selectRuntimeService(\n 'transaction',\n resolveRuntimeServiceImpls('transaction', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeTransactionBuiltin,\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const { readState, setStateInternal, runWithStateTransaction } = yield* transactionSel.impl.make.pipe(\n Effect.provideService(RuntimeServiceBuiltins.RuntimeServiceBuiltinsTag, {\n getBuiltinMake: (serviceId) =>\n serviceId === 'transaction'\n ? 
(makeTransactionBuiltin as Effect.Effect<unknown, never, any>)\n : Effect.dieMessage(`[Logix] builtin make not available: ${serviceId}`),\n } satisfies RuntimeServiceBuiltins.RuntimeServiceBuiltins),\n )\n\n let deferredFlushCoalescedCount = 0\n let deferredFlushCanceledCount = 0\n\n const runDeferredConvergeFlush = (args: {\n readonly dirtyPathsSnapshot: ReadonlyArray<StateTransaction.StatePatchPath>\n readonly dirtyAllReason?: DirtyAllReason\n readonly lane: 'urgent' | 'nonUrgent'\n readonly slice?: { readonly start: number; readonly end: number; readonly total: number }\n readonly captureOpSeq?: boolean\n }): Effect.Effect<{ readonly txnSeq: number; readonly txnId?: string; readonly opSeq?: number }> => {\n let capturedTxnSeq = 0\n let capturedTxnId: string | undefined = undefined\n let capturedOpSeq: number | undefined = undefined\n\n const details: any = { dirtyPathCount: args.dirtyPathsSnapshot.length }\n if (args.dirtyAllReason) {\n details.dirtyAllReason = args.dirtyAllReason\n }\n if (args.slice) {\n details.sliceStart = args.slice.start\n details.sliceEnd = args.slice.end\n details.sliceTotal = args.slice.total\n }\n\n return enqueueTransaction(\n args.lane,\n runOperation(\n 'lifecycle',\n 'trait:deferredConvergeFlush',\n {\n payload: { dirtyPathCount: args.dirtyPathsSnapshot.length },\n meta: { moduleId, instanceId },\n },\n runWithStateTransaction(\n {\n kind: 'trait:deferred_flush',\n name: 'trait:deferredConvergeFlush',\n details,\n },\n () =>\n Effect.gen(function* () {\n const current: any = txnContext.current\n if (current) {\n capturedTxnSeq = current.txnSeq\n capturedTxnId = current.txnId\n }\n\n if (args.captureOpSeq) {\n const opSeqRaw = yield* FiberRef.get(Debug.currentOpSeq)\n if (typeof opSeqRaw === 'number' && Number.isFinite(opSeqRaw) && opSeqRaw >= 0) {\n capturedOpSeq = Math.floor(opSeqRaw)\n }\n }\n\n if (!current) return\n if (args.dirtyAllReason) {\n current.dirtyAllReason = args.dirtyAllReason\n }\n for (const p of 
args.dirtyPathsSnapshot) {\n if (typeof p === 'number' && Number.isFinite(p) && p >= 0) {\n current.dirtyPathIds.add(Math.floor(p))\n }\n }\n }),\n ),\n ),\n ).pipe(\n Effect.as({\n txnSeq: capturedTxnSeq,\n txnId: capturedTxnId,\n opSeq: capturedOpSeq,\n } as const),\n )\n }\n\n // 043: time-slicing scheduler for deferred converge (debounce + maxLag); triggered by in-txn signals and enqueued outside the txn.\n yield* Effect.forkScoped(\n Effect.forever(\n Effect.gen(function* () {\n yield* Queue.take(traitConvergeTimeSlicingState.signal)\n\n while (true) {\n const config = traitConvergeTimeSlicingState.latestConvergeConfig?.traitConvergeTimeSlicing\n if (!config?.enabled) {\n traitConvergeTimeSlicingState.backlogDirtyPaths.clear()\n traitConvergeTimeSlicingState.backlogDirtyAllReason = undefined\n traitConvergeTimeSlicingState.firstPendingAtMs = undefined\n traitConvergeTimeSlicingState.lastTouchedAtMs = undefined\n return\n }\n\n const now = Date.now()\n const firstPendingAtMs = traitConvergeTimeSlicingState.firstPendingAtMs ?? now\n traitConvergeTimeSlicingState.firstPendingAtMs = firstPendingAtMs\n\n const captured = traitConvergeTimeSlicingState.capturedContext\n const txnLanePolicy = yield* captured?.overrides\n ? Effect.provideService(resolveTxnLanePolicy(), StateTransactionOverridesTag, captured.overrides)\n : resolveTxnLanePolicy()\n\n const debounceMs = txnLanePolicy.enabled ? txnLanePolicy.debounceMs : config.debounceMs\n const maxLagMs = txnLanePolicy.enabled ? 
txnLanePolicy.maxLagMs : config.maxLagMs\n\n const elapsedMs = Math.max(0, now - firstPendingAtMs)\n const remainingLagMs = Math.max(0, maxLagMs - elapsedMs)\n if (remainingLagMs <= 0) {\n break\n }\n\n const sleepMs = Math.max(0, Math.min(debounceMs, remainingLagMs))\n if (sleepMs > 0) {\n yield* Effect.sleep(Duration.millis(sleepMs))\n } else {\n yield* Effect.yieldNow()\n }\n\n const drained = yield* Queue.takeAll(traitConvergeTimeSlicingState.signal)\n if (Chunk.isEmpty(drained)) {\n break\n }\n }\n\n const dirtyPathsSnapshot = Array.from(traitConvergeTimeSlicingState.backlogDirtyPaths)\n traitConvergeTimeSlicingState.backlogDirtyPaths.clear()\n const dirtyAllReasonSnapshot = traitConvergeTimeSlicingState.backlogDirtyAllReason\n traitConvergeTimeSlicingState.backlogDirtyAllReason = undefined\n const firstPendingAtMsForRun = traitConvergeTimeSlicingState.firstPendingAtMs\n traitConvergeTimeSlicingState.firstPendingAtMs = undefined\n traitConvergeTimeSlicingState.lastTouchedAtMs = undefined\n\n if (dirtyPathsSnapshot.length === 0 && !dirtyAllReasonSnapshot) {\n return\n }\n\n const program = traitState.program\n if (!program?.convergeExecIr || program.convergeExecIr.topoOrderDeferredInt32.length === 0) {\n return\n }\n\n const captured = traitConvergeTimeSlicingState.capturedContext\n const txnLanePolicy = yield* captured?.overrides\n ? 
Effect.provideService(resolveTxnLanePolicy(), StateTransactionOverridesTag, captured.overrides)\n : resolveTxnLanePolicy()\n\n const shouldEmitLaneEvidence = captured != null && captured.diagnosticsLevel !== 'off'\n const shouldEmitLaneEvidenceForPolicy =\n shouldEmitLaneEvidence && (txnLanePolicy.enabled || txnLanePolicy.overrideMode != null)\n\n const withCapturedContext = <A2, E2, R2>(eff: Effect.Effect<A2, E2, R2>): Effect.Effect<A2, E2, R2> => {\n let next = eff\n if (captured?.overrides) {\n next = Effect.provideService(next, StateTransactionOverridesTag, captured.overrides)\n }\n if (captured) {\n next = next.pipe(\n Effect.locally(Debug.currentRuntimeLabel, captured.runtimeLabel),\n Effect.locally(Debug.currentDiagnosticsLevel, captured.diagnosticsLevel),\n Effect.locally(Debug.currentDebugSinks, captured.debugSinks),\n )\n }\n return next\n }\n\n const firstPendingAtMs = firstPendingAtMsForRun ?? Date.now()\n\n if (!txnLanePolicy.enabled) {\n const anchor = yield* withCapturedContext(\n runDeferredConvergeFlush({\n dirtyPathsSnapshot,\n dirtyAllReason: dirtyAllReasonSnapshot,\n lane: 'urgent',\n captureOpSeq: shouldEmitLaneEvidenceForPolicy,\n }),\n )\n\n if (shouldEmitLaneEvidenceForPolicy) {\n const reasons: ReadonlyArray<Debug.TxnLaneEvidenceReason> =\n txnLanePolicy.overrideMode === 'forced_off'\n ? ['forced_off']\n : txnLanePolicy.overrideMode === 'forced_sync'\n ? ['forced_sync']\n : ['disabled']\n\n const evidence: Debug.TxnLaneEvidence = {\n anchor: {\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n ...(typeof anchor.opSeq === 'number' ? 
{ opSeq: anchor.opSeq } : {}),\n },\n lane: 'urgent',\n kind: 'trait:deferred_flush',\n policy: txnLanePolicy,\n backlog: {\n pendingCount: 0,\n ageMs: Math.max(0, Date.now() - firstPendingAtMs),\n coalescedCount: deferredFlushCoalescedCount,\n canceledCount: deferredFlushCanceledCount,\n },\n starvation: { triggered: false },\n reasons,\n }\n\n yield* withCapturedContext(\n Debug.record({\n type: 'trace:txn-lane',\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n txnId: anchor.txnId,\n data: { evidence },\n }),\n )\n }\n\n return\n }\n\n const totalSteps = program.convergeExecIr.topoOrderDeferredInt32.length\n\n let cursor = 0\n let chunkSize = Math.min(32, totalSteps)\n let yieldCount = 0\n let lastYieldAtMs = Date.now()\n\n const readIsInputPending = (): boolean => {\n const nav = (globalThis as any).navigator\n const scheduling = nav?.scheduling\n const isInputPending = scheduling?.isInputPending\n if (typeof isInputPending !== 'function') return false\n try {\n return Boolean(isInputPending.call(scheduling))\n } catch {\n return false\n }\n }\n\n while (cursor < totalSteps) {\n const lagMs = Math.max(0, Date.now() - firstPendingAtMs)\n const lagExceeded = lagMs >= txnLanePolicy.maxLagMs\n const budgetMs = lagExceeded\n ? 
Math.max(txnLanePolicy.budgetMs, txnLanePolicy.budgetMs * 4)\n : txnLanePolicy.budgetMs\n\n const sliceStart = cursor\n const sliceEnd = Math.min(totalSteps, cursor + chunkSize)\n\n const { sliceDurationMs, anchor } = yield* withCapturedContext(\n Effect.gen(function* () {\n const sliceStartedAtMs = Date.now()\n const anchor = yield* runDeferredConvergeFlush({\n dirtyPathsSnapshot,\n dirtyAllReason: dirtyAllReasonSnapshot,\n lane: 'nonUrgent',\n slice: { start: sliceStart, end: sliceEnd, total: totalSteps },\n captureOpSeq: shouldEmitLaneEvidence,\n })\n const sliceDurationMs = Math.max(0, Date.now() - sliceStartedAtMs)\n return { sliceDurationMs, anchor } as const\n }),\n )\n\n cursor = sliceEnd\n\n // Keep the signal queue bounded during long backlog processing.\n yield* Queue.takeAll(traitConvergeTimeSlicingState.signal)\n\n const hasPending =\n traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||\n traitConvergeTimeSlicingState.backlogDirtyAllReason != null\n const willCoalesce = txnLanePolicy.allowCoalesce && !lagExceeded && hasPending\n\n const elapsedSinceLastYieldMs = Math.max(0, Date.now() - lastYieldAtMs)\n const budgetExceeded = budgetMs > 0 && Number.isFinite(budgetMs) && elapsedSinceLastYieldMs >= budgetMs\n const forcedFrameYield = elapsedSinceLastYieldMs >= 16\n const inputPending =\n !willCoalesce && txnLanePolicy.yieldStrategy === 'inputPending' ? readIsInputPending() : false\n\n const shouldYield =\n cursor < totalSteps && !willCoalesce && (inputPending || budgetExceeded || forcedFrameYield)\n\n const yieldReason: Debug.TxnLaneNonUrgentYieldReason = !shouldYield\n ? 'none'\n : inputPending\n ? 'input_pending'\n : budgetExceeded\n ? 
'budget_exceeded'\n : 'forced_frame_yield'\n\n if (shouldEmitLaneEvidence) {\n yield* withCapturedContext(\n Effect.gen(function* () {\n const reasons: Array<Debug.TxnLaneEvidenceReason> = ['queued_non_urgent']\n if (lagExceeded) reasons.push('max_lag_forced', 'starvation_protection')\n if (yieldReason === 'budget_exceeded') reasons.push('budget_yield')\n\n const evidence: Debug.TxnLaneEvidence = {\n anchor: {\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n ...(typeof anchor.opSeq === 'number' ? { opSeq: anchor.opSeq } : {}),\n },\n lane: 'nonUrgent',\n kind: 'trait:deferred_flush',\n policy: txnLanePolicy,\n backlog: {\n pendingCount: Math.max(0, totalSteps - sliceEnd),\n ageMs: lagMs,\n coalescedCount: deferredFlushCoalescedCount,\n canceledCount: deferredFlushCanceledCount,\n },\n budget: {\n budgetMs,\n sliceDurationMs,\n yieldCount,\n yielded: shouldYield,\n yieldReason,\n },\n starvation: lagExceeded ? { triggered: true, reason: 'max_lag_exceeded' } : { triggered: false },\n reasons,\n }\n\n yield* Debug.record({\n type: 'trace:txn-lane',\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n txnId: anchor.txnId,\n data: { evidence },\n })\n }),\n )\n }\n\n if (willCoalesce) {\n // Ensure the scheduler wakes again for the new backlog after we cancel.\n deferredFlushCoalescedCount += 1\n deferredFlushCanceledCount += 1\n if (shouldEmitLaneEvidence) {\n yield* withCapturedContext(\n Debug.record({\n type: 'trace:txn-lane',\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n txnId: anchor.txnId,\n data: {\n evidence: {\n anchor: {\n moduleId,\n instanceId,\n txnSeq: anchor.txnSeq,\n ...(typeof anchor.opSeq === 'number' ? 
{ opSeq: anchor.opSeq } : {}),\n },\n lane: 'nonUrgent',\n kind: 'trait:deferred_flush',\n policy: txnLanePolicy,\n backlog: {\n pendingCount: Math.max(0, totalSteps - cursor),\n ageMs: lagMs,\n coalescedCount: deferredFlushCoalescedCount,\n canceledCount: deferredFlushCanceledCount,\n },\n budget: {\n budgetMs,\n sliceDurationMs,\n yieldCount,\n yielded: false,\n yieldReason: 'none',\n },\n starvation: { triggered: false },\n reasons: ['coalesced', 'canceled'],\n } satisfies Debug.TxnLaneEvidence,\n },\n }),\n )\n }\n yield* Queue.offer(traitConvergeTimeSlicingState.signal, undefined)\n break\n }\n\n if (budgetMs > 0 && Number.isFinite(budgetMs)) {\n if (sliceDurationMs > budgetMs && chunkSize > 1) {\n chunkSize = Math.max(1, Math.floor(chunkSize / 2))\n } else if (sliceDurationMs < budgetMs / 2) {\n chunkSize = Math.min(totalSteps, chunkSize * 2)\n }\n }\n\n if (shouldYield) {\n yieldCount += 1\n lastYieldAtMs = Date.now()\n yield* Effect.yieldNow()\n }\n }\n\n // If new backlog arrived while processing, ensure we don't lose wakeup after draining signals.\n if (\n traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||\n traitConvergeTimeSlicingState.backlogDirtyAllReason != null\n ) {\n yield* Queue.offer(traitConvergeTimeSlicingState.signal, undefined)\n }\n }),\n ),\n )\n\n const declaredActionTags = (() => {\n const actionMap = (options.tag as any)?.shape?.actionMap\n if (!actionMap || typeof actionMap !== 'object') {\n return undefined\n }\n return new Set(Object.keys(actionMap))\n })()\n\n const makeDispatchBuiltin = Effect.sync(() =>\n makeDispatchOps<S, A>({\n optionsModuleId: options.moduleId,\n instanceId,\n declaredActionTags,\n initialReducers: options.reducers as any,\n txnContext,\n readState,\n setStateInternal,\n recordStatePatch,\n actionHub,\n actionCommitHub,\n diagnostics: concurrencyDiagnostics,\n enqueueTransaction,\n resolveConcurrencyPolicy,\n runOperation,\n runWithStateTransaction,\n isDevEnv,\n }),\n )\n\n const dispatchSel = 
RuntimeKernel.selectRuntimeService(\n 'dispatch',\n resolveRuntimeServiceImpls('dispatch', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeDispatchBuiltin,\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const dispatchOps = yield* dispatchSel.impl.make.pipe(\n Effect.provideService(RuntimeServiceBuiltins.RuntimeServiceBuiltinsTag, {\n getBuiltinMake: (serviceId) =>\n serviceId === 'dispatch'\n ? (makeDispatchBuiltin as Effect.Effect<unknown, never, any>)\n : Effect.dieMessage(`[Logix] builtin make not available: ${serviceId}`),\n } satisfies RuntimeServiceBuiltins.RuntimeServiceBuiltins),\n )\n\n const runtimeServicesEvidence = RuntimeKernel.makeRuntimeServicesEvidence({\n moduleId: options.moduleId,\n instanceId,\n bindings: [enqueueTxnSel.binding, runOperationSel.binding, transactionSel.binding, dispatchSel.binding],\n overridesApplied: [\n ...enqueueTxnSel.overridesApplied,\n ...runOperationSel.overridesApplied,\n ...transactionSel.overridesApplied,\n ...dispatchSel.overridesApplied,\n ],\n })\n\n if (kernelImplementationRef.kernelId !== 'core') {\n const modeOpt = yield* Effect.serviceOption(RuntimeKernel.FullCutoverGateModeTag)\n const mode = Option.isSome(modeOpt) ? modeOpt.value : 'trial'\n\n if (mode === 'fullCutover') {\n const gate = FullCutoverGate.evaluateFullCutoverGate({\n mode: 'fullCutover',\n requestedKernelId: kernelImplementationRef.kernelId,\n runtimeServicesEvidence,\n diagnosticsLevel: isDevEnv() ? 'light' : 'off',\n })\n\n if (gate.verdict === 'FAIL') {\n const msg = isDevEnv()\n ? 
[\n '[FullCutoverGateFailed] Runtime assembly detected implicit fallback / missing bindings under fullCutover mode.',\n `requestedKernelId: ${kernelImplementationRef.kernelId}`,\n `missingServiceIds: ${gate.missingServiceIds.join(',')}`,\n `fallbackServiceIds: ${gate.fallbackServiceIds.join(',')}`,\n `anchor: moduleId=${gate.anchor.moduleId}, instanceId=${gate.anchor.instanceId}, txnSeq=${gate.anchor.txnSeq}`,\n ].join('\\n')\n : 'Full cutover gate failed'\n\n const err: any = new Error(msg)\n err.name = 'FullCutoverGateFailed'\n err.gate = gate\n err.instanceId = instanceId\n err.moduleId = options.moduleId\n throw err\n }\n }\n }\n\n const runtime: PublicModuleRuntime<S, A> = {\n // Expose moduleId on the runtime so React / Devtools can correlate module information at the view layer.\n moduleId,\n instanceId,\n lifecycleStatus: lifecycle.getStatus,\n getState: readState,\n setState: (next) => setStateInternal(next, '*', 'unknown', undefined, next),\n dispatch: (action) =>\n // Enqueue the txn request to guarantee FIFO serialization within a single instance.\n dispatchOps.dispatch(action),\n dispatchBatch: (actions) => dispatchOps.dispatchBatch(actions),\n dispatchLowPriority: (action) => dispatchOps.dispatchLowPriority(action),\n actions$: Stream.fromPubSub(actionHub),\n actionsWithMeta$: Stream.fromPubSub(actionCommitHub),\n changes: <V>(selector: (s: S) => V) => Stream.map(stateRef.changes, selector).pipe(Stream.changes),\n changesWithMeta: <V>(selector: (s: S) => V) =>\n Stream.map(fromCommitHub, ({ value, meta }) => ({\n value: selector(value),\n meta,\n })),\n changesReadQueryWithMeta: <V>(input: ReadQuery.ReadQueryInput<S, V>) => {\n const compiled: ReadQuery.ReadQueryCompiled<S, V> =\n (input as any)?.staticIr != null &&\n typeof (input as any)?.lane === 'string' &&\n typeof (input as any)?.producer === 'string'\n ? 
(input as any)\n : ReadQuery.compile(input)\n\n if (compiled.lane !== 'static') {\n return Stream.unwrapScoped(\n Effect.gen(function* () {\n const strictGateOpt = yield* Effect.serviceOption(ReadQueryStrictGateConfigTag)\n\n if (Option.isSome(strictGateOpt)) {\n const decision = ReadQuery.evaluateStrictGate({\n config: strictGateOpt.value,\n moduleId,\n instanceId,\n txnSeq: 0,\n compiled,\n })\n\n if (decision.verdict === 'WARN') {\n yield* Debug.record(decision.diagnostic)\n } else if (decision.verdict === 'FAIL') {\n yield* Debug.record(decision.diagnostic)\n yield* Effect.die(decision.error)\n }\n }\n\n return Stream.map(fromCommitHub, ({ value, meta }) => ({\n value: compiled.select(value),\n meta,\n }))\n }),\n )\n }\n\n return Stream.unwrapScoped(\n Effect.gen(function* () {\n const entry = yield* selectorGraph.ensureEntry(compiled)\n entry.subscriberCount += 1\n\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n selectorGraph.releaseEntry(compiled.selectorId)\n }),\n )\n\n if (!entry.hasValue) {\n const current = yield* readState\n try {\n entry.cachedValue = compiled.select(current) as any\n entry.hasValue = true\n entry.cachedAtTxnSeq = 0\n } catch {\n // keep entry empty; commit-time eval will emit diagnostic in diagnostics mode (if enabled)\n }\n }\n\n return Stream.fromPubSub(entry.hub) as Stream.Stream<StateChangeWithMeta<V>>\n }),\n )\n },\n ref: <V = S>(selector?: (s: S) => V): SubscriptionRef.SubscriptionRef<V> => {\n if (!selector) {\n return stateRef as unknown as SubscriptionRef.SubscriptionRef<V>\n }\n\n // Read-only derived view: derive from the root state via selector and forbid writes.\n const readonlyRef = {\n get: Effect.map(SubscriptionRef.get(stateRef), selector),\n modify: () => Effect.dieMessage('Cannot write to a derived ref'),\n } as unknown as Ref.Ref<V>\n\n const derived = {\n // SubscriptionRef internals access self.ref / self.pubsub / self.semaphore.\n ref: readonlyRef,\n pubsub: {\n publish: () => 
Effect.succeed(true),\n },\n semaphore: {\n withPermits:\n () =>\n <A, E, R>(self: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>\n self,\n },\n get: readonlyRef.get,\n modify: readonlyRef.modify,\n // Derived stream: selector-map stateRef.changes and de-duplicate.\n changes: Stream.map(stateRef.changes, selector).pipe(Stream.changes) as Stream.Stream<V>,\n } as unknown as SubscriptionRef.SubscriptionRef<V>\n\n return derived\n },\n }\n\n KernelRef.setKernelImplementationRef(runtime, kernelImplementationRef)\n RuntimeKernel.setRuntimeServicesEvidence(runtime, runtimeServicesEvidence)\n\n // Optional: when RunSession/EvidenceCollector is in scope, write runtime services evidence into the collector.\n // By default (non-trial-run), Env does not contain EvidenceCollectorTag, so this adds no overhead.\n const collectorOpt = yield* Effect.serviceOption(EvidenceCollectorTag)\n if (Option.isSome(collectorOpt)) {\n collectorOpt.value.setKernelImplementationRef(kernelImplementationRef)\n const level = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n if (level !== 'off') {\n collectorOpt.value.setRuntimeServicesEvidence(runtimeServicesEvidence)\n }\n }\n\n const convergeStaticIrCollectors = yield* FiberRef.get(currentConvergeStaticIrCollectors)\n const registerConvergeStaticIr = (staticIr: unknown): void => {\n if (convergeStaticIrCollectors.length === 0) return\n for (const collector of convergeStaticIrCollectors) {\n collector.register(staticIr as any)\n }\n }\n\n const sourceRefreshRegistry = new Map<string, (state: unknown) => Effect.Effect<void, never, any>>()\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n sourceRefreshRegistry.clear()\n }),\n )\n\n // Build a minimal imports-scope injector:\n // - Only store ModuleToken -> ModuleRuntime mappings.\n // - Never capture the whole Context into ModuleRuntime (avoid leaking root/base services by accident).\n const importsMap = new Map<Context.Tag<any, PublicModuleRuntime<any, any>>, 
PublicModuleRuntime<any, any>>()\n\n for (const imported of options.imports ?? []) {\n const maybe = yield* Effect.serviceOption(imported)\n if (Option.isSome(maybe)) {\n importsMap.set(imported, maybe.value)\n }\n }\n\n const importsScope: RuntimeInternals['imports'] = {\n kind: 'imports-scope',\n get: (module) => importsMap.get(module),\n }\n\n const instanceKey = options.moduleId != null ? `${options.moduleId}::${instanceId}` : undefined\n\n if (instanceKey) {\n registerRuntimeByInstanceKey(instanceKey, runtime as PublicModuleRuntime<any, any>)\n }\n\n const registerStateTraitProgram = (\n program: any,\n registerOptions?: { readonly bumpReason?: any; readonly exportStaticIr?: boolean },\n ): void => {\n const nextIr = (program as any).convergeIr\n const nextKeys = nextIr\n ? {\n writersKey: nextIr.writersKey,\n depsKey: nextIr.depsKey,\n }\n : undefined\n\n const requestedBumpReason = registerOptions?.bumpReason\n let bumpReason: any\n\n if (traitState.lastConvergeIrKeys && nextKeys) {\n if (requestedBumpReason) {\n bumpReason = requestedBumpReason\n } else if (traitState.lastConvergeIrKeys.writersKey !== nextKeys.writersKey) {\n bumpReason = 'writers_changed'\n } else if (traitState.lastConvergeIrKeys.depsKey !== nextKeys.depsKey) {\n bumpReason = 'deps_changed'\n }\n } else if (traitState.lastConvergeIrKeys && !nextKeys) {\n bumpReason = requestedBumpReason ?? 'unknown'\n }\n\n if (bumpReason) {\n const nextGeneration = traitState.convergeGeneration.generation + 1\n const nextBumpCount = (traitState.convergeGeneration.generationBumpCount ?? 0) + 1\n traitState.convergeGeneration = {\n generation: nextGeneration,\n generationBumpCount: nextBumpCount,\n lastBumpReason: bumpReason,\n }\n\n traitState.pendingCacheMissReason = 'generation_bumped'\n traitState.convergePlanCache = new StateTraitConverge.ConvergePlanCache(convergePlanCacheCapacity)\n }\n\n traitState.lastConvergeIrKeys = nextKeys\n\n const convergeIr = nextIr\n ? 
{\n ...nextIr,\n generation: traitState.convergeGeneration.generation,\n }\n : undefined\n\n const convergeExecIr =\n convergeIr && !(convergeIr as any).configError ? makeConvergeExecIr(convergeIr as any) : undefined\n\n traitState.program = {\n ...(program as any),\n convergeIr,\n convergeExecIr,\n }\n traitState.listConfigs = RowId.collectListConfigs((program as any).spec)\n\n if (!traitState.convergePlanCache) {\n traitState.convergePlanCache = new StateTraitConverge.ConvergePlanCache(convergePlanCacheCapacity)\n }\n\n const exportStaticIrEnabled = registerOptions?.exportStaticIr !== false\n\n if (exportStaticIrEnabled && convergeIr && !(convergeIr as any).configError) {\n if (convergeStaticIrCollectors.length > 0) {\n registerConvergeStaticIr(\n exportConvergeStaticIr({\n ir: convergeIr,\n moduleId: options.moduleId ?? 'unknown',\n instanceId,\n }),\n )\n }\n }\n }\n\n // 065: even if the module declares no traits, it must still have a schema-backed Static IR table (FieldPathIdRegistry),\n // otherwise reducer patchPaths / ReadQuery(static lane) cannot be mapped and will degrade to dirtyAll.\n if (!traitState.program) {\n const stateSchema = (options.tag as any)?.stateSchema as unknown\n if (stateSchema) {\n try {\n registerStateTraitProgram(StateTraitBuild.build(stateSchema as any, {} as any), { exportStaticIr: false })\n } catch {\n // best-effort: keep trait program undefined and fall back to dirtyAll scheduling when registry is missing.\n }\n }\n }\n\n const enqueueStateTraitValidateRequest = (request: StateTraitValidate.ScopedValidateRequest): void => {\n if (!txnContext.current) return\n const current: any = txnContext.current\n const list: Array<StateTraitValidate.ScopedValidateRequest> = current.stateTraitValidateRequests ?? 
[]\n list.push(request)\n current.stateTraitValidateRequests = list\n }\n\n const recordReplayEvent = (event: unknown): void => {\n if (!txnContext.current) return\n const current: any = txnContext.current\n current.lastReplayEvent = {\n ...(event as any),\n txnId: current.txnId,\n trigger: current.origin,\n }\n }\n\n const runWithStateTransactionInternal = (\n origin: StateTransaction.StateTxnOrigin,\n body: () => Effect.Effect<void>,\n ): Effect.Effect<void> =>\n enqueueTransaction(\n runOperation(\n origin.kind as any as EffectOp.EffectOp['kind'],\n origin.name ? `txn:${origin.name}` : 'txn',\n { meta: { moduleId: options.moduleId, instanceId } },\n runWithStateTransaction(origin, body),\n ),\n )\n\n const applyTransactionSnapshot = (txnId: string, mode: 'before' | 'after'): Effect.Effect<void> =>\n enqueueTransaction(\n Effect.gen(function* () {\n // Time travel is disabled by default in production to avoid misuse.\n // Devtools should use this only in dev/test with instrumentation = \"full\".\n if (!isDevEnv()) {\n return\n }\n\n const txn = txnById.get(txnId)\n if (!txn) {\n return\n }\n\n const targetState = mode === 'before' ? 
txn.initialStateSnapshot : txn.finalStateSnapshot\n\n if (targetState === undefined) {\n // Time travel is not possible when snapshots are not collected.\n return\n }\n\n // Record a replay operation as a StateTransaction with origin.kind = \"devtools\"\n // so Devtools txn views can show a complete time-travel trace.\n yield* runWithStateTransaction(\n {\n kind: 'devtools',\n name: 'time-travel',\n details: {\n baseTxnId: txnId,\n mode,\n },\n },\n () =>\n Effect.sync(() => {\n StateTransaction.updateDraft(txnContext, targetState as S)\n StateTransaction.recordPatch(txnContext, '*', 'devtools')\n }),\n )\n }),\n )\n\n const stateSchema = (options.tag as any)?.stateSchema\n\n const effectsRegistry = makeEffectsRegistry({\n moduleId: options.moduleId,\n instanceId,\n actions$: runtime.actions$ as Stream.Stream<unknown>,\n })\n\n const runtimeInternals: RuntimeInternals = {\n moduleId: options.moduleId,\n instanceId,\n stateSchema,\n lifecycle: {\n registerInitRequired: (eff, options) => {\n lifecycle.registerInitRequired(eff, options)\n },\n registerStart: (eff, options) => {\n lifecycle.registerStart(eff, options)\n },\n registerDestroy: (eff, options) => {\n lifecycle.registerDestroy(eff, options)\n },\n registerOnError: (handler) => {\n lifecycle.registerOnError(handler)\n },\n registerPlatformSuspend: (eff, options) => {\n lifecycle.registerPlatformSuspend(eff, options)\n },\n registerPlatformResume: (eff, options) => {\n lifecycle.registerPlatformResume(eff, options)\n },\n registerPlatformReset: (eff, options) => {\n lifecycle.registerPlatformReset(eff, options)\n },\n },\n imports: importsScope,\n txn: {\n instrumentation,\n registerReducer: dispatchOps.registerReducer as any,\n runWithStateTransaction: runWithStateTransactionInternal as any,\n updateDraft,\n recordStatePatch,\n recordReplayEvent,\n applyTransactionSnapshot: applyTransactionSnapshot as any,\n },\n concurrency: {\n resolveConcurrencyPolicy,\n },\n txnLanes: {\n resolveTxnLanePolicy,\n },\n 
traits: {\n rowIdStore,\n getListConfigs: () => traitState.listConfigs as ReadonlyArray<unknown>,\n registerSourceRefresh: (fieldPath, handler) => {\n sourceRefreshRegistry.set(fieldPath, handler)\n },\n getSourceRefreshHandler: (fieldPath) => sourceRefreshRegistry.get(fieldPath),\n registerStateTraitProgram: registerStateTraitProgram as any,\n enqueueStateTraitValidateRequest: enqueueStateTraitValidateRequest as any,\n registerModuleTraitsContribution: (contribution) => {\n if (moduleTraitsState.frozen) {\n throw new Error('[ModuleTraitsFrozen] Cannot register traits contribution after finalize/freeze.')\n }\n moduleTraitsState.contributions.push(contribution)\n },\n freezeModuleTraits: () => {\n moduleTraitsState.frozen = true\n },\n getModuleTraitsContributions: () => moduleTraitsState.contributions,\n getModuleTraitsSnapshot: () => moduleTraitsState.snapshot,\n setModuleTraitsSnapshot: (snapshot) => {\n moduleTraitsState.snapshot = snapshot\n },\n },\n effects: {\n registerEffect: (args) => effectsRegistry.registerEffect(args as any),\n },\n devtools: {\n registerConvergeStaticIr: registerConvergeStaticIr as any,\n },\n }\n\n yield* installInternalHooks({ runtime, runtimeInternals })\n\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n importsMap.clear()\n }),\n )\n\n if (options.tag) {\n registerRuntime(options.tag as Context.Tag<any, PublicModuleRuntime<S, A>>, runtime)\n }\n\n yield* Effect.addFinalizer(() =>\n lifecycle.runDestroy.pipe(\n Effect.flatMap(() =>\n runOperation(\n 'lifecycle',\n 'module:destroy',\n { meta: { moduleId: options.moduleId, instanceId } },\n Debug.record({\n type: 'module:destroy',\n moduleId: options.moduleId,\n instanceId,\n }),\n ),\n ),\n Effect.tap(() =>\n Effect.sync(() => {\n if (options.tag) {\n unregisterRuntime(options.tag as Context.Tag<any, PublicModuleRuntime<any, any>>)\n }\n if (instanceKey) {\n unregisterRuntimeByInstanceKey(instanceKey)\n }\n }),\n ),\n ),\n )\n\n if (options.tag && options.logics?.length) 
{\n yield* runModuleLogics({\n tag: options.tag as Context.Tag<any, PublicModuleRuntime<S, A>>,\n logics: options.logics,\n runtime,\n lifecycle,\n moduleId,\n instanceId,\n })\n }\n\n if (options.processes && options.processes.length > 0) {\n const env = (yield* Effect.context<Scope.Scope | R>()) as Context.Context<any>\n const rootContextOpt = Context.getOption(env, RootContextTag as any)\n const isAppModule =\n Option.isSome(rootContextOpt) &&\n Array.isArray((rootContextOpt.value as RootContext).appModuleIds) &&\n (rootContextOpt.value as RootContext).appModuleIds!.includes(moduleId)\n\n if (!isAppModule) {\n const processRuntimeOpt = Context.getOption(env, ProcessRuntime.ProcessRuntimeTag as any)\n const processRuntime = Option.isSome(processRuntimeOpt)\n ? (processRuntimeOpt.value as ProcessRuntime.ProcessRuntime)\n : undefined\n const scope = {\n type: 'moduleInstance',\n moduleId,\n instanceId,\n } as const\n\n yield* Effect.forEach(\n options.processes,\n (process) =>\n Effect.gen(function* () {\n if (processRuntime) {\n const installEffect = processRuntime.install(process as any, {\n scope,\n enabled: true,\n installedAt: 'moduleRuntime',\n })\n\n // During the acquire phase of Layer.scoped(...), the current module runtime is not yet in Context,\n // but instance-scope processes (especially Link) may strictly require dependencies to be resolvable in scope.\n // We explicitly provide the current module runtime to avoid falsely treating itself as a missing dependency.\n const installation = options.tag\n ? 
yield* installEffect.pipe(\n Effect.provideService(options.tag as Context.Tag<any, any>, runtime as any),\n )\n : yield* installEffect\n\n if (installation !== undefined) {\n return\n }\n }\n\n // Legacy fallback: a raw Effect is still allowed as a process host, but it has no Process static surface/diagnostics.\n yield* Effect.forkScoped(process as any)\n }),\n { discard: true },\n )\n }\n }\n\n return runtime\n })\n\n return program as Effect.Effect<PublicModuleRuntime<S, A>, never, Scope.Scope | R>\n}\n","import { Cause, Context, Effect, Ref } from 'effect'\nimport { toSerializableErrorSummary } from './errorSummary.js'\nimport * as Debug from './DebugSink.js'\n\nexport type Phase = 'init' | 'run' | 'destroy' | 'platform'\n\nexport type Hook = 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n\nexport type TaskKind = 'initRequired' | 'start' | 'destroy' | 'platformSuspend' | 'platformResume' | 'platformReset'\n\nexport interface ErrorContext {\n readonly phase: Phase\n readonly hook: Hook\n readonly moduleId: string\n readonly instanceId: string\n readonly taskId?: string\n readonly txnSeq?: number\n readonly opSeq?: number\n /**\n * For diagnostics only: an implementation-side marker indicating where the error originated,\n * e.g. 
\"logic.fork\" / \"initRequired\" / \"start\".\n *\n * Note: this field must be serializable and must not become a protocol anchor.\n */\n readonly origin?: string\n}\n\nexport interface ModuleRuntimeIdentity {\n readonly moduleId: string\n readonly instanceId: string\n readonly runtimeLabel?: string\n}\n\nexport type InstanceStatus = 'creating' | 'initializing' | 'ready' | 'failed' | 'terminating' | 'terminated'\n\nexport interface InitProgress {\n readonly total: number\n readonly completed: number\n readonly current?: number\n readonly startedAt?: number\n}\n\nexport type LifecycleOutcome =\n | { readonly status: 'success' }\n | {\n readonly status: 'failure'\n readonly error: import('./errorSummary.js').SerializableErrorSummary\n }\n\nexport interface LifecycleStatus {\n readonly identity: ModuleRuntimeIdentity\n readonly status: InstanceStatus\n readonly initOutcome?: LifecycleOutcome\n readonly initProgress?: InitProgress\n}\n\nexport interface TaskRef {\n readonly taskId: string\n readonly kind: TaskKind\n readonly order: number\n readonly name?: string\n readonly fatalOnFailure?: boolean\n}\n\nexport interface LifecycleTask extends TaskRef {\n readonly effect: Effect.Effect<void, never, any>\n}\n\nexport interface Budgets {\n /** Per-instance lifecycle event budget (aligned with specs/011 data-model; default ≤ 20). */\n readonly maxEventsPerInstance: number\n /** Per-event size budget (aligned with specs/011 data-model; default ≤ 4KB). 
*/\n readonly maxEventBytes: number\n}\n\nexport interface LifecycleManager {\n readonly identity: ModuleRuntimeIdentity\n readonly budgets: Budgets\n\n readonly registerPlatformSuspend: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n readonly registerPlatformResume: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n readonly registerPlatformReset: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n\n readonly registerInitRequired: (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => void\n readonly registerStart: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string; readonly fatalOnFailure?: boolean },\n ) => void\n readonly registerDestroy: (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => void\n readonly registerOnError: (\n handler: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>,\n ) => void\n\n readonly getStatus: Effect.Effect<LifecycleStatus>\n readonly setStatus: (\n status: InstanceStatus,\n patch?: {\n readonly initOutcome?: LifecycleOutcome | undefined\n readonly initProgress?: InitProgress | undefined\n readonly runtimeLabel?: string | undefined\n },\n ) => Effect.Effect<void>\n\n readonly notifyError: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>\n\n readonly runPlatformSuspend: Effect.Effect<void, never, any>\n readonly runPlatformResume: Effect.Effect<void, never, any>\n readonly runPlatformReset: Effect.Effect<void, never, any>\n\n readonly runInitRequired: Effect.Effect<void, unknown, any>\n readonly runStart: Effect.Effect<void, never, any>\n readonly runDestroy: Effect.Effect<void, never, any>\n\n /** Diagnostics only: whether any onError handler has been registered. 
*/\n readonly hasOnErrorHandlers: Effect.Effect<boolean>\n\n /** Tests/diagnostics only: read a snapshot of registered tasks (immutable view). */\n readonly getTaskSnapshot: Effect.Effect<\n Readonly<{\n readonly initRequired: ReadonlyArray<TaskRef>\n readonly start: ReadonlyArray<TaskRef>\n readonly destroy: ReadonlyArray<TaskRef>\n readonly platformSuspend: ReadonlyArray<TaskRef>\n readonly platformResume: ReadonlyArray<TaskRef>\n readonly platformReset: ReadonlyArray<TaskRef>\n }>,\n never,\n never\n >\n}\n\nexport const LifecycleContext = Context.GenericTag<LifecycleManager>('@logixjs/LifecycleManager')\n\nconst safeRun = (label: string, eff: Effect.Effect<void, any, any>) =>\n eff.pipe(\n Effect.matchCauseEffect({\n onSuccess: () => Effect.void,\n onFailure: (cause) => Effect.logError(`[${label}] failed: ${Cause.pretty(cause)}`),\n }),\n )\n\nconst makeTaskId = (kind: TaskKind, order: number): string => `${kind}:${order}`\n\nexport const makeLifecycleManager = (identity: ModuleRuntimeIdentity): Effect.Effect<LifecycleManager> =>\n Effect.gen(function* () {\n const budgets: Budgets = {\n maxEventsPerInstance: 20,\n maxEventBytes: 4 * 1024,\n }\n\n const statusRef = yield* Ref.make<LifecycleStatus>({\n identity,\n status: 'creating',\n })\n\n const initRequired: LifecycleTask[] = []\n const start: LifecycleTask[] = []\n const destroy: LifecycleTask[] = []\n const platformSuspend: LifecycleTask[] = []\n const platformResume: LifecycleTask[] = []\n const platformReset: LifecycleTask[] = []\n const onErrorHandlers: Array<\n (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>\n > = []\n\n const getStatus: Effect.Effect<LifecycleStatus> = Ref.get(statusRef)\n\n const recordPhase = (phase: Phase, name: string, payload?: unknown): Effect.Effect<void, never, any> =>\n Debug.record({\n type: 'lifecycle:phase',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n phase,\n name,\n payload,\n })\n\n const setStatus = (\n 
status: InstanceStatus,\n patch?: {\n readonly initOutcome?: LifecycleOutcome | undefined\n readonly initProgress?: InitProgress | undefined\n readonly runtimeLabel?: string | undefined\n },\n ) =>\n Ref.update(statusRef, (prev) => ({\n ...prev,\n identity: {\n ...prev.identity,\n ...(patch?.runtimeLabel ? { runtimeLabel: patch.runtimeLabel } : null),\n },\n status,\n ...(patch?.initOutcome !== undefined ? { initOutcome: patch.initOutcome } : null),\n ...(patch?.initProgress !== undefined ? { initProgress: patch.initProgress } : null),\n }))\n\n const registerInitRequired = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = initRequired.length\n initRequired.push({\n taskId: makeTaskId('initRequired', order),\n kind: 'initRequired',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerStart = (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string; readonly fatalOnFailure?: boolean },\n ) => {\n const order = start.length\n start.push({\n taskId: makeTaskId('start', order),\n kind: 'start',\n order,\n name: options?.name,\n fatalOnFailure: options?.fatalOnFailure,\n effect,\n })\n }\n\n const registerDestroy = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = destroy.length\n destroy.push({\n taskId: makeTaskId('destroy', order),\n kind: 'destroy',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerOnError = (\n handler: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>,\n ) => {\n onErrorHandlers.push(handler)\n }\n\n const registerPlatformSuspend = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformSuspend.length\n platformSuspend.push({\n taskId: makeTaskId('platformSuspend', order),\n kind: 'platformSuspend',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerPlatformResume = (effect: 
Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformResume.length\n platformResume.push({\n taskId: makeTaskId('platformResume', order),\n kind: 'platformResume',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerPlatformReset = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformReset.length\n platformReset.push({\n taskId: makeTaskId('platformReset', order),\n kind: 'platformReset',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const notifyError = (cause: Cause.Cause<unknown>, context: ErrorContext) => {\n // Interrupt/cancel should not be reported as an error.\n if (Cause.isInterrupted(cause)) {\n return Effect.void\n }\n\n return Debug.record({\n type: 'lifecycle:error',\n moduleId: context.moduleId,\n instanceId: context.instanceId,\n cause,\n phase: context.phase,\n hook: context.hook,\n taskId: context.taskId,\n txnSeq: context.txnSeq,\n opSeq: context.opSeq,\n origin: context.origin,\n }).pipe(\n Effect.zipRight(\n Effect.forEach(\n onErrorHandlers,\n (handler) =>\n handler(cause, context).pipe(\n Effect.catchAllCause((inner) => Effect.logError(`[lifecycle.onError] failed: ${Cause.pretty(inner)}`)),\n ),\n { discard: true },\n ),\n ),\n )\n }\n\n const runInitRequired: Effect.Effect<void, unknown, any> = Effect.gen(function* () {\n const total = initRequired.length\n if (total === 0) {\n yield* setStatus('ready', {\n initProgress: { total: 0, completed: 0 },\n initOutcome: { status: 'success' },\n })\n return\n }\n\n const startedAt = Date.now()\n yield* recordPhase('init', 'initRequired:start', { total })\n yield* setStatus('initializing', {\n initProgress: { total, completed: 0, current: 0, startedAt },\n })\n\n let completed = 0\n for (let i = 0; i < initRequired.length; i++) {\n yield* setStatus('initializing', {\n initProgress: { total, completed, current: i, startedAt },\n })\n\n const task = initRequired[i]\n const exit 
= yield* Effect.exit(task.effect)\n\n if (exit._tag === 'Success') {\n completed += 1\n yield* setStatus('initializing', {\n initProgress: { total, completed, current: i + 1, startedAt },\n })\n continue\n }\n\n const summary = toSerializableErrorSummary(exit.cause)\n yield* notifyError(exit.cause, {\n phase: 'init',\n hook: 'initRequired',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'initRequired',\n })\n\n yield* setStatus('failed', {\n initProgress: { total, completed, current: i, startedAt },\n initOutcome: { status: 'failure', error: summary.errorSummary },\n })\n\n return yield* Effect.failCause(exit.cause)\n }\n\n yield* recordPhase('init', 'initRequired:success', { total })\n yield* setStatus('ready', {\n initProgress: { total, completed, current: total, startedAt },\n initOutcome: { status: 'success' },\n })\n })\n\n const runStart: Effect.Effect<void, never, any> = recordPhase('run', 'start:schedule', {\n total: start.length,\n }).pipe(\n Effect.zipRight(\n Effect.forEach(\n start,\n (task) =>\n Effect.forkScoped(\n task.effect.pipe(\n Effect.catchAllCause((cause) =>\n notifyError(cause, {\n phase: 'run',\n hook: 'start',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'start',\n }),\n ),\n ),\n ).pipe(Effect.asVoid),\n { discard: true, concurrency: 'unbounded' },\n ),\n ),\n )\n\n const runDestroy: Effect.Effect<void, never, any> = Effect.gen(function* () {\n yield* recordPhase('destroy', 'destroy:start', { total: destroy.length })\n yield* setStatus('terminating')\n\n // destroy: run in reverse registration order (LIFO), best-effort (one failure does not block others).\n for (let i = destroy.length - 1; i >= 0; i--) {\n const task = destroy[i]\n yield* safeRun(\n 'lifecycle.onDestroy',\n task.effect.pipe(\n Effect.catchAllCause((cause) =>\n notifyError(cause, {\n phase: 'destroy',\n hook: 'destroy',\n moduleId: identity.moduleId,\n instanceId: 
identity.instanceId,\n taskId: task.taskId,\n origin: 'destroy',\n }),\n ),\n ),\n )\n }\n\n yield* setStatus('terminated')\n yield* recordPhase('destroy', 'destroy:done', { total: destroy.length })\n })\n\n const runPlatformSuspend: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformSuspend.length === 0) return\n\n yield* recordPhase('platform', 'signal:suspend', { total: platformSuspend.length })\n for (const task of platformSuspend) {\n yield* safeRun(\n 'lifecycle.onSuspend',\n task.effect.pipe(\n Effect.catchAllCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'suspend',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.suspend',\n }),\n ),\n ),\n )\n }\n })\n\n const runPlatformResume: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformResume.length === 0) return\n\n yield* recordPhase('platform', 'signal:resume', { total: platformResume.length })\n for (const task of platformResume) {\n yield* safeRun(\n 'lifecycle.onResume',\n task.effect.pipe(\n Effect.catchAllCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'resume',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.resume',\n }),\n ),\n ),\n )\n }\n })\n\n const runPlatformReset: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformReset.length === 0) return\n\n yield* recordPhase('platform', 'signal:reset', { total: platformReset.length })\n for (const task of platformReset) {\n yield* safeRun(\n 'lifecycle.onReset',\n task.effect.pipe(\n Effect.catchAllCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'reset',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.reset',\n }),\n ),\n ),\n )\n }\n })\n\n const getTaskSnapshot: LifecycleManager['getTaskSnapshot'] = Effect.sync(() => ({\n initRequired: 
initRequired.map(({ effect: _eff, ...rest }) => rest),\n start: start.map(({ effect: _eff, ...rest }) => rest),\n destroy: destroy.map(({ effect: _eff, ...rest }) => rest),\n platformSuspend: platformSuspend.map(({ effect: _eff, ...rest }) => rest),\n platformResume: platformResume.map(({ effect: _eff, ...rest }) => rest),\n platformReset: platformReset.map(({ effect: _eff, ...rest }) => rest),\n }))\n\n const hasOnErrorHandlers: LifecycleManager['hasOnErrorHandlers'] = Effect.sync(() => onErrorHandlers.length > 0)\n\n return {\n identity,\n budgets,\n registerPlatformSuspend,\n registerPlatformResume,\n registerPlatformReset,\n registerInitRequired,\n registerStart,\n registerDestroy,\n registerOnError,\n getStatus,\n setStatus,\n notifyError,\n runPlatformSuspend,\n runPlatformResume,\n runPlatformReset,\n runInitRequired,\n runStart,\n runDestroy,\n hasOnErrorHandlers,\n getTaskSnapshot,\n }\n })\n","import { Cause } from 'effect'\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport interface SerializableErrorSummary {\n readonly message: string\n readonly name?: string\n readonly code?: string\n readonly hint?: string\n}\n\nexport interface ErrorSummaryResult {\n readonly errorSummary: SerializableErrorSummary\n readonly downgrade?: DowngradeReason\n}\n\nconst truncate = (value: string, maxLen: number): { readonly value: string; readonly truncated: boolean } => {\n if (value.length <= maxLen) return { value, truncated: false }\n return { value: value.slice(0, maxLen), truncated: true }\n}\n\nconst safeStringify = (value: unknown): { readonly ok: true; readonly json: string } | { readonly ok: false } => {\n try {\n return { ok: true, json: JSON.stringify(value) }\n } catch {\n return { ok: false }\n }\n}\n\nconst getMessageFromUnknown = (cause: unknown): string => {\n if (typeof cause === 'string') return cause\n if (typeof cause === 'number' || typeof cause === 'boolean' || typeof cause === 'bigint') return String(cause)\n if 
(cause instanceof Error) return cause.message || cause.name || 'Error'\n if (cause && typeof cause === 'object' && 'message' in (cause as any) && typeof (cause as any).message === 'string') {\n return (cause as any).message as string\n }\n\n // Try Effect Cause pretty (best-effort). This may include more details than needed,\n // so callers MUST still treat it as an untrusted/oversized string and truncate.\n try {\n const pretty = Cause.pretty(cause as Cause.Cause<unknown>, { renderErrorCause: true })\n if (typeof pretty === 'string' && pretty.length > 0) return pretty\n } catch {\n // ignore\n }\n\n return 'Unknown error'\n}\n\nexport const toSerializableErrorSummary = (\n cause: unknown,\n options?: {\n readonly maxMessageLength?: number\n },\n): ErrorSummaryResult => {\n const maxMessageLength = options?.maxMessageLength ?? 256\n\n const messageRaw = getMessageFromUnknown(cause)\n const { value: message, truncated } = truncate(messageRaw, maxMessageLength)\n\n const summary: { message: string; name?: string; code?: string; hint?: string } = {\n message,\n }\n\n if (cause instanceof Error) {\n if (cause.name && cause.name !== 'Error') summary.name = cause.name\n const anyCause = cause as any\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n else if (typeof anyCause.code === 'number' && Number.isFinite(anyCause.code)) summary.code = String(anyCause.code)\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n return {\n errorSummary: summary,\n downgrade: truncated ? 
'oversized' : undefined,\n }\n }\n\n if (cause && typeof cause === 'object') {\n const anyCause = cause as any\n if (typeof anyCause.name === 'string' && anyCause.name.length > 0) summary.name = anyCause.name\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n }\n\n // If the original cause isn't JSON-serializable, mark it explicitly.\n const stringifyResult = safeStringify(cause)\n if (!stringifyResult.ok) {\n return {\n errorSummary: summary,\n downgrade: 'non_serializable',\n }\n }\n\n if (truncated) {\n return {\n errorSummary: summary,\n downgrade: 'oversized',\n }\n }\n\n if (message === 'Unknown error') {\n return {\n errorSummary: summary,\n downgrade: 'unknown',\n }\n }\n\n return { errorSummary: summary }\n}\n","import { Cause, Effect, FiberRef, Layer, Logger } from 'effect'\nimport {\n projectJsonValue,\n type DowngradeReason as JsonDowngradeReason,\n type JsonValue,\n type JsonValueProjectionStats,\n} from '../../observability/jsonValue.js'\nimport type * as ReplayLog from './ReplayLog.js'\nimport {\n toSerializableErrorSummary,\n type DowngradeReason as ErrorDowngradeReason,\n type SerializableErrorSummary,\n} from './errorSummary.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport type * as ProcessProtocol from './process/protocol.js'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\n\nexport interface TriggerRef {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\ntype TraceEventType = `trace:${string}`\ntype GenericTraceEventType = Exclude<\n TraceEventType,\n 'trace:trait:converge' | 'trace:trait:check' | 'trace:trait:validate'\n>\n\n/**\n * ReplayEventRef:\n * - Replay event structure referenced from Debug events.\n * - Based on ReplayLog.Event, enriched with txn/trigger association fields for Devtools aggregation and 
explanation.\n */\nexport type ReplayEventRef = ReplayLog.ReplayLogEvent & {\n readonly txnId?: string\n readonly trigger?: TriggerRef\n}\n\nexport type Event =\n | {\n readonly type: 'module:init'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'module:destroy'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:phase'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly phase: 'init' | 'run' | 'destroy' | 'platform'\n readonly name: string\n readonly payload?: unknown\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'action:dispatch'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly action: unknown\n readonly actionTag?: string\n readonly unknownAction?: boolean\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'state:update'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly state: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * Optional: Static IR digest aligned with FieldPathId/StepId (for consumer-side reverse-mapping & alignment).\n * - When missing or mismatched, consumers must not attempt to reverse-map rootIds -> rootPaths (avoid wrong UI).\n * - Allowed to be omitted on near-zero-cost diagnostics=off paths.\n */\n readonly staticIrDigest?: string\n /**\n * Optional: the affected scope aggregated by this commit (field-level dirty-set).\n * - Populated by Runtime at commit time;\n * - Must stay slim and serializable;\n * - Devtools can use it to explain \"why converge/validate ran / why it degraded to full\".\n */\n readonly dirtySet?: unknown\n /**\n * Optional: patch count aggregated by this commit (from StateTransaction).\n * - 
Populated by Runtime only on transaction paths.\n * - Devtools can use it as a lightweight transaction summary metric.\n */\n readonly patchCount?: number\n /**\n * Optional: whether patch records were truncated (bounded) under full instrumentation.\n */\n readonly patchesTruncated?: boolean\n /**\n * Optional: truncation reason code (stable enum).\n */\n readonly patchesTruncatedReason?: 'max_patches'\n /**\n * Optional: commit mode (normal/batched/low-priority, etc).\n * - Populated by Runtime;\n * - Default is chosen by the caller (typically \"normal\").\n */\n readonly commitMode?: string\n /**\n * Optional: external visibility priority (normal/low).\n * - Populated by Runtime.\n * - Mainly used by React external subscription scheduling (avoid unnecessary renders).\n */\n readonly priority?: string\n /**\n * Optional: transaction origin kind (origin.kind) that triggered this state commit:\n * - e.g. \"action\" / \"source-refresh\" / \"service-callback\" / \"devtools\".\n * - Populated by Runtime only on StateTransaction-based paths.\n * - Devtools can distinguish app transactions vs devtools time-travel operations.\n */\n readonly originKind?: string\n /**\n * Optional: transaction origin name (origin.name) that triggered this state commit:\n * - e.g. 
action dispatch / fieldPath / task:success/task:failure, etc.\n * - Populated by Runtime only on StateTransaction-based paths.\n */\n readonly originName?: string\n /**\n * Reserved: Trait converge summary (for Devtools window-level stats / TopN costs / degrade reasons, etc.).\n * - Phase 2: field slot only; structure is not fixed.\n * - Later phases will align with the Trait/Replay event model into an explainable structure.\n */\n readonly traitSummary?: unknown\n /**\n * Reserved: replay event associated with this transaction (re-emit source of truth from ReplayLog).\n * - Phase 2: field slot only.\n * - Later phases will align with ReplayLog.Event structure.\n */\n readonly replayEvent?: ReplayEventRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly identity: ProcessProtocol.ProcessInstanceIdentity\n readonly severity: 'info' | 'warning' | 'error'\n readonly eventSeq: number\n readonly timestampMs: number\n readonly trigger?: ProcessProtocol.ProcessTrigger\n readonly dispatch?: {\n readonly moduleId: string\n readonly instanceId: string\n readonly actionId: string\n }\n readonly error?: ProcessProtocol.SerializableErrorSummary\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly cause: unknown\n readonly phase?: 'init' | 'run' | 'destroy' | 'platform'\n readonly hook?: 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n readonly taskId?: string\n readonly opSeq?: number\n readonly origin?: string\n readonly txnSeq?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 
'diagnostic'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n readonly txnSeq?: number\n readonly txnId?: string\n readonly trigger?: TriggerRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n /**\n * trace:* events:\n * - Extension hook for runtime tracing / Playground / Alignment Lab.\n * - Only the type prefix and moduleId are standardized; payload shape is defined by higher layers (e.g. spanId/attributes in data).\n */\n | {\n readonly type: 'trace:trait:converge'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:check'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:validate'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: GenericTraceEventType\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data?: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n\nexport interface Sink {\n readonly record: (event: Event) => Effect.Effect<void>\n}\nexport const currentDebugSinks = FiberRef.unsafeMake<ReadonlyArray<Sink>>([])\nexport const currentRuntimeLabel = FiberRef.unsafeMake<string | undefined>(undefined)\nexport const currentTxnId = FiberRef.unsafeMake<string | undefined>(undefined)\nexport const 
currentOpSeq = FiberRef.unsafeMake<number | undefined>(undefined)\nexport type DiagnosticsLevel = 'off' | 'light' | 'sampled' | 'full'\nexport const currentDiagnosticsLevel = FiberRef.unsafeMake<DiagnosticsLevel>('off')\n\nexport const diagnosticsLevel = (level: DiagnosticsLevel): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentDiagnosticsLevel as any, () => level) as Layer.Layer<any, never, never>\n\nexport interface TraitConvergeDiagnosticsSamplingConfig {\n /**\n * Sample once every N txns (deterministic, based on stable txnSeq).\n * - 1: sample every txn (timing granularity similar to full, while keeping payload slim)\n */\n readonly sampleEveryN: number\n /**\n * Max number of TopK hotspots to output (recommended ≤ 3).\n */\n readonly topK: number\n}\n\nexport const currentTraitConvergeDiagnosticsSampling = FiberRef.unsafeMake<TraitConvergeDiagnosticsSamplingConfig>({\n sampleEveryN: 32,\n topK: 3,\n})\n\nexport const traitConvergeDiagnosticsSampling = (\n config: TraitConvergeDiagnosticsSamplingConfig,\n): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentTraitConvergeDiagnosticsSampling as any, () => config) as Layer.Layer<\n any,\n never,\n never\n >\n\nexport const appendSinks = (sinks: ReadonlyArray<Sink>): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentDebugSinks, (current) => [...current, ...sinks]) as Layer.Layer<\n any,\n never,\n never\n >\n\nexport type RuntimeDebugEventKind =\n | 'action'\n | 'state'\n | 'service'\n | 'process'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'lifecycle'\n | 'react-render'\n | 'devtools'\n | 'diagnostic'\n | (string & {})\n\nexport interface RuntimeDebugEventRef {\n readonly eventId: string\n readonly eventSeq: number\n readonly moduleId: string\n readonly instanceId: string\n readonly runtimeLabel?: string\n readonly txnSeq: number\n readonly txnId?: string\n /**\n * linkId:\n * - Current operation chain id (shared by 
boundary ops in the same chain).\n * - Created by Runtime at the boundary root and propagated via FiberRef across nested/cross-module chains.\n */\n readonly linkId?: string\n readonly timestamp: number\n readonly kind: RuntimeDebugEventKind\n readonly label: string\n readonly meta?: JsonValue\n readonly errorSummary?: SerializableErrorSummary\n readonly downgrade?: {\n readonly reason?: 'non_serializable' | 'oversized' | 'unknown'\n }\n}\n\nexport type TxnLaneEvidenceReason =\n | 'disabled'\n | 'forced_off'\n | 'forced_sync'\n | 'queued_non_urgent'\n | 'preempted_by_urgent'\n | 'budget_yield'\n | 'coalesced'\n | 'canceled'\n | 'max_lag_forced'\n | 'starvation_protection'\n\nexport type TxnLaneNonUrgentYieldReason = 'none' | 'input_pending' | 'budget_exceeded' | 'forced_frame_yield'\n\nexport type TxnLaneEvidence = {\n readonly anchor: {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly opSeq?: number\n }\n readonly lane: 'urgent' | 'nonUrgent'\n readonly kind: string\n readonly policy: {\n readonly enabled: boolean\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n readonly configScope: 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'\n readonly budgetMs: number\n readonly debounceMs: number\n readonly maxLagMs: number\n readonly allowCoalesce: boolean\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n readonly queueMode?: 'fifo' | 'lanes'\n }\n readonly backlog: {\n readonly pendingCount: number\n readonly ageMs?: number\n readonly coalescedCount?: number\n readonly canceledCount?: number\n }\n readonly budget?: {\n readonly budgetMs?: number\n readonly sliceDurationMs?: number\n readonly yieldCount?: number\n readonly yielded?: boolean\n readonly yieldReason?: TxnLaneNonUrgentYieldReason\n }\n readonly starvation?: {\n readonly triggered?: boolean\n readonly reason?: string\n }\n readonly reasons: ReadonlyArray<TxnLaneEvidenceReason>\n}\n\nlet nextGlobalEventSeq = 0\n\nexport const 
clearRuntimeDebugEventSeq = (): void => {\n nextGlobalEventSeq = 0\n}\n\nconst nextEventSeq = (): number => {\n nextGlobalEventSeq += 1\n return nextGlobalEventSeq\n}\n\nconst makeEventId = (instanceId: string, eventSeq: number): string => `${instanceId}::e${eventSeq}`\n\ntype DowngradeReason = JsonDowngradeReason | ErrorDowngradeReason\n\nconst mergeDowngrade = (\n current: DowngradeReason | undefined,\n next: DowngradeReason | undefined,\n): DowngradeReason | undefined => {\n if (!current) return next\n if (!next) return current\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\n// In browsers, to reduce duplicated noise caused by React StrictMode, etc.,\n// de-duplicate lifecycle:error and diagnostic events: print the same moduleId+payload only once.\nconst browserLifecycleSeen = new Set<string>()\nconst browserDiagnosticSeen = new Set<string>()\n\n// Align trace:react-render events with the most recent state:update txn (UI-only association).\nconst lastTxnByInstance = new Map<string, { readonly txnId: string; readonly txnSeq: number }>()\n\n// trace:react-render / trace:react-selector may enter the sink before state:update (reordering due to concurrency/scheduling).\n// To provide usable txn anchors in Devtools/UI, we allow a one-time backfill for refs missing txn fields.\nconst pendingTxnAlignmentByInstance = new Map<string, Array<RuntimeDebugEventRef>>()\n\nconst enqueuePendingTxnAlignment = (instanceId: string, ref: RuntimeDebugEventRef): void => {\n const list = pendingTxnAlignmentByInstance.get(instanceId)\n if (!list) {\n pendingTxnAlignmentByInstance.set(instanceId, [ref])\n return\n }\n list.push(ref)\n if (list.length > 64) {\n list.shift()\n }\n}\n\nconst backfillPendingTxnAlignment = (\n instanceId: string,\n txn: { readonly txnId: string; readonly txnSeq: number },\n): void => {\n const pending = 
pendingTxnAlignmentByInstance.get(instanceId)\n if (!pending || pending.length === 0) {\n pendingTxnAlignmentByInstance.delete(instanceId)\n return\n }\n\n for (const ref of pending) {\n const anyRef: any = ref as any\n if (anyRef.txnId == null) {\n anyRef.txnId = txn.txnId\n }\n if (typeof anyRef.txnSeq !== 'number' || anyRef.txnSeq <= 0) {\n anyRef.txnSeq = txn.txnSeq\n }\n }\n\n pendingTxnAlignmentByInstance.delete(instanceId)\n}\n\nconst lifecycleErrorLog = (event: Extract<Event, { readonly type: 'lifecycle:error' }>) => {\n const moduleId = event.moduleId ?? 'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>, {\n renderErrorCause: true,\n })\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const message = `[Logix][module=${moduleId}] lifecycle:error\\n${causePretty}`\n\n return Effect.logError(message).pipe(\n Effect.annotateLogs({\n 'logix.moduleId': moduleId,\n 'logix.event': 'lifecycle:error',\n 'logix.cause': causePretty,\n }),\n )\n}\n\nconst diagnosticLog = (event: Extract<Event, { readonly type: 'diagnostic' }>) => {\n const moduleId = event.moduleId ?? 'unknown'\n const header = `[Logix][module=${moduleId}] diagnostic(${event.severity})`\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n const msg = `${header}\\n${detail}`\n\n const base =\n event.severity === 'warning'\n ? Effect.logWarning(msg)\n : event.severity === 'info'\n ? 
Effect.logInfo(msg)\n : Effect.logError(msg)\n\n const annotations: Record<string, unknown> = {\n 'logix.moduleId': moduleId,\n 'logix.event': `diagnostic(${event.severity})`,\n 'logix.diagnostic.code': event.code,\n 'logix.diagnostic.message': event.message,\n }\n if (event.hint) {\n annotations['logix.diagnostic.hint'] = event.hint\n }\n if (event.actionTag) {\n annotations['logix.diagnostic.actionTag'] = event.actionTag\n }\n\n return base.pipe(Effect.annotateLogs(annotations))\n}\n\n/**\n * Default Layer composition based on FiberRef.currentDebugSinks:\n * - Uses Layer.locallyScoped to inject Debug sinks via FiberRef state.\n * - Avoids misusing FiberRef as a Context.Tag.\n */\nexport const noopLayer = Layer.locallyScoped(currentDebugSinks, [])\n\n/**\n * errorOnlyLayer:\n * - Default DebugSink implementation that only cares about lifecycle:error events.\n * - Suitable as a \"minimum observability\" layer so fatal errors don't silently disappear.\n * - Other events (module:init/destroy, action:dispatch, state:update) are not recorded by default.\n */\nconst errorOnlySink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? diagnosticLog(event)\n : Effect.void,\n}\n\nexport const errorOnlyLayer = Layer.locallyScoped(currentDebugSinks, [errorOnlySink])\n\nexport const isErrorOnlyOnlySinks = (sinks: ReadonlyArray<Sink>): boolean => sinks.length === 1 && sinks[0] === errorOnlySink\n\n/**\n * consoleLayer:\n * - Full debug layer that logs all Debug events via Effect logs (logfmt / structured).\n * - Suitable as an observability layer for general environments (Node / tests).\n */\nconst consoleSink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? 
diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event }),\n}\n\nexport const consoleLayer = Layer.locallyScoped(currentDebugSinks, [consoleSink])\n\nconst isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined'\n\n// Shared browser console rendering logic used by the default DebugSink and browserConsoleLayer.\nconst renderBrowserConsoleEvent = (event: Event): Effect.Effect<void> => {\n // trace:* events: shown as separate groups in browsers for Playground / DevTools observation.\n if (typeof (event as any).type === 'string' && (event as any).type.startsWith('trace:')) {\n const moduleId = (event as any).moduleId ?? 'unknown'\n const type = (event as any).type\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c trace %c' + moduleId + '%c ' + String(type),\n 'color:#6b7280;font-weight:bold', // tag\n 'color:#3b82f6', // label\n 'color:#9ca3af', // module id\n 'color:#6b7280', // type\n )\n // eslint-disable-next-line no-console\n console.log(event)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'lifecycle:error') {\n const moduleId = event.moduleId ?? 
'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>, { renderErrorCause: true })\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const key = `${moduleId}|${causePretty}`\n if (browserLifecycleSeen.has(key)) {\n return Effect.void\n }\n browserLifecycleSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c lifecycle:error %c' + moduleId,\n 'color:#ef4444;font-weight:bold', // tag\n 'color:#ef4444', // label\n 'color:#9ca3af', // module id\n )\n // eslint-disable-next-line no-console\n console.error(causePretty)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'diagnostic') {\n const moduleId = event.moduleId ?? 'unknown'\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n\n const color =\n event.severity === 'warning' ? 'color:#d97706' : event.severity === 'info' ? 'color:#3b82f6' : 'color:#ef4444'\n\n const label =\n event.severity === 'warning'\n ? 'diagnostic(warning)'\n : event.severity === 'info'\n ? 
'diagnostic(info)'\n : 'diagnostic(error)'\n\n const key = `${moduleId}|${event.code}|${event.message}`\n if (browserDiagnosticSeen.has(key)) {\n return Effect.void\n }\n browserDiagnosticSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c ' + label + '%c module=' + moduleId,\n 'color:#6b7280;font-weight:bold',\n color,\n 'color:#9ca3af',\n )\n if (event.severity === 'warning') {\n // eslint-disable-next-line no-console\n console.warn(detail)\n } else if (event.severity === 'info') {\n // eslint-disable-next-line no-console\n console.info(detail)\n } else {\n // eslint-disable-next-line no-console\n console.error(detail)\n }\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n // Other events are not printed to the browser console by default to avoid being too noisy during development.\n // For internal debug events, use a custom Debug sink or use consoleLayer in Node.\n return Effect.void\n}\n\n/**\n * Browser console debug layer:\n * - In browsers, uses console.groupCollapsed + colored labels to simulate pretty logger grouping.\n * - In non-browser environments, falls back to consoleLayer's Effect logging implementation.\n */\nconst browserConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n // Non-browser: fall back to consoleLayer behavior (Effect.log*).\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? 
diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event })\n }\n\n return renderBrowserConsoleEvent(event)\n },\n}\n\nexport const browserConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserConsoleSink])\n\n/**\n * Browser diagnostic-only debug layer:\n * - In browsers, prints only lifecycle:error + diagnostic(warning/error) via console.groupCollapsed.\n * - Drops trace:* and other high-frequency events from the browser console (use DevtoolsHub instead).\n * - In non-browser environments, behaves like errorOnlySink (Effect.log*).\n */\nconst browserDiagnosticConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? diagnosticLog(event)\n : Effect.void\n }\n\n return event.type === 'lifecycle:error' || (event.type === 'diagnostic' && event.severity !== 'info')\n ? renderBrowserConsoleEvent(event)\n : Effect.void\n },\n}\n\nexport const browserDiagnosticConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserDiagnosticConsoleSink])\n\n/**\n * Browser-friendly Logger layer: replaces the default logger with Effect's pretty logger (browser mode).\n * - Avoids hand-written console styles; reuses Effect's colored/grouped formatting.\n * - Safely degrades to the default logger in server environments.\n */\nexport const browserPrettyLoggerLayer = Logger.replace(\n Logger.defaultLogger,\n Logger.prettyLogger({ mode: 'browser', colors: true }),\n)\n\n/**\n * defaultLayer:\n * - Public default layer; currently equivalent to errorOnlyLayer.\n * - Records lifecycle:error only, avoiding a large volume of action/state logs by default.\n */\nexport const defaultLayer = errorOnlyLayer\n\nexport const record = (event: Event) =>\n Effect.gen(function* () {\n const sinks = yield* FiberRef.get(currentDebugSinks)\n\n // Fast path: production default installs errorOnlyLayer (sinks=1).\n // Avoid paying diagnostics 
FiberRef + enrichment costs for high-frequency events that are always dropped by errorOnly.\n if (isErrorOnlyOnlySinks(sinks)) {\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic') {\n if (event.severity !== 'info') {\n yield* diagnosticLog(event)\n } else {\n yield* Effect.void\n }\n return\n }\n yield* Effect.void\n return\n }\n\n // Fast path: when no sinks are installed, only a small subset of events are ever surfaced.\n // Avoid paying per-event FiberRef + enrichment costs for high-frequency events like state:update.\n if (sinks.length === 0) {\n if (isBrowser) {\n if (event.type === 'lifecycle:error' || event.type === 'diagnostic') {\n yield* renderBrowserConsoleEvent(event)\n return\n }\n yield* Effect.void\n return\n }\n\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic') {\n yield* diagnosticLog(event)\n return\n }\n yield* Effect.void\n return\n }\n\n const enriched = event as Event\n\n const diagnosticsLevel = yield* FiberRef.get(currentDiagnosticsLevel)\n\n // Enrich Debug.Event with basic fields (enabled only when diagnosticsLevel!=off):\n // - timestamp: for Devtools/Timeline/Overview time aggregation; avoids UI-side \"first observed time\" distortion.\n // - runtimeLabel: from FiberRef for grouping by runtime (injected only when not already provided by the event).\n let now: number | undefined\n const getNow = (): number => {\n if (now === undefined) now = Date.now()\n return now\n }\n\n // diagnostics=off: keep near-zero cost; do not add timestamp for high-frequency events (avoid extra Date.now()).\n // Low-frequency events (lifecycle:error/diagnostic) may still get timestamp for easier debugging.\n if (\n enriched.timestamp === undefined &&\n (diagnosticsLevel !== 'off' || enriched.type === 'lifecycle:error' || enriched.type === 'diagnostic')\n ) {\n ;(enriched as any).timestamp = getNow()\n }\n if 
(diagnosticsLevel !== 'off' && enriched.runtimeLabel === undefined) {\n const runtimeLabel = yield* FiberRef.get(currentRuntimeLabel)\n if (runtimeLabel) {\n ;(enriched as any).runtimeLabel = runtimeLabel\n }\n }\n\n if (enriched.type === 'diagnostic' && (enriched as any).txnId === undefined) {\n const txnId = yield* FiberRef.get(currentTxnId)\n if (txnId) {\n ;(enriched as any).txnId = txnId\n }\n }\n // linkId is meaningful only for EffectOp events: avoid extra FiberRef reads on high-frequency events (state:update, etc.).\n if (\n diagnosticsLevel !== 'off' &&\n (enriched as any).type === 'trace:effectop' &&\n (enriched as any).linkId === undefined\n ) {\n const linkId = yield* FiberRef.get(EffectOpCore.currentLinkId)\n if (linkId) {\n ;(enriched as any).linkId = linkId\n }\n }\n\n if (sinks.length === 1) {\n yield* sinks[0]!.record(enriched)\n return\n }\n\n yield* Effect.forEach(sinks, (sink) => sink.record(enriched), { discard: true })\n })\n\n/**\n * Normalizes internal Debug.Event into RuntimeDebugEventRef:\n * - Allows Devtools / Runtime to consume Debug events uniformly.\n * - Does not change DebugSink behavior; provides a structured view only.\n */\nexport const toRuntimeDebugEventRef = (\n event: Event,\n options?: {\n readonly diagnosticsLevel?: DiagnosticsLevel\n readonly eventSeq?: number\n readonly resolveConvergeStaticIr?: (staticIrDigest: string) => ConvergeStaticIrExport | undefined\n readonly onMetaProjection?: (projection: {\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: JsonDowngradeReason\n }) => void\n },\n): RuntimeDebugEventRef | undefined => {\n const diagnosticsLevel = options?.diagnosticsLevel ?? 'full'\n if (diagnosticsLevel === 'off') {\n return undefined\n }\n\n const isLightLike = diagnosticsLevel === 'light' || diagnosticsLevel === 'sampled'\n\n const timestamp =\n typeof event.timestamp === 'number' && Number.isFinite(event.timestamp) ? 
event.timestamp : Date.now()\n\n const moduleIdRaw = (event as any).moduleId\n const moduleId = typeof moduleIdRaw === 'string' && moduleIdRaw.length > 0 ? moduleIdRaw : 'unknown'\n\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n\n const runtimeLabelRaw = (event as any).runtimeLabel\n const runtimeLabel = typeof runtimeLabelRaw === 'string' && runtimeLabelRaw.length > 0 ? runtimeLabelRaw : undefined\n\n const txnSeqRaw = (event as any).txnSeq\n const txnSeq =\n typeof txnSeqRaw === 'number' && Number.isFinite(txnSeqRaw) && txnSeqRaw >= 0 ? Math.floor(txnSeqRaw) : 0\n\n const txnIdRaw = (event as any).txnId\n const txnId =\n typeof txnIdRaw === 'string' && txnIdRaw.length > 0\n ? txnIdRaw\n : txnSeq > 0\n ? `${instanceId}::t${txnSeq}`\n : undefined\n\n const linkId = (() => {\n const linkIdRaw = (event as any).linkId\n if (typeof linkIdRaw === 'string' && linkIdRaw.length > 0) return linkIdRaw\n\n // trace:*: allow fallback extraction from data.meta.linkId (avoid UI diving into deep meta).\n if (typeof (event as any).type !== 'string' || !(event as any).type.startsWith('trace:')) {\n return undefined\n }\n\n const data: any = (event as any).data\n const meta: any = data?.meta\n const linkIdFromMeta = meta?.linkId\n if (typeof linkIdFromMeta === 'string' && linkIdFromMeta.length > 0) return linkIdFromMeta\n\n return undefined\n })()\n\n const eventSeqRaw = options?.eventSeq\n const eventSeq =\n typeof eventSeqRaw === 'number' && Number.isFinite(eventSeqRaw) && eventSeqRaw > 0\n ? 
Math.floor(eventSeqRaw)\n : nextEventSeq()\n const eventId = makeEventId(instanceId, eventSeq)\n\n const base = {\n eventId,\n eventSeq,\n moduleId,\n instanceId,\n runtimeLabel,\n txnSeq,\n txnId,\n linkId,\n timestamp,\n } as const\n\n let downgrade: DowngradeReason | undefined\n\n const withDowngrade = (ref: Omit<RuntimeDebugEventRef, 'downgrade'>): RuntimeDebugEventRef => {\n if (!downgrade) return ref\n return { ...ref, downgrade: { reason: downgrade } }\n }\n\n switch (event.type) {\n case 'module:init':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:init',\n })\n case 'module:destroy':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:destroy',\n })\n case 'lifecycle:phase': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:phase' }>\n const metaInput = isLightLike\n ? { type: 'lifecycle:phase', phase: e.phase, name: e.name }\n : { type: 'lifecycle:phase', phase: e.phase, name: e.name, payload: e.payload }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: e.name,\n meta: metaProjection.value,\n })\n }\n case 'action:dispatch': {\n const action: any = (event as any).action\n const actionTagRaw = (event as any).actionTag\n const tag = typeof actionTagRaw === 'string' && actionTagRaw.length > 0 ? actionTagRaw : (action?._tag ?? action?.type)\n const label = String(tag ?? 'action:dispatch')\n const labelNormalized = label.length > 0 ? label : 'unknown'\n const unknownAction = (event as any).unknownAction === true ? true : undefined\n const metaInput = isLightLike\n ? { actionTag: labelNormalized, ...(unknownAction ? { unknownAction: true } : {}) }\n : { action, ...(unknownAction ? 
{ unknownAction: true } : {}) }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (unknownAction) {\n downgrade = mergeDowngrade(downgrade, 'unknown')\n }\n return withDowngrade({\n ...base,\n kind: 'action',\n label: labelNormalized,\n meta: metaProjection.value,\n })\n }\n case 'state:update': {\n const e = event as Extract<Event, { readonly type: 'state:update' }>\n\n const resolveDirtySetRootPaths = (): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n\n const digest = e.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return undefined\n\n const rootIds = dirtySet.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const rawId of rootIds) {\n if (typeof rawId !== 'number' || !Number.isFinite(rawId)) continue\n const id = Math.floor(rawId)\n if (id < 0) continue\n const path = (fieldPaths as any)[id] as unknown\n if (!Array.isArray(path) || path.length === 0) continue\n if (!path.every((seg) => typeof seg === 'string' && seg.length > 0)) continue\n out.push(path as any)\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const dirtySetWithRootPaths = (() => {\n const rootPaths = resolveDirtySetRootPaths()\n if (!rootPaths) return e.dirtySet\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return e.dirtySet\n return { ...dirtySet, rootPaths }\n })()\n\n const metaInput = isLightLike\n ? {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n }\n : {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n traitSummary: e.traitSummary,\n replayEvent: e.replayEvent,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (txnId) {\n lastTxnByInstance.set(instanceId, { txnId, txnSeq })\n backfillPendingTxnAlignment(instanceId, { txnId, txnSeq })\n }\n return withDowngrade({\n ...base,\n kind: 'state',\n label: 'state:update',\n meta: metaProjection.value,\n })\n }\n case 'process:start':\n case 'process:stop':\n case 'process:restart':\n case 'process:trigger':\n case 'process:dispatch':\n case 'process:error': {\n const e = event as Extract<\n Event,\n {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n }\n >\n\n const ts2 = typeof e.timestampMs === 'number' && Number.isFinite(e.timestampMs) ? 
e.timestampMs : timestamp\n\n const metaInput = {\n identity: e.identity,\n severity: e.severity,\n eventSeq: e.eventSeq,\n timestampMs: e.timestampMs,\n trigger: e.trigger,\n dispatch: e.dispatch,\n error: e.error,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const errorSummary =\n e.type === 'process:error' || e.type === 'process:restart'\n ? (e.error as any as SerializableErrorSummary | undefined)\n : undefined\n\n return withDowngrade({\n ...base,\n timestamp: ts2,\n kind: 'process',\n label: e.type,\n meta: metaProjection.value,\n errorSummary,\n })\n }\n case 'lifecycle:error': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:error' }>\n const summary = toSerializableErrorSummary(e.cause)\n downgrade = mergeDowngrade(downgrade, summary.downgrade)\n const metaInput = isLightLike\n ? 
{ type: 'lifecycle:error', phase: e.phase, name: e.hook }\n : {\n type: 'lifecycle:error',\n phase: e.phase,\n name: e.hook,\n hook: e.hook,\n taskId: e.taskId,\n origin: e.origin,\n txnSeq: e.txnSeq,\n opSeq: e.opSeq,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'lifecycle:error',\n meta: metaProjection.value,\n errorSummary: summary.errorSummary,\n })\n }\n case 'diagnostic': {\n const e = event as Extract<Event, { readonly type: 'diagnostic' }>\n const metaInput = {\n code: e.code,\n severity: e.severity,\n message: e.message,\n hint: e.hint,\n actionTag: e.actionTag,\n kind: e.kind,\n trigger: e.trigger,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.code,\n meta: metaProjection.value,\n })\n }\n default: {\n if (typeof event.type !== 'string' || !event.type.startsWith('trace:')) {\n return undefined\n }\n\n // trace:txn-lane: slim evidence for Txn Lanes (lane/backlog/reasons), used for Devtools summary and offline export.\n if (event.type === 'trace:txn-lane') {\n const data: any = (event as any).data\n const evidence = data?.evidence ?? data\n\n const metaProjection = projectJsonValue(evidence)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const label =\n typeof evidence?.kind === 'string' && evidence.kind.length > 0 ? 
String(evidence.kind) : 'txn-lane'\n\n return withDowngrade({\n ...base,\n kind: 'txn-lane',\n label,\n meta: metaProjection.value,\n })\n }\n\n // trace:react-render / trace:react-selector: keep slim meta only (field trimming is handled by JsonValue projection).\n if (event.type === 'trace:react-render' || event.type === 'trace:react-selector') {\n const data: any = (event as any).data\n const metaProjection = projectJsonValue(\n isLightLike\n ? {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n }\n : {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n meta: data?.meta,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n const label =\n typeof data?.componentLabel === 'string' && data.componentLabel.length > 0\n ? data.componentLabel\n : event.type === 'trace:react-selector'\n ? 'react-selector'\n : 'react-render'\n const last = lastTxnByInstance.get(instanceId)\n const txnSeqFromMeta =\n typeof data?.meta?.txnSeq === 'number' && Number.isFinite(data.meta.txnSeq) && data.meta.txnSeq >= 0\n ? Math.floor(data.meta.txnSeq)\n : undefined\n const txnIdFromMeta =\n typeof data?.meta?.txnId === 'string' && data.meta.txnId.length > 0 ? data.meta.txnId : undefined\n const txnIdAligned = txnIdFromMeta ?? base.txnId ?? last?.txnId\n const txnSeqAligned = txnSeqFromMeta ?? 
(base.txnSeq > 0 ? base.txnSeq : (last?.txnSeq ?? base.txnSeq))\n const ref = withDowngrade({\n ...base,\n txnId: txnIdAligned,\n txnSeq: txnSeqAligned,\n kind: event.type === 'trace:react-selector' ? 'react-selector' : 'react-render',\n label,\n meta: metaProjection.value,\n })\n\n if (instanceId !== 'unknown' && (ref.txnId == null || ref.txnSeq <= 0)) {\n enqueuePendingTxnAlignment(instanceId, ref)\n }\n\n return ref\n }\n\n // trace:selector:eval: SelectorGraph evaluation evidence within commit (used for txn→selector→render causal chain).\n if (event.type === 'trace:selector:eval') {\n const data: any = (event as any).data\n const metaInput = {\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n changed: data?.changed,\n evalMs: data?.evalMs,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:exec-vm: Exec VM hit/miss evidence (049). In light tier we keep minimal summary fields.\n if (event.type === 'trace:exec-vm') {\n const data: any = (event as any).data\n const metaInput = {\n version: data?.version,\n stage: data?.stage,\n hit: data?.hit,\n reasonCode: data?.reasonCode ?? 
data?.reason,\n reasonDetail: data?.reasonDetail,\n execIrVersion: data?.execIrVersion,\n execIrHash: data?.execIrHash,\n serviceId: data?.serviceId,\n implId: data?.implId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:converge: converge evidence must be exportable (JsonValue hard gate) and trims heavy fields in light tier.\n if (event.type === 'trace:trait:converge') {\n const resolveDirtyRootPaths = (args: {\n readonly staticIrDigest: unknown\n readonly rootIds: unknown\n }): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n const digest = args.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const rootIds = args.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const id of rootIds) {\n if (typeof id !== 'number' || !Number.isFinite(id)) continue\n const idx = Math.floor(id)\n if (idx < 0 || idx >= fieldPaths.length) continue\n const path = fieldPaths[idx]\n if (Array.isArray(path)) {\n out.push(path as any)\n }\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const enrichDirtyRootPaths = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n if (!dirty || typeof dirty !== 'object' || Array.isArray(dirty)) return value\n\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n if (!dirtyRootPaths) return value\n\n return {\n ...anyValue,\n dirty: {\n ...(dirty as any),\n rootPaths: dirtyRootPaths,\n },\n } as JsonValue\n }\n\n const stripHeavyLight = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? { reason: (dirty as any).reason } : null),\n ...(Array.isArray((dirty as any).rootIds) ? { rootIds: (dirty as any).rootIds } : null),\n ...(typeof (dirty as any).rootIdsTruncated === 'boolean'\n ? { rootIdsTruncated: (dirty as any).rootIdsTruncated }\n : null),\n ...(dirtyRootPaths ? { rootPaths: dirtyRootPaths } : null),\n }\n : undefined\n\n const { top3, dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const stripHeavySampled = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? 
{ reason: (dirty as any).reason } : null),\n }\n : undefined\n\n const { dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:converge' }>).data\n const metaInput =\n diagnosticsLevel === 'light'\n ? stripHeavyLight(data)\n : diagnosticsLevel === 'sampled'\n ? stripHeavySampled(data)\n : enrichDirtyRootPaths(data)\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:converge',\n label: 'trait:converge',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:check: validation diagnostics must be exportable and stay slim in light tier (keep key fields).\n if (event.type === 'trace:trait:check') {\n const stripHeavy = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n const anyValue = value as any\n const degraded = anyValue.degraded\n const degradedSlim =\n degraded && typeof degraded === 'object' && !Array.isArray(degraded)\n ? { kind: (degraded as any).kind }\n : undefined\n\n const { degraded: _degraded, ...rest } = anyValue\n return (degradedSlim ? { ...rest, degraded: degradedSlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:check' }>).data\n const metaInput = isLightLike ? 
stripHeavy(data) : data\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:check',\n label: 'trait:check',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:validate: validation decision summary must be exportable and slim in light tier (no heavy fields by default).\n if (event.type === 'trace:trait:validate') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:validate' }>).data\n const metaProjection = projectJsonValue(data)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:validate',\n label: 'trait:validate',\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits: final traits snapshot must be exportable and slim in light tier (digest/count).\n if (event.type === 'trace:module:traits') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n digest: data?.digest,\n count: data?.count,\n }\n : {\n digest: data?.digest,\n count: data?.count,\n traits: data?.traits,\n provenanceIndex: data?.provenanceIndex,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits:conflict: conflict details must be exportable; avoid relying on truncated lifecycle:error messages.\n if (event.type === 'trace:module:traits:conflict') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n conflictCount: data?.conflictCount,\n traitIds: data?.traitIds,\n }\n : {\n conflictCount: data?.conflictCount,\n conflicts: data?.conflicts,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:descriptor: keep key anchors even in light tier (avoid data being fully trimmed).\n if (event.type === 'trace:module:descriptor') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n id: data?.id,\n traits: data?.traits,\n source: data?.source,\n }\n : { data }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:effectop: keep slim op meta and prefer EffectOp.meta.moduleId when present.\n if (event.type === 'trace:effectop') {\n const data: any = (event as any).data\n const opMeta: any = data?.meta\n const opKind = (data?.kind ?? 'service') as RuntimeDebugEventKind\n const label = typeof data?.name === 'string' ? data.name : 'effectop'\n const moduleId2 = typeof opMeta?.moduleId === 'string' ? opMeta.moduleId : moduleId\n const txnId2 = typeof opMeta?.txnId === 'string' && opMeta.txnId.length > 0 ? opMeta.txnId : base.txnId\n const txnSeq2 =\n typeof opMeta?.txnSeq === 'number' && Number.isFinite(opMeta.txnSeq) && opMeta.txnSeq >= 0\n ? Math.floor(opMeta.txnSeq)\n : base.txnSeq\n\n const metaInput = isLightLike\n ? 
{\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n meta: opMeta,\n }\n : {\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n payload: data?.payload,\n meta: opMeta,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n moduleId: moduleId2,\n txnId: txnId2,\n txnSeq: txnSeq2,\n kind: opKind,\n label,\n meta: metaProjection.value,\n })\n }\n\n // Other trace:* events: categorize as devtools and trim meta by tier.\n const metaProjection = projectJsonValue(\n isLightLike\n ? {\n data: undefined,\n }\n : {\n data: (event as any).data,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n }\n}\n","export type JsonValue =\n | null\n | boolean\n | number\n | string\n | ReadonlyArray<JsonValue>\n | { readonly [key: string]: JsonValue }\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport const isJsonValue = (input: unknown): input is JsonValue => {\n const seen = new WeakSet<object>()\n\n const loop = (value: unknown, depth: number): value is JsonValue => {\n if (depth > 64) return false\n if (value === null) return true\n\n switch (typeof value) {\n case 'string':\n case 'boolean':\n return true\n case 'number':\n return Number.isFinite(value)\n case 'object': {\n if (Array.isArray(value)) {\n if (seen.has(value)) return false\n seen.add(value)\n for (const item of value) {\n if (!loop(item, depth + 1)) return false\n }\n return true\n }\n\n if (!isPlainRecord(value)) return false\n if (seen.has(value)) return false\n seen.add(value)\n\n for (const v of 
Object.values(value)) {\n if (!loop(v, depth + 1)) return false\n }\n\n return true\n }\n default:\n return false\n }\n }\n\n return loop(input, 0)\n}\n\nexport interface JsonValueProjectionStats {\n readonly dropped: number\n readonly oversized: number\n readonly nonSerializable: number\n}\n\nexport interface JsonValueProjection {\n readonly value: JsonValue\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: DowngradeReason\n}\n\nexport interface JsonValueProjectOptions {\n readonly maxDepth?: number\n readonly maxObjectKeys?: number\n readonly maxArrayLength?: number\n readonly maxStringLength?: number\n readonly maxJsonBytes?: number\n readonly oversizedPreviewBytes?: number\n}\n\nconst defaultOptions: Required<JsonValueProjectOptions> = {\n maxDepth: 6,\n maxObjectKeys: 32,\n maxArrayLength: 32,\n maxStringLength: 256,\n maxJsonBytes: 4 * 1024,\n oversizedPreviewBytes: 256,\n}\n\nconst truncateString = (value: string, maxLen: number, stats: MutableStats): string => {\n if (value.length <= maxLen) return value\n stats.oversized += 1\n return value.slice(0, maxLen)\n}\n\ntype MutableStats = {\n dropped: number\n oversized: number\n nonSerializable: number\n}\n\nconst mergeDowngrade = (current: DowngradeReason | undefined, next: DowngradeReason): DowngradeReason => {\n if (!current) return next\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\nfunction isPlainRecord(value: unknown): value is Record<string, unknown> {\n if (typeof value !== 'object' || value === null) return false\n const proto = Object.getPrototypeOf(value)\n return proto === Object.prototype || proto === null\n}\n\nconst asNumber = (value: number, stats: MutableStats): JsonValue => {\n if (Number.isFinite(value)) return value\n stats.nonSerializable += 1\n return String(value)\n}\n\nconst toJsonValueInternal = (\n input: unknown,\n options: 
Required<JsonValueProjectOptions>,\n stats: MutableStats,\n seen: WeakSet<object>,\n depth: number,\n): JsonValue => {\n if (input === null) return null\n\n switch (typeof input) {\n case 'string':\n return truncateString(input, options.maxStringLength, stats)\n case 'number':\n return asNumber(input, stats)\n case 'boolean':\n return input\n case 'bigint':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'symbol':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'function':\n stats.nonSerializable += 1\n return '[Function]'\n case 'undefined':\n stats.dropped += 1\n return null\n }\n\n // object\n if (depth >= options.maxDepth) {\n stats.oversized += 1\n return '[Truncated]'\n }\n\n if (input instanceof Date) {\n return input.toISOString()\n }\n\n if (input instanceof Error) {\n stats.nonSerializable += 1\n return {\n name: truncateString(input.name, options.maxStringLength, stats),\n message: truncateString(input.message, options.maxStringLength, stats),\n }\n }\n\n if (typeof input === 'object') {\n if (seen.has(input)) {\n stats.nonSerializable += 1\n return '[Circular]'\n }\n seen.add(input)\n }\n\n if (Array.isArray(input)) {\n const out: Array<JsonValue> = []\n const limit = Math.min(input.length, options.maxArrayLength)\n for (let i = 0; i < limit; i++) {\n out.push(toJsonValueInternal(input[i], options, stats, seen, depth + 1))\n }\n if (input.length > limit) {\n stats.oversized += 1\n out.push(`[...${input.length - limit} more]`)\n }\n return out\n }\n\n if (!isPlainRecord(input)) {\n stats.nonSerializable += 1\n return truncateString(String(input), options.maxStringLength, stats)\n }\n\n const entries = Object.entries(input)\n const limit = Math.min(entries.length, options.maxObjectKeys)\n const out: Record<string, JsonValue> = {}\n\n for (let i = 0; i < limit; i++) {\n const [rawKey, rawValue] = entries[i]!\n const key = 
truncateString(rawKey, options.maxStringLength, stats)\n if (rawValue === undefined) {\n stats.dropped += 1\n continue\n }\n out[key] = toJsonValueInternal(rawValue, options, stats, seen, depth + 1)\n }\n\n if (entries.length > limit) {\n stats.oversized += 1\n out.__truncatedKeys = entries.length - limit\n }\n\n return out\n}\n\nexport const projectJsonValue = (input: unknown, options?: JsonValueProjectOptions): JsonValueProjection => {\n const resolved: Required<JsonValueProjectOptions> = { ...defaultOptions, ...(options ?? {}) }\n const stats: MutableStats = { dropped: 0, oversized: 0, nonSerializable: 0 }\n const seen = new WeakSet<object>()\n\n let downgrade: DowngradeReason | undefined\n const value = toJsonValueInternal(input, resolved, stats, seen, 0)\n\n if (stats.nonSerializable > 0) {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n }\n if (stats.oversized > 0) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n }\n\n // Hard gate: ensure JSON.stringify never throws and respect the max byte budget.\n try {\n const json = JSON.stringify(value)\n if (json.length > resolved.maxJsonBytes) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n const preview = json.slice(0, Math.min(resolved.oversizedPreviewBytes, resolved.maxJsonBytes))\n return {\n value: {\n _tag: 'oversized',\n bytes: json.length,\n preview,\n },\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized + 1,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n }\n } catch {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n return {\n value: '[Unserializable]',\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable + 1,\n },\n downgrade,\n }\n }\n\n return {\n value,\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n}\n","// EffectOp core model and middleware composition logic.\n// For higher-level Runtime 
/ Devtools integration, see:\n// specs/000-module-traits-runtime/references/effectop-and-middleware.md\n\nimport { Context, Effect, FiberRef } from 'effect'\n\n/**\n * currentLinkId:\n * - Stores the current operation chain id (linkId) in a FiberRef.\n * - Used to correlate multiple boundary ops within the same chain (can be shared across modules via the same FiberRef).\n */\nexport const currentLinkId = FiberRef.unsafeMake<string | undefined>(undefined)\n\n/**\n * OperationPolicy:\n * - Local policy markers (intent only; no rule logic attached).\n *\n * Constraints (enforced by Runtime/middleware together):\n * - Only observation-only capabilities (Observer) may be disabled; global guards must not be disabled.\n */\nexport interface OperationPolicy {\n readonly disableObservers?: boolean\n}\n\n/**\n * OperationRejected:\n * - Unified failure result when a guard rejects execution.\n * - Semantics: explicit failure with no business side effects (rejection must happen before user code executes).\n */\nexport interface OperationRejected {\n readonly _tag: 'OperationRejected'\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}\n\n/**\n * OperationError:\n * - Any boundary operation executed via EffectOp may be explicitly rejected by Guard middleware.\n * - Therefore, the middleware error channel must allow OperationRejected to be added.\n */\nexport type OperationError<E> = E | OperationRejected\n\nexport const makeOperationRejected = (params: {\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}): OperationRejected => ({\n _tag: 'OperationRejected',\n message: params.message,\n kind: params.kind,\n name: params.name,\n linkId: params.linkId,\n details: params.details,\n})\n\n/**\n * EffectOp: a unified representation of an Effect execution at an \"observable boundary\".\n *\n * - Out 
/ Err / Env are the generic parameters of the underlying Effect.\n * - meta carries structured context needed by Devtools / Middleware.\n */\nexport interface EffectOp<Out = unknown, Err = unknown, Env = unknown> {\n readonly id: string\n readonly kind:\n | 'action'\n | 'flow'\n | 'state'\n | 'service'\n | 'lifecycle'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'devtools'\n readonly name: string\n readonly payload?: unknown\n readonly meta?: {\n /**\n * linkId:\n * - Operation chain id: multiple boundary ops in the same chain must share it.\n * - Runtime ensures this field is populated on all boundary ops.\n */\n linkId?: string\n moduleId?: string\n instanceId?: string\n runtimeLabel?: string\n txnId?: string\n txnSeq?: number\n opSeq?: number\n fieldPath?: string\n deps?: ReadonlyArray<string>\n from?: string\n to?: string\n traitNodeId?: string\n stepId?: string\n resourceId?: string\n key?: unknown\n trace?: ReadonlyArray<string>\n tags?: ReadonlyArray<string>\n policy?: OperationPolicy\n // Reserved extension slot for middleware/devtools to attach extra information.\n readonly [k: string]: unknown\n }\n readonly effect: Effect.Effect<Out, Err, Env>\n}\n\n/**\n * Middleware: the general middleware model for observing / wrapping / guarding EffectOps.\n */\nexport type Middleware = <A, E, R>(op: EffectOp<A, E, R>) => Effect.Effect<A, OperationError<E>, R>\n\nexport type MiddlewareStack = ReadonlyArray<Middleware>\n\n/**\n * EffectOpMiddlewareEnv:\n * - A Service in Effect Env that carries the current Runtime's MiddlewareStack.\n * - Injected by Runtime.ts when constructing a ManagedRuntime.\n * - Runtime code (e.g. 
StateTrait.install) uses this Service to decide which MiddlewareStack to use.\n */\nexport interface EffectOpMiddlewareEnv {\n readonly stack: MiddlewareStack\n}\n\nexport class EffectOpMiddlewareTag extends Context.Tag('Logix/EffectOpMiddleware')<\n EffectOpMiddlewareTag,\n EffectOpMiddlewareEnv\n>() {}\n\n/**\n * composeMiddleware:\n * - Composes Middleware from \"outer to inner\" in declaration order:\n * - stack = [mw1, mw2] => mw1 -> mw2 -> effect -> mw2 -> mw1\n * - Matches the reduceRight example in the reference docs.\n */\nexport const composeMiddleware = (stack: MiddlewareStack): Middleware => {\n return <A, E, R>(op: EffectOp<A, E, R>): Effect.Effect<A, OperationError<E>, R> =>\n stack.reduceRight<Effect.Effect<A, OperationError<E>, R>>(\n (eff, mw) => mw({ ...op, effect: eff } as any) as any,\n op.effect as Effect.Effect<A, OperationError<E>, R>,\n )\n}\n\n/**\n * runWithMiddleware:\n * - Executes a given EffectOp with a MiddlewareStack according to the composition rules.\n * - If the stack is empty, returns op.effect directly.\n */\nexport const runWithMiddleware = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> => {\n return Effect.gen(function* () {\n const existing = yield* FiberRef.get(currentLinkId)\n const metaLinkId = (op.meta as any)?.linkId\n const linkId = typeof metaLinkId === 'string' && metaLinkId.length > 0 ? metaLinkId : (existing ?? op.id)\n\n const nextOp: EffectOp<A, E, R> = {\n ...op,\n meta: {\n ...(op.meta ?? {}),\n linkId,\n },\n }\n\n const program = stack.length ? 
composeMiddleware(stack)(nextOp) : nextOp.effect\n\n // linkId is created at the boundary root and reused for nested ops (the FiberRef is the global single source of truth).\n // NOTE: middleware may explicitly reject with OperationRejected.\n return yield* Effect.locally(currentLinkId, linkId)(program as any)\n }) as Effect.Effect<A, E, R>\n}\n","import { FiberRef, Layer } from 'effect'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\n\n/**\n * ConvergeStaticIrCollector:\n * - Consumer interface for collecting ConvergeStaticIrExport (de-duplicated/indexed by staticIrDigest); an internal injectable capability.\n * - Typical implementations: DevtoolsHub (process-level) / EvidenceCollector (RunSession-level).\n *\n * Notes:\n * - Uses FiberRef<ReadonlyArray<...>> to allow appending multiple collectors within the same scope (similar to Debug sinks).\n * - ModuleRuntime reads the FiberRef value during installation and captures it in a closure, avoiding Env lookup on hot paths.\n */\nexport interface ConvergeStaticIrCollector {\n readonly register: (ir: ConvergeStaticIrExport) => void\n}\n\nexport const currentConvergeStaticIrCollectors = FiberRef.unsafeMake<ReadonlyArray<ConvergeStaticIrCollector>>([])\n\nexport const appendConvergeStaticIrCollectors = (\n collectors: ReadonlyArray<ConvergeStaticIrCollector>,\n): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentConvergeStaticIrCollectors, (current) => [\n ...current,\n ...collectors,\n ]) as Layer.Layer<any, never, never>\n","import { Effect, SubscriptionRef } from 'effect'\nimport {\n getFieldPathId,\n dirtyPathsToRootIds,\n normalizeFieldPath,\n normalizePatchReason,\n type FieldPathIdRegistry,\n type DirtyAllReason,\n type DirtySet,\n type FieldPath,\n type FieldPathId,\n type PatchReason,\n} from '../../field-path.js'\n\nexport type { PatchReason } from '../../field-path.js'\n\nexport type StatePatchPath = string | FieldPath | FieldPathId\n\nexport interface 
TxnPatchRecord {\n readonly opSeq: number\n readonly pathId?: FieldPathId\n readonly reason: PatchReason\n readonly stepId?: number\n readonly traitNodeId?: string\n readonly from?: unknown\n readonly to?: unknown\n}\n\nexport interface StateTxnOrigin {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\nexport type StateTxnInstrumentationLevel = 'full' | 'light'\n\nexport interface StateTxnConfig {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly instrumentation?: StateTxnInstrumentationLevel\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n /**\n * Whether to capture initial/final state snapshots:\n * - enabled by default in full mode\n * - disabled by default in light mode\n */\n readonly captureSnapshots?: boolean\n /**\n * Time source function (useful for injecting a fake clock in tests).\n */\n readonly now?: () => number\n}\n\nexport interface StateTransaction<S> {\n readonly txnId: string\n readonly txnSeq: number\n readonly origin: StateTxnOrigin\n readonly startedAt: number\n readonly endedAt: number\n readonly durationMs: number\n readonly dirtySet: DirtySet\n readonly patchCount: number\n readonly patchesTruncated: boolean\n readonly patchesTruncatedReason?: 'max_patches'\n readonly initialStateSnapshot?: S\n readonly finalStateSnapshot?: S\n readonly patches: ReadonlyArray<TxnPatchRecord>\n readonly moduleId?: string\n readonly instanceId?: string\n}\n\n/**\n * StateTxnContext:\n * - Holds transaction state within a single ModuleRuntime.\n * - current is the active transaction (undefined when none).\n *\n * Notes:\n * - The current implementation supports a single active transaction; queueing strategies are added later (US1).\n * - To avoid premature coupling, Context provides only minimal begin/update/record/commit primitives; entry points\n * (dispatch/source-refresh/devtools) are controlled by higher layers.\n */\nexport interface StateTxnRuntimeConfig {\n readonly 
moduleId?: string\n readonly instanceId?: string\n readonly instrumentation: StateTxnInstrumentationLevel\n readonly captureSnapshots: boolean\n readonly now: () => number\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n}\n\nexport interface StateTxnContext<S> {\n readonly config: StateTxnRuntimeConfig\n current?: StateTxnState<S>\n nextTxnSeq: number\n readonly scratch: StateTxnState<S>\n /**\n * recordPatch:\n * - makeContext selects the implementation based on instrumentation (full/light).\n * - Avoids branching per patch record inside hot loops (051: branch relocation).\n */\n recordPatch: (\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\ninterface StateTxnState<S> {\n txnId: string\n txnSeq: number\n origin: StateTxnOrigin\n startedAt: number\n baseState: S\n draft: S\n initialStateSnapshot?: S\n readonly patches: Array<TxnPatchRecord>\n patchCount: number\n\tpatchesTruncated: boolean\n\tfieldPathIdRegistry?: FieldPathIdRegistry\n /**\n * dirtyPathIds:\n * - The set of FieldPathIds for all trackable writes within the transaction window (hot path records only integer anchors).\n * - Any non-mappable/non-trackable write must explicitly degrade to dirtyAll (dirtyAllReason); no silent fallback.\n * - Independent of instrumentation: light mode does not keep patches, but still maintains dirtyPathIds/dirtyAllReason for low-cost semantics (e.g. scheduling/diagnostics).\n */\n readonly dirtyPathIds: Set<FieldPathId>\n dirtyAllReason?: DirtyAllReason\n}\n\nconst defaultNow = () => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n}\n\nexport const makeContext = <S>(config: StateTxnConfig): StateTxnContext<S> => {\n const instrumentation: StateTxnInstrumentationLevel = config.instrumentation ?? 'full'\n\n const captureSnapshots = config.captureSnapshots ?? 
instrumentation === 'full'\n\n const scratch: StateTxnState<S> = {\n txnId: '',\n txnSeq: 0,\n origin: { kind: 'unknown' },\n startedAt: 0,\n baseState: undefined as any,\n draft: undefined as any,\n initialStateSnapshot: undefined,\n patches: [],\n patchCount: 0,\n patchesTruncated: false,\n dirtyPathIds: new Set(),\n dirtyAllReason: undefined,\n }\n\n const ctx: StateTxnContext<S> = {\n config: {\n instrumentation,\n captureSnapshots,\n now: config.now ?? defaultNow,\n moduleId: config.moduleId,\n instanceId: config.instanceId,\n getFieldPathIdRegistry: config.getFieldPathIdRegistry,\n },\n current: undefined,\n nextTxnSeq: 0,\n scratch,\n recordPatch: () => {},\n }\n\n const recordPatchLight = (\n path: StatePatchPath | undefined,\n _reason: PatchReason,\n _from?: unknown,\n _to?: unknown,\n _traitNodeId?: string,\n _stepId?: number,\n ): void => {\n const state = ctx.current\n if (!state) return\n state.patchCount += 1\n resolveAndRecordDirtyPathId(state, path, _reason)\n }\n\n const MAX_PATCHES_FULL = 256\n\n const recordPatchFull = (\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ): void => {\n const state = ctx.current\n if (!state) return\n state.patchCount += 1\n const opSeq = state.patchCount - 1\n const pathId = resolveAndRecordDirtyPathId(state, path, reason)\n if (state.patchesTruncated || state.patches.length >= MAX_PATCHES_FULL) {\n state.patchesTruncated = true\n return\n }\n state.patches.push({\n opSeq,\n ...(pathId != null ? { pathId } : null),\n ...(from !== undefined ? { from } : null),\n ...(to !== undefined ? { to } : null),\n reason: normalizePatchReason(reason),\n ...(traitNodeId ? { traitNodeId } : null),\n ...(typeof stepId === 'number' && Number.isFinite(stepId) && stepId >= 0 ? { stepId: Math.floor(stepId) } : null),\n })\n }\n\n ctx.recordPatch = instrumentation === 'full' ? 
recordPatchFull : recordPatchLight\n\n return ctx\n}\n\n/**\n * Begins a new transaction:\n * - Default behavior: overrides the current transaction (queueing/nesting are refined in US1).\n * - initialState is provided by the caller (typically the current SubscriptionRef snapshot).\n */\nexport const beginTransaction = <S>(ctx: StateTxnContext<S>, origin: StateTxnOrigin, initialState: S): void => {\n const { config } = ctx\n const now = config.now\n const startedAt = now()\n\n ctx.nextTxnSeq += 1\n const txnSeq = ctx.nextTxnSeq\n const anchor = config.instanceId ?? 'unknown'\n const txnId = `${anchor}::t${txnSeq}`\n\n const initialSnapshot = config.captureSnapshots ? initialState : undefined\n\n const state = ctx.scratch\n state.txnId = txnId\n state.txnSeq = txnSeq\n state.origin = origin\n state.startedAt = startedAt\n state.baseState = initialState\n state.draft = initialState\n state.initialStateSnapshot = initialSnapshot\n state.patches.length = 0\n state.patchCount = 0\n state.patchesTruncated = false\n state.fieldPathIdRegistry = ctx.config.getFieldPathIdRegistry?.()\n state.dirtyPathIds.clear()\n state.dirtyAllReason = undefined\n ctx.current = state\n}\n\nconst resolveAndRecordDirtyPathId = <S>(\n state: StateTxnState<S>,\n path: StatePatchPath | undefined,\n reason: PatchReason,\n): FieldPathId | undefined => {\n if (state.dirtyAllReason) return undefined\n\n if (path === undefined) {\n state.dirtyAllReason = 'customMutation'\n return undefined\n }\n\n if (path === '*') {\n state.dirtyAllReason = 'unknownWrite'\n return undefined\n }\n\n const registry = state.fieldPathIdRegistry\n if (!registry) {\n state.dirtyAllReason = reason === 'reducer' ? 
'customMutation' : 'fallbackPolicy'\n return undefined\n }\n\n let id: FieldPathId | undefined\n\n if (typeof path === 'number') {\n if (!Number.isFinite(path)) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n const n = Math.floor(path)\n if (n < 0) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n if (!registry.fieldPaths[n]) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n id = n\n } else if (typeof path === 'string') {\n const direct = registry.pathStringToId?.get(path)\n if (direct == null) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n id = direct\n } else {\n const normalized = normalizeFieldPath(path)\n if (!normalized) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n\n const next = getFieldPathId(registry, normalized)\n if (next == null) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n id = next\n }\n\n state.dirtyPathIds.add(id)\n return id\n}\n\n/**\n * Updates the draft state:\n * - next is the latest draft.\n * - When instrumentation is full, patch info is recorded into the transaction via recordPatch.\n */\nexport const updateDraft = <S>(ctx: StateTxnContext<S>, next: S): void => {\n const state = ctx.current\n if (!state) {\n // No active transaction: ignore patch info; higher layers decide whether to start an implicit transaction.\n return\n }\n\n state.draft = next\n}\n\n/**\n * recordPatch:\n * - In full mode, appends a Patch.\n * - In light mode, silently ignores to avoid extra overhead.\n */\nexport const recordPatch = <S>(\n ctx: StateTxnContext<S>,\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n): void => {\n ctx.recordPatch(path, reason, from, to, traitNodeId, stepId)\n}\n\n/**\n * Commits the transaction:\n * - Writes the final draft to SubscriptionRef exactly once.\n * - Returns the aggregated StateTransaction; returns 
undefined if there is no active transaction.\n *\n * Notes:\n * - Emitting Debug/Devtools events is decided by the caller based on the returned transaction.\n * - This module does not depend on DebugSink to avoid circular dependencies in core.\n */\nexport const commit = <S>(\n ctx: StateTxnContext<S>,\n stateRef: SubscriptionRef.SubscriptionRef<S>,\n): Effect.Effect<StateTransaction<S> | undefined> =>\n Effect.gen(function* () {\n const state = ctx.current\n if (!state) {\n return undefined\n }\n\n const { config } = ctx\n const now = config.now\n\n const finalState = state.draft\n\n // 0 commit: when there is no change, do not write SubscriptionRef and do not emit state:update.\n if (Object.is(finalState, state.baseState)) {\n ctx.current = undefined\n return undefined\n }\n\n // Single write to SubscriptionRef: ensures only one external state commit + subscription notification.\n yield* SubscriptionRef.set(stateRef, finalState)\n\n const endedAt = now()\n const durationMs = Math.max(0, endedAt - state.startedAt)\n\n const registry = state.fieldPathIdRegistry\n const dirtySet: DirtySet =\n registry == null\n ? {\n dirtyAll: true,\n reason: state.dirtyAllReason ?? 'fallbackPolicy',\n rootIds: [],\n rootCount: 0,\n keySize: 0,\n keyHash: 0,\n }\n : dirtyPathsToRootIds({\n dirtyPaths: state.dirtyPathIds,\n registry,\n dirtyAllReason: state.dirtyAllReason,\n })\n\n const transaction: StateTransaction<S> = {\n txnId: state.txnId,\n txnSeq: state.txnSeq,\n origin: state.origin,\n startedAt: state.startedAt,\n endedAt,\n durationMs,\n dirtySet,\n patchCount: state.patchCount,\n patchesTruncated: state.patchesTruncated,\n ...(state.patchesTruncated ? { patchesTruncatedReason: 'max_patches' } : null),\n initialStateSnapshot: state.initialStateSnapshot,\n finalStateSnapshot: config.captureSnapshots ? finalState : undefined,\n patches: ctx.config.instrumentation === 'full' ? 
state.patches.slice() : [],\n moduleId: config.moduleId,\n instanceId: config.instanceId,\n }\n\n // Clear the current transaction.\n ctx.current = undefined\n\n return transaction\n })\n\n/**\n * abort:\n * - Terminates the current transaction and clears context.\n * - Does not write to stateRef.\n * - Higher layers decide whether to record diagnostics/observability events.\n */\nexport const abort = <S>(ctx: StateTxnContext<S>): void => {\n ctx.current = undefined\n}\n","import { Context, Effect, Option } from 'effect'\nimport { isDevEnv } from './env.js'\n\nexport type OverrideScope = 'builtin' | 'runtime_default' | 'runtime_module' | 'provider' | 'instance'\n\nexport type RuntimeServiceOverride = {\n readonly implId?: string\n readonly notes?: string\n}\n\n/**\n * RuntimeServicesOverrides: a serializable override for runtime service implementation selection.\n *\n * - The key is a stable serviceId.\n * - The value may only contain serializable fields (no functions/closures) so evidence can be produced and explained.\n */\nexport type RuntimeServicesOverrides = Readonly<Record<string, RuntimeServiceOverride>>\n\nexport interface RuntimeServicesRuntimeConfig {\n /** Runtime-level default overrides (runtime_default). */\n readonly services?: RuntimeServicesOverrides\n /** Per-module delta overrides by moduleId (runtime_module). */\n readonly servicesByModuleId?: Readonly<Record<string, RuntimeServicesOverrides>>\n}\n\nclass RuntimeServicesRuntimeConfigTagImpl extends Context.Tag('@logixjs/core/RuntimeServicesRuntimeConfig')<\n RuntimeServicesRuntimeConfigTagImpl,\n RuntimeServicesRuntimeConfig\n>() {}\n\nexport const RuntimeServicesRuntimeConfigTag = RuntimeServicesRuntimeConfigTagImpl\n\nexport interface RuntimeServicesProviderOverrides {\n /** Provider-scoped default overrides (provider). */\n readonly services?: RuntimeServicesOverrides\n /** Provider-scoped per-module delta overrides by moduleId (provider). 
*/\n readonly servicesByModuleId?: Readonly<Record<string, RuntimeServicesOverrides>>\n}\n\nclass RuntimeServicesProviderOverridesTagImpl extends Context.Tag('@logixjs/core/RuntimeServicesProviderOverrides')<\n RuntimeServicesProviderOverridesTagImpl,\n RuntimeServicesProviderOverrides\n>() {}\n\nexport const RuntimeServicesProviderOverridesTag = RuntimeServicesProviderOverridesTagImpl\n\nclass RuntimeServicesInstanceOverridesTagImpl extends Context.Tag('@logixjs/core/RuntimeServicesInstanceOverrides')<\n RuntimeServicesInstanceOverridesTagImpl,\n RuntimeServicesOverrides\n>() {}\n\nexport const RuntimeServicesInstanceOverridesTag = RuntimeServicesInstanceOverridesTagImpl\n\n/**\n * FullCutoverGateMode: controls whether fallbacks are allowed during assembly.\n *\n * - trial: allows fallbacks (for trial-run / comparison / diagnostics).\n * - fullCutover: forbids fallbacks (any fallback or missing binding fails).\n *\n * Default: trial. If you need a strict gate, set it to fullCutover explicitly in the public Runtime defaults.\n */\nexport type FullCutoverGateMode = 'trial' | 'fullCutover'\n\nclass FullCutoverGateModeTagImpl extends Context.Tag('@logixjs/core/FullCutoverGateMode')<\n FullCutoverGateModeTagImpl,\n FullCutoverGateMode\n>() {}\n\nexport const FullCutoverGateModeTag = FullCutoverGateModeTagImpl\n\nexport interface RuntimeServiceBinding {\n readonly serviceId: string\n readonly implId?: string\n readonly implVersion?: string\n readonly scope: OverrideScope\n readonly overridden: boolean\n readonly notes?: string\n}\n\nexport interface RuntimeServicesEvidence {\n readonly moduleId?: string\n readonly instanceId: string\n readonly scope: OverrideScope\n readonly bindings: ReadonlyArray<RuntimeServiceBinding>\n readonly overridesApplied: ReadonlyArray<string>\n}\n\nconst ORDERED_SCOPES: ReadonlyArray<OverrideScope> = [\n 'builtin',\n 'runtime_default',\n 'runtime_module',\n 'provider',\n 'instance',\n]\n\nconst maxScope = (a: OverrideScope, b: 
OverrideScope): OverrideScope => {\n const ai = ORDERED_SCOPES.indexOf(a)\n const bi = ORDERED_SCOPES.indexOf(b)\n return (ai >= bi ? a : b) as OverrideScope\n}\n\nconst isPlainRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nconst normalizeOverride = (value: unknown): RuntimeServiceOverride | undefined => {\n if (!isPlainRecord(value)) return undefined\n\n const implIdRaw = value.implId\n const notesRaw = value.notes\n\n return {\n implId: typeof implIdRaw === 'string' && implIdRaw.length > 0 ? implIdRaw : undefined,\n notes: typeof notesRaw === 'string' && notesRaw.length > 0 ? notesRaw : undefined,\n }\n}\n\nexport const resolveRuntimeServicesOverrides = (args: {\n readonly moduleId: string | undefined\n}): Effect.Effect<\n {\n readonly runtimeDefault?: RuntimeServicesOverrides\n readonly runtimeModule?: RuntimeServicesOverrides\n readonly provider?: RuntimeServicesOverrides\n readonly providerModule?: RuntimeServicesOverrides\n readonly instance?: RuntimeServicesOverrides\n },\n never,\n any\n> =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(RuntimeServicesRuntimeConfigTag)\n const providerOverridesOpt = yield* Effect.serviceOption(RuntimeServicesProviderOverridesTag)\n const instanceOverridesOpt = yield* Effect.serviceOption(RuntimeServicesInstanceOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(providerOverridesOpt) ? providerOverridesOpt.value : undefined\n const instanceOverrides = Option.isSome(instanceOverridesOpt) ? instanceOverridesOpt.value : undefined\n\n const moduleId = args.moduleId\n\n const runtimeModule =\n moduleId && runtimeConfig?.servicesByModuleId ? runtimeConfig.servicesByModuleId[moduleId] : undefined\n\n const providerModule =\n moduleId && providerOverrides?.servicesByModuleId ? 
providerOverrides.servicesByModuleId[moduleId] : undefined\n\n return {\n runtimeDefault: runtimeConfig?.services,\n runtimeModule,\n provider: providerOverrides?.services,\n providerModule,\n instance: instanceOverrides,\n }\n })\n\nexport interface RuntimeServiceImpl<Service> {\n readonly implId: string\n readonly implVersion: string\n readonly make: Effect.Effect<Service, never, any>\n readonly notes?: string\n}\n\n/**\n * RuntimeServicesRegistry:\n * - Used to inject additional serviceId → impls (e.g. implementations provided by core-ng).\n * - A non-serializable contract used only during assembly; selection evidence is still carried by\n * RuntimeServicesOverrides + RuntimeServicesEvidence.\n */\nexport interface RuntimeServicesRegistry {\n readonly implsByServiceId: Readonly<Record<string, ReadonlyArray<RuntimeServiceImpl<any>>>>\n}\n\nclass RuntimeServicesRegistryTagImpl extends Context.Tag('@logixjs/core/RuntimeServicesRegistry')<\n RuntimeServicesRegistryTagImpl,\n RuntimeServicesRegistry\n>() {}\n\nexport const RuntimeServicesRegistryTag = RuntimeServicesRegistryTagImpl\n\nexport interface RuntimeServiceSelection<Service> {\n readonly binding: RuntimeServiceBinding\n readonly impl: RuntimeServiceImpl<Service>\n readonly overridesApplied: ReadonlyArray<string>\n}\n\nexport const selectRuntimeService = <Service>(\n serviceId: string,\n impls: ReadonlyArray<RuntimeServiceImpl<Service>>,\n overrides: {\n readonly runtimeDefault?: RuntimeServicesOverrides\n readonly runtimeModule?: RuntimeServicesOverrides\n readonly provider?: RuntimeServicesOverrides\n readonly providerModule?: RuntimeServicesOverrides\n readonly instance?: RuntimeServicesOverrides\n },\n): RuntimeServiceSelection<Service> => {\n const builtin = impls[0]\n if (!builtin) {\n throw new Error(`[Logix] RuntimeKernel registry missing builtin impl for: ${serviceId}`)\n }\n\n let desired: { readonly scope: OverrideScope; readonly override: RuntimeServiceOverride } | undefined\n const consider = 
(scope: OverrideScope, patch: RuntimeServicesOverrides | undefined): void => {\n const next = patch ? normalizeOverride(patch[serviceId]) : undefined\n if (!next || !next.implId) return\n desired = { scope, override: next }\n }\n\n // priority: builtin < runtime_default < runtime_module < provider < instance\n consider('runtime_default', overrides.runtimeDefault)\n consider('runtime_module', overrides.runtimeModule)\n consider('provider', overrides.provider)\n consider('provider', overrides.providerModule)\n consider('instance', overrides.instance)\n\n if (!desired) {\n return {\n impl: builtin,\n binding: {\n serviceId,\n implId: builtin.implId,\n implVersion: builtin.implVersion,\n scope: 'builtin',\n overridden: false,\n notes: builtin.notes,\n },\n overridesApplied: [],\n }\n }\n\n const desiredImplId = desired.override.implId!\n const selected = impls.find((i) => i.implId === desiredImplId)\n const impl = selected ?? builtin\n\n const didFallback = selected == null\n const fallbackNote = didFallback\n ? `Unknown implId \"${desiredImplId}\", falling back to builtin \"${builtin.implId}\"`\n : undefined\n\n const notes = [desired.override.notes, impl.notes, fallbackNote]\n .filter((s): s is string => typeof s === 'string' && s.length > 0)\n .join('; ')\n\n return {\n impl,\n binding: {\n serviceId,\n implId: impl.implId,\n implVersion: impl.implVersion,\n scope: desired.scope,\n overridden: true,\n notes: notes.length > 0 ? notes : undefined,\n },\n overridesApplied: [\n didFallback\n ? 
`${desired.scope}:${serviceId}=${desiredImplId} (fallback=${builtin.implId})`\n : `${desired.scope}:${serviceId}=${desiredImplId}`,\n ],\n }\n}\n\nexport const makeRuntimeServicesEvidence = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n readonly bindings: ReadonlyArray<RuntimeServiceBinding>\n readonly overridesApplied: ReadonlyArray<string>\n}): RuntimeServicesEvidence => {\n let scope: OverrideScope = 'builtin'\n for (const b of args.bindings) {\n scope = maxScope(scope, b.scope)\n }\n\n return {\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n scope,\n bindings: args.bindings,\n overridesApplied: args.overridesApplied,\n }\n}\n\nconst RUNTIME_SERVICES_EVIDENCE = Symbol.for('@logixjs/core/runtimeServicesEvidence')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const setRuntimeServicesEvidence = (runtime: object, evidence: RuntimeServicesEvidence): void => {\n defineHidden(runtime, RUNTIME_SERVICES_EVIDENCE, evidence)\n}\n\nexport const getRuntimeServicesEvidence = (runtime: object): RuntimeServicesEvidence => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const evidence = (runtime as any)[RUNTIME_SERVICES_EVIDENCE] as RuntimeServicesEvidence | undefined\n if (!evidence) {\n const msg = isDevEnv()\n ? 
[\n '[MissingRuntimeServicesEvidence] Runtime services evidence not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make assembles RuntimeKernel and attaches evidence (020 US2).',\n '- If you created a mock runtime for tests, attach evidence or avoid calling evidence-only APIs.',\n ].join('\\n')\n : 'Runtime services evidence not installed'\n throw new Error(msg)\n }\n\n const runtimeInstanceId = scope.instanceId\n if (\n typeof runtimeInstanceId === 'string' &&\n runtimeInstanceId.length > 0 &&\n runtimeInstanceId !== evidence.instanceId\n ) {\n throw new Error(\n isDevEnv()\n ? [\n '[InconsistentRuntimeServicesEvidence] Runtime services evidence instanceId mismatch.',\n `runtime: ${formatScope(scope.moduleId, runtimeInstanceId)}`,\n `evidence: ${formatScope(evidence.moduleId, evidence.instanceId)}`,\n ].join('\\n')\n : 'Runtime services evidence mismatch',\n )\n }\n\n return evidence\n}\n","import { Context, Layer } from 'effect'\nimport type { TraitConvergeRequestedMode } from '../../state-trait/model.js'\nimport type { ReadQueryStrictGateConfig } from './ReadQuery.js'\n\n// Unified runtime env detection, avoiding bundlers inlining NODE_ENV at build time.\nexport const getNodeEnv = (): string | undefined => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const env = (globalThis as any)?.process?.env\n return typeof env?.NODE_ENV === 'string' ? 
env.NODE_ENV : undefined\n } catch {\n return undefined\n }\n}\n\nexport const isDevEnv = (): boolean => getNodeEnv() !== 'production'\n\nexport type StateTransactionInstrumentation = 'full' | 'light'\n\n/**\n * getDefaultStateTxnInstrumentation:\n * - Currently chooses default instrumentation by NODE_ENV:\n * - dev / test: full (keep patches and snapshots for debugging).\n * - production: light (keep minimal semantics to reduce overhead).\n * - May evolve with finer-grained overrides in Runtime.make / Module.make.\n */\nexport const getDefaultStateTxnInstrumentation = (): StateTransactionInstrumentation => (isDevEnv() ? 'full' : 'light')\n\n/**\n * Runtime-level StateTransaction config Service:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime.make can read runtime-level defaults from Env.\n *\n * Notes:\n * - instrumentation is only a runtime-level default.\n * - Explicit instrumentation in ModuleImpl / ModuleRuntimeOptions has higher priority.\n */\nexport interface StateTransactionRuntimeConfig {\n readonly instrumentation?: StateTransactionInstrumentation\n /**\n * StateTrait derived converge budget (ms):\n * - Exceeding the budget triggers a soft degrade (freeze derived fields, preserve base writes and 0/1 commit semantics).\n * - Default is 200ms (aligned with the 007 spec threshold).\n */\n readonly traitConvergeBudgetMs?: number\n /**\n * Auto-mode decision budget (ms):\n * - Only used during the decision phase when requestedMode=\"auto\".\n * - Exceeding the budget must immediately fall back to full (and record evidence).\n */\n readonly traitConvergeDecisionBudgetMs?: number\n /**\n * StateTrait converge scheduling strategy:\n * - full: full topo execution (current default; safest).\n * - dirty: minimal triggering based on dirtyPaths + deps in the txn window (requires accurate deps).\n */\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n /**\n * 043: Trait converge time-slicing (explicit opt-in). 
Disabled by default.\n */\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n /**\n * 060: Txn Lanes (priority scheduling for transaction follow-up work). Enabled by default since 062.\n */\n readonly txnLanes?: TxnLanesPatch\n /**\n * Runtime-level per-module overrides (hotfix path):\n * - Only affects converge behavior for the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /**\n * 060: Txn Lanes runtime_module overrides (hotfix / gradual tuning).\n * - Only affects the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionConfigTagImpl extends Context.Tag('@logixjs/core/StateTransactionRuntimeConfig')<\n StateTransactionConfigTagImpl,\n StateTransactionRuntimeConfig\n>() {}\n\nexport const StateTransactionConfigTag = StateTransactionConfigTagImpl\n\nexport type ReadQueryStrictGateRuntimeConfig = ReadQueryStrictGateConfig\n\nclass ReadQueryStrictGateConfigTagImpl extends Context.Tag('@logixjs/core/ReadQueryStrictGateRuntimeConfig')<\n ReadQueryStrictGateConfigTagImpl,\n ReadQueryStrictGateRuntimeConfig\n>() {}\n\nexport const ReadQueryStrictGateConfigTag = ReadQueryStrictGateConfigTagImpl\n\nexport type ReplayMode = 'live' | 'replay'\n\nexport interface ReplayModeConfig {\n readonly mode: ReplayMode\n}\n\nclass ReplayModeConfigTagImpl extends Context.Tag('@logixjs/core/ReplayModeConfig')<\n ReplayModeConfigTagImpl,\n ReplayModeConfig\n>() {}\n\nexport const ReplayModeConfigTag = ReplayModeConfigTagImpl\n\nexport const replayModeLayer = (mode: ReplayMode): Layer.Layer<ReplayModeConfigTagImpl, never, never> =>\n Layer.succeed(ReplayModeConfigTag, { mode })\n\nexport interface StateTransactionTraitConvergeOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly 
traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n}\n\nexport interface TxnLanesPatch {\n /**\n * enabled: whether Txn Lanes is enabled (default on since 062).\n * - undefined: default enabled (when not explicitly configured)\n * - false: disabled (returns to baseline behavior)\n * - true: enabled (only affects scheduling of follow-up work outside the transaction; transactions remain synchronous)\n */\n readonly enabled?: boolean\n /**\n * overrideMode: runtime temporary override (for debugging/rollback/comparison).\n * - forced_off: forcibly disables Txn Lanes (returns to baseline behavior).\n * - forced_sync: forces fully synchronous execution (ignores non-urgent deferral and time-slicing; used for comparisons).\n *\n * Notes:\n * - Override precedence follows StateTransactionOverrides: provider > runtime_module > runtime_default > builtin.\n * - Overrides must be explainable by evidence (see 060 LaneEvidence reasons).\n */\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n /** non-urgent work loop slice budget (ms). */\n readonly budgetMs?: number\n /** Non-urgent backlog coalescing window (ms). */\n readonly debounceMs?: number\n /** Max lag upper bound (ms): exceeding it triggers an explainable starvation protection (forced catch-up). */\n readonly maxLagMs?: number\n /** Whether to allow coalescing/canceling intermediate non-urgent work (must preserve eventual consistency). 
*/\n readonly allowCoalesce?: boolean\n /**\n * Yield strategy for the non-urgent work loop (progressive enhancement).\n * - baseline: uses only time budget + hard upper bound\n * - inputPending: when supported by browsers, also consults `navigator.scheduling.isInputPending`\n */\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n}\n\nexport interface TraitConvergeTimeSlicingPatch {\n /**\n * enabled:\n * - false/undefined: disabled (default)\n * - true: enables time-slicing (only affects computed/link explicitly marked as deferred)\n */\n readonly enabled?: boolean\n /**\n * debounceMs: coalescing interval (ms) for the deferral window; merges high-frequency inputs into one deferred flush.\n */\n readonly debounceMs?: number\n /**\n * maxLagMs: max lag upper bound (ms); exceeding it triggers an explainable forced flush (starvation protection).\n */\n readonly maxLagMs?: number\n}\n\n/**\n * Provider-scoped StateTransactionOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface StateTransactionOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /** 060: Txn Lanes provider-level overrides (delta overrides). */\n readonly txnLanes?: TxnLanesPatch\n /** 060: Txn Lanes provider_module overrides (by moduleId). 
*/\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionOverridesTagImpl extends Context.Tag('@logixjs/core/StateTransactionOverrides')<\n StateTransactionOverridesTagImpl,\n StateTransactionOverrides\n>() {}\n\nexport const StateTransactionOverridesTag = StateTransactionOverridesTagImpl\n\nexport type ConcurrencyLimit = number | 'unbounded'\n\nexport interface ConcurrencyPolicyPatch {\n readonly concurrencyLimit?: ConcurrencyLimit\n readonly losslessBackpressureCapacity?: number\n readonly allowUnbounded?: boolean\n readonly pressureWarningThreshold?: {\n readonly backlogCount?: number\n readonly backlogDurationMs?: number\n }\n readonly warningCooldownMs?: number\n}\n\n/**\n * Runtime-level ConcurrencyPolicy:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime merges sources via a resolver (builtin/runtime_module/provider, etc.).\n *\n * Notes:\n * - overridesByModuleId is used for runtime_module hot-switching (hotfix / gradual tuning) and is lower priority than provider overrides.\n */\nexport interface ConcurrencyPolicy extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyTagImpl extends Context.Tag('@logixjs/core/ConcurrencyPolicy')<\n ConcurrencyPolicyTagImpl,\n ConcurrencyPolicy\n>() {}\n\nexport const ConcurrencyPolicyTag = ConcurrencyPolicyTagImpl\n\n/**\n * Provider-scoped ConcurrencyPolicyOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface ConcurrencyPolicyOverrides extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyOverridesTagImpl extends 
Context.Tag('@logixjs/core/ConcurrencyPolicyOverrides')<\n ConcurrencyPolicyOverridesTagImpl,\n ConcurrencyPolicyOverrides\n>() {}\n\nexport const ConcurrencyPolicyOverridesTag = ConcurrencyPolicyOverridesTagImpl\n","import { Context, Effect, Option } from 'effect'\nimport { isDevEnv } from './env.js'\n\n/**\n * Stable identifier for a kernel variant (requested kernel family).\n *\n * - Recommended: `[a-z0-9-]+` (lower-kebab).\n * - Recommended reserved names: `core` (builtin semantics), `core-ng` (history/comparison).\n */\nexport type KernelId = 'core' | 'core-ng' | (string & {})\n\nconst isKernelId = (value: unknown): value is KernelId =>\n typeof value === 'string' && value.length > 0 && /^[a-z0-9-]+$/.test(value)\n\nexport interface KernelImplementationRef {\n /**\n * Requested kernel family id (not a version number).\n * Actual activation / fallback must be interpreted via RuntimeServicesEvidence.\n */\n readonly kernelId: KernelId\n /** The npm package that provides the kernel implementation. */\n readonly packageName: string\n /** Optional semver for explainability (not used as a semantic anchor). */\n readonly packageVersion?: string\n /** Optional build hash/id for evidence diff explainability. */\n readonly buildId?: string\n /** Explainability-only labels; must not become semantic switches. 
*/\n readonly capabilities?: ReadonlyArray<string>\n}\n\nexport const defaultKernelImplementationRef = {\n kernelId: 'core',\n packageName: '@logixjs/core',\n} as const satisfies KernelImplementationRef\n\nexport const isKernelImplementationRef = (value: unknown): value is KernelImplementationRef => {\n if (typeof value !== 'object' || value === null || Array.isArray(value)) return false\n\n const keys = Object.keys(value)\n for (const k of keys) {\n if (k !== 'kernelId' && k !== 'packageName' && k !== 'packageVersion' && k !== 'buildId' && k !== 'capabilities') {\n return false\n }\n }\n\n const v: any = value\n if (!isKernelId(v.kernelId)) return false\n if (typeof v.packageName !== 'string' || v.packageName.length === 0) return false\n\n if (v.packageVersion !== undefined && (typeof v.packageVersion !== 'string' || v.packageVersion.length === 0)) {\n return false\n }\n if (v.buildId !== undefined && (typeof v.buildId !== 'string' || v.buildId.length === 0)) {\n return false\n }\n if (v.capabilities !== undefined) {\n if (!Array.isArray(v.capabilities)) return false\n if (!v.capabilities.every((c: unknown) => typeof c === 'string')) return false\n }\n\n return true\n}\n\nexport const normalizeKernelImplementationRef = (\n value: unknown,\n fallback: KernelImplementationRef = defaultKernelImplementationRef,\n): KernelImplementationRef => {\n if (typeof value !== 'object' || value === null || Array.isArray(value)) return fallback\n\n const v: any = value\n if (!isKernelId(v.kernelId)) return fallback\n if (typeof v.packageName !== 'string' || v.packageName.length === 0) return fallback\n\n return {\n kernelId: v.kernelId,\n packageName: v.packageName,\n ...(typeof v.packageVersion === 'string' && v.packageVersion.length > 0\n ? { packageVersion: v.packageVersion }\n : {}),\n ...(typeof v.buildId === 'string' && v.buildId.length > 0 ? { buildId: v.buildId } : {}),\n ...(Array.isArray(v.capabilities) && v.capabilities.every((c: unknown) => typeof c === 'string')\n ? 
{ capabilities: v.capabilities as ReadonlyArray<string> }\n : {}),\n }\n}\n\nclass KernelImplementationRefTagImpl extends Context.Tag('@logixjs/core/KernelImplementationRef')<\n KernelImplementationRefTagImpl,\n KernelImplementationRef\n>() {}\n\nexport const KernelImplementationRefTag = KernelImplementationRefTagImpl\n\nexport const resolveKernelImplementationRef = (): Effect.Effect<KernelImplementationRef, never, any> =>\n Effect.gen(function* () {\n const opt = yield* Effect.serviceOption(KernelImplementationRefTag)\n return normalizeKernelImplementationRef(Option.isSome(opt) ? opt.value : undefined)\n })\n\nconst KERNEL_IMPLEMENTATION_REF = Symbol.for('@logixjs/core/kernelImplementationRef')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const setKernelImplementationRef = (runtime: object, ref: KernelImplementationRef): void => {\n defineHidden(runtime, KERNEL_IMPLEMENTATION_REF, ref)\n}\n\nexport const getKernelImplementationRef = (runtime: object): KernelImplementationRef => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const ref = (runtime as any)[KERNEL_IMPLEMENTATION_REF] as KernelImplementationRef | undefined\n if (!ref) {\n const msg = isDevEnv()\n ? 
[\n '[MissingKernelImplementationRef] KernelImplementationRef not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make installs KernelImplementationRef (045 kernel contract).',\n '- If you created a mock runtime for tests, attach KernelImplementationRef or avoid calling kernel-only APIs.',\n ].join('\\n')\n : 'KernelImplementationRef not installed'\n throw new Error(msg)\n }\n return ref\n}\n","import { Context, Effect } from 'effect'\n\nexport interface RuntimeServiceBuiltins {\n /**\n * Returns the make Effect of a builtin implementation (provided by ModuleRuntime during assembly, avoiding external code\n * capturing internal closures/state).\n * - For kernel implementors only (e.g. core-ng) to implement behavior-equivalent replacements or thin wrappers.\n * - Not an app-facing contract; must not be depended on from business Flow/Logic.\n */\n readonly getBuiltinMake: (serviceId: string) => Effect.Effect<unknown, never, any>\n}\n\nexport class RuntimeServiceBuiltinsTag extends Context.Tag('@logixjs/core/RuntimeServiceBuiltins')<\n RuntimeServiceBuiltinsTag,\n RuntimeServiceBuiltins\n>() {}\n","import { Context, Effect, FiberRef, Layer } from 'effect'\nimport type { JsonValue } from './jsonValue.js'\nimport { projectJsonValue } from './jsonValue.js'\nimport type { EvidencePackage } from './evidence.js'\nimport type { RunSession } from './runSession.js'\nimport { makeEvidenceSink } from './runSession.js'\nimport {\n currentDiagnosticsLevel,\n toRuntimeDebugEventRef,\n type Event as DebugEvent,\n type Sink as DebugSink,\n} from '../runtime/core/DebugSink.js'\n\nexport interface EvidenceCollector {\n readonly session: RunSession\n readonly debugSink: DebugSink\n readonly registerConvergeStaticIr: (staticIr: unknown) => void\n readonly setKernelImplementationRef: (ref: unknown) => void\n readonly setRuntimeServicesEvidence: (evidence: unknown) => void\n readonly 
exportEvidencePackage: (options?: { readonly maxEvents?: number }) => EvidencePackage\n readonly clear: () => void\n}\n\nclass EvidenceCollectorTagImpl extends Context.Tag('@logixjs/core/EvidenceCollector')<\n EvidenceCollectorTagImpl,\n EvidenceCollector\n>() {}\n\nexport const EvidenceCollectorTag = EvidenceCollectorTagImpl\n\nexport const evidenceCollectorLayer = (\n collector: EvidenceCollector,\n): Layer.Layer<EvidenceCollectorTagImpl, never, never> =>\n Layer.succeed(EvidenceCollectorTag, collector) as Layer.Layer<EvidenceCollectorTagImpl, never, never>\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nexport const makeEvidenceCollector = (session: RunSession): EvidenceCollector => {\n const sink = makeEvidenceSink(session)\n\n const convergeStaticIrByDigest = new Map<string, JsonValue>()\n let kernelImplementationRef: JsonValue | undefined\n let runtimeServicesEvidence: JsonValue | undefined\n\n const exportBudget = {\n dropped: 0,\n oversized: 0,\n nonSerializable: 0,\n }\n\n const debugSink: DebugSink = {\n record: (event: DebugEvent) =>\n Effect.gen(function* () {\n const level = yield* FiberRef.get(currentDiagnosticsLevel)\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n const eventSeq = level === 'off' ? 
undefined : session.local.nextSeq('eventSeq', instanceId)\n const ref = toRuntimeDebugEventRef(event, {\n diagnosticsLevel: level,\n eventSeq,\n onMetaProjection: ({ stats }) => {\n exportBudget.dropped += stats.dropped\n exportBudget.oversized += stats.oversized\n },\n })\n if (!ref) return\n\n const projected = projectJsonValue(ref)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n\n sink.record('debug:event', projected.value, {\n timestamp: ref.timestamp,\n })\n }),\n }\n\n const registerConvergeStaticIr = (staticIr: unknown): void => {\n if (!isRecord(staticIr)) return\n const digest = staticIr.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return\n const projected = projectJsonValue(staticIr)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n convergeStaticIrByDigest.set(digest, projected.value)\n }\n\n const setKernelImplementationRef = (ref: unknown): void => {\n const projected = projectJsonValue(ref)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n kernelImplementationRef = projected.value\n }\n\n const setRuntimeServicesEvidence = (evidence: unknown): void => {\n const projected = projectJsonValue(evidence)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n runtimeServicesEvidence = projected.value\n }\n\n const exportEvidencePackage = (options?: { readonly maxEvents?: number }): EvidencePackage => {\n const convergeSummary =\n convergeStaticIrByDigest.size > 0\n ? 
({\n staticIrByDigest: Object.fromEntries(convergeStaticIrByDigest),\n } as unknown as JsonValue)\n : undefined\n\n const runtimeSummary =\n kernelImplementationRef != null || runtimeServicesEvidence != null\n ? ({\n ...(kernelImplementationRef != null ? { kernelImplementationRef } : {}),\n ...(runtimeServicesEvidence != null ? { services: runtimeServicesEvidence } : {}),\n } as unknown as JsonValue)\n : undefined\n\n const summary =\n convergeSummary != null || runtimeSummary != null\n ? ({\n ...(convergeSummary != null ? { converge: convergeSummary } : {}),\n ...(runtimeSummary != null ? { runtime: runtimeSummary } : {}),\n } as unknown as JsonValue)\n : undefined\n\n return sink.export({\n maxEvents: options?.maxEvents,\n summary,\n })\n }\n\n const clear = (): void => {\n sink.clear()\n convergeStaticIrByDigest.clear()\n kernelImplementationRef = undefined\n runtimeServicesEvidence = undefined\n exportBudget.dropped = 0\n exportBudget.oversized = 0\n exportBudget.nonSerializable = 0\n }\n\n return {\n session,\n debugSink,\n registerConvergeStaticIr,\n setKernelImplementationRef,\n setRuntimeServicesEvidence,\n exportEvidencePackage,\n clear,\n }\n}\n","import { Context, Layer } from 'effect'\nimport type { JsonValue } from './jsonValue.js'\nimport type { EvidencePackage, EvidencePackageSource, ObservationEnvelope } from './evidence.js'\nimport { exportEvidencePackage, OBSERVABILITY_PROTOCOL_VERSION } from './evidence.js'\n\nexport type RunId = string\n\nexport interface RunSessionLocalState {\n /**\n * once: a de-dup key set for \"emit only once\" behavior (must be isolated per session to avoid cross-session pollution).\n * Returns true if it's the first occurrence, false if the key has been seen before.\n */\n readonly once: (key: string) => boolean\n\n /**\n * seq: allocate monotonic sequences by key (e.g. 
opSeq/eventSeq), must be isolated per session.\n */\n readonly nextSeq: (namespace: string, key: string) => number\n\n /** Tests/reset only: clear this session's local state. */\n readonly clear: () => void\n}\n\nexport interface RunSession {\n readonly runId: RunId\n readonly source: EvidencePackageSource\n readonly startedAt: number\n readonly local: RunSessionLocalState\n}\n\nclass RunSessionTagImpl extends Context.Tag('@logixjs/core/RunSession')<RunSessionTagImpl, RunSession>() {}\n\nexport const RunSessionTag = RunSessionTagImpl\n\nexport interface EvidenceSink {\n readonly record: (type: string, payload: JsonValue, options?: { readonly timestamp?: number }) => void\n readonly export: (options?: {\n readonly protocolVersion?: string\n readonly createdAt?: number\n readonly summary?: JsonValue\n readonly maxEvents?: number\n }) => EvidencePackage\n readonly clear: () => void\n}\n\nconst NEXT_RUN_SEQ_KEY = Symbol.for('@logixjs/core/runSession/nextRunSeq')\nlet fallbackNextRunSeq = 0\n\nconst nextRunSeq = (): number => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const g: any = globalThis as any\n const prev = typeof g[NEXT_RUN_SEQ_KEY] === 'number' ? (g[NEXT_RUN_SEQ_KEY] as number) : 0\n const next = prev + 1\n g[NEXT_RUN_SEQ_KEY] = next\n return next\n } catch {\n fallbackNextRunSeq += 1\n return fallbackNextRunSeq\n }\n}\n\nconst makeRunId = (startedAt: number): RunId => `run-${startedAt}.${nextRunSeq()}`\n\nexport const makeRunSessionLocalState = (): RunSessionLocalState => {\n const onceKeys = new Set<string>()\n const seqByNamespace = new Map<string, Map<string, number>>()\n\n return {\n once: (key) => {\n if (onceKeys.has(key)) return false\n onceKeys.add(key)\n return true\n },\n nextSeq: (namespace, key) => {\n const byKey = seqByNamespace.get(namespace) ?? new Map<string, number>()\n if (!seqByNamespace.has(namespace)) seqByNamespace.set(namespace, byKey)\n const prev = byKey.get(key) ?? 
0\n const next = prev + 1\n byKey.set(key, next)\n return next\n },\n clear: () => {\n onceKeys.clear()\n seqByNamespace.clear()\n },\n }\n}\n\nexport const makeRunSession = (options?: {\n readonly runId?: RunId\n readonly source?: EvidencePackageSource\n readonly startedAt?: number\n readonly local?: RunSessionLocalState\n}): RunSession => {\n const startedAt = options?.startedAt ?? Date.now()\n return {\n runId: options?.runId ?? makeRunId(startedAt),\n source: options?.source ?? { host: 'unknown' },\n startedAt,\n local: options?.local ?? makeRunSessionLocalState(),\n }\n}\n\nexport const makeEvidenceSink = (session: RunSession): EvidenceSink => {\n const events: ObservationEnvelope[] = []\n let nextSeq = 1\n\n return {\n record: (type, payload, options) => {\n events.push({\n protocolVersion: OBSERVABILITY_PROTOCOL_VERSION,\n runId: session.runId,\n seq: nextSeq++,\n timestamp: options?.timestamp ?? Date.now(),\n type,\n payload,\n })\n },\n export: (options) => {\n const protocolVersion = options?.protocolVersion ?? OBSERVABILITY_PROTOCOL_VERSION\n const maxEvents = options?.maxEvents\n\n const selected =\n typeof maxEvents === 'number' && Number.isFinite(maxEvents) && maxEvents > 0\n ? events.slice(Math.max(0, events.length - Math.floor(maxEvents)))\n : events.slice()\n\n return exportEvidencePackage({\n protocolVersion,\n runId: session.runId,\n source: session.source,\n createdAt: options?.createdAt,\n events: selected,\n summary: options?.summary,\n })\n },\n clear: () => {\n events.length = 0\n nextSeq = 1\n },\n }\n}\n\nexport const runSessionLayer = (session?: RunSession): Layer.Layer<RunSessionTagImpl, never, never> =>\n Layer.succeed(RunSessionTag, session ?? 
makeRunSession()) as Layer.Layer<RunSessionTagImpl, never, never>\n","import { Effect, FiberRef, Option } from 'effect'\nimport type { StateTxnContext } from './StateTransaction.js'\nimport * as Debug from './DebugSink.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport * as EffectOp from '../../effect-op.js'\nimport { RunSessionTag } from '../../observability/runSession.js'\n\nexport const getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack, never, never> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? maybe.value.stack : [])),\n )\n\nexport type RunOperation = <A, E, R>(\n kind: EffectOp.EffectOp['kind'],\n name: string,\n params: {\n readonly payload?: unknown\n readonly meta?: EffectOp.EffectOp['meta']\n },\n eff: Effect.Effect<A, E, R>,\n) => Effect.Effect<A, E, R>\n\nexport const makeRunOperation = (args: {\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly txnContext: StateTxnContext<any>\n}): RunOperation => {\n const { optionsModuleId, instanceId, txnContext } = args\n\n const runOperation: RunOperation = <A2, E2, R2>(\n kind: EffectOp.EffectOp['kind'],\n name: string,\n params: {\n readonly payload?: unknown\n readonly meta?: EffectOp.EffectOp['meta']\n },\n eff: Effect.Effect<A2, E2, R2>,\n ): Effect.Effect<A2, E2, R2> =>\n Effect.gen(function* () {\n const stack = yield* getMiddlewareStack()\n\n const currentTxnId = txnContext.current?.txnId\n const existingLinkId = yield* FiberRef.get(EffectOpCore.currentLinkId)\n\n const runtimeLabel = yield* FiberRef.get(Debug.currentRuntimeLabel)\n\n // NOTE: linkId is generated/propagated by the Runtime:\n // - Boundary entrypoints create a new linkId.\n // - Nested operations reuse the current FiberRef.linkId.\n // - Never default to randomness/time to avoid non-replayable implicit identifiers.\n const { linkId: _ignoredLinkId, ...restMeta } = (params.meta ?? 
{}) as any\n\n const baseMeta: EffectOp.EffectOp['meta'] = {\n ...restMeta,\n // Filled by the runtime.\n moduleId: (params.meta as any)?.moduleId ?? optionsModuleId,\n instanceId: (params.meta as any)?.instanceId ?? instanceId,\n runtimeLabel: (params.meta as any)?.runtimeLabel ?? runtimeLabel,\n txnSeq: (params.meta as any)?.txnSeq ?? txnContext.current?.txnSeq,\n txnId: (params.meta as any)?.txnId ?? currentTxnId,\n }\n\n const baseMetaAny = baseMeta as any\n if (!(typeof baseMetaAny.opSeq === 'number' && Number.isFinite(baseMetaAny.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = baseMetaAny.instanceId ?? 'global'\n baseMetaAny.opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n }\n }\n\n const op0 = EffectOp.make<A2, E2, R2>({\n kind,\n name,\n payload: params.payload,\n effect: eff,\n meta: baseMeta,\n })\n\n const linkId = existingLinkId ?? op0.id\n const op =\n (op0.meta as any)?.linkId === linkId\n ? op0\n : ({\n ...op0,\n meta: {\n ...(op0.meta ?? {}),\n linkId,\n },\n } as typeof op0)\n\n const program = stack.length ? EffectOp.run(op, stack) : op.effect\n\n // linkId: created at the boundary, reused for nested ops (shared across modules via a FiberRef).\n const opSeq =\n typeof baseMetaAny.opSeq === 'number' && Number.isFinite(baseMetaAny.opSeq)\n ? Math.floor(baseMetaAny.opSeq)\n : undefined\n return yield* Effect.locally(\n EffectOpCore.currentLinkId,\n linkId,\n )(Effect.locally(Debug.currentOpSeq, opSeq)(program))\n })\n\n return runOperation\n}\n","// Internal EffectOp API (for internal implementation code).\n//\n// Goal:\n// - Internal modules must never import root public submodules (e.g. 
`../EffectOp`).\n// - This file hosts the shared implementation; public `src/EffectOp.ts` delegates to it.\n\nimport { Effect, Option } from 'effect'\nimport * as Core from './runtime/core/EffectOpCore.js'\nimport { RunSessionTag } from './observability/runSession.js'\n\nexport type EffectOp<Out = unknown, Err = unknown, Env = unknown> = Core.EffectOp<Out, Err, Env>\n\nexport type OperationPolicy = Core.OperationPolicy\n\nexport type OperationRejected = Core.OperationRejected\n\nexport type OperationError<E> = Core.OperationError<E>\n\nexport type Middleware = Core.Middleware\n\nexport type MiddlewareStack = Core.MiddlewareStack\n\nexport const composeMiddleware = Core.composeMiddleware\n\nexport const makeOperationRejected = Core.makeOperationRejected\n\n/**\n * Generate a stable id for identifying an EffectOp.\n * - Uses a monotonic sequence by default to avoid non-replayability from randomness/time.\n * - If meta.instanceId is available, prefer deriving `${instanceId}::o${opSeq}`.\n */\nlet nextGlobalOpSeq = 0\n\nconst nextOpSeq = (): number => {\n nextGlobalOpSeq += 1\n return nextGlobalOpSeq\n}\n\nconst makeId = (instanceId: string | undefined, opSeq: number): string =>\n instanceId ? `${instanceId}::o${opSeq}` : `o${opSeq}`\n\n/**\n * EffectOp.make:\n * - Create an EffectOp with basic meta.\n * - Generates a stable id by default (based on `instanceId` + monotonic `opSeq`); callers may override externally.\n */\nexport const make = <A, E, R>(params: {\n readonly kind: EffectOp['kind']\n readonly name: string\n readonly effect: Effect.Effect<A, E, R>\n readonly payload?: unknown\n readonly meta?: EffectOp['meta']\n readonly id?: string\n}): EffectOp<A, E, R> => ({\n ...(params.id\n ? { id: params.id, meta: params.meta }\n : (() => {\n const meta: any = params.meta ?? {}\n const instanceId: string | undefined = meta.instanceId\n const opSeq: number =\n typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq) ? 
Math.floor(meta.opSeq) : nextOpSeq()\n return {\n id: makeId(instanceId, opSeq),\n meta: meta.opSeq === opSeq ? meta : { ...meta, opSeq },\n }\n })()),\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n effect: params.effect,\n})\n\n/**\n * EffectOp.makeInRunSession:\n * - Allocate a stable `opSeq` within a RunSession scope (per-session + per-instance).\n * - If RunSession is missing from Env, fall back to a process-wide monotonic sequence (no process-wide Map).\n */\nexport const makeInRunSession = <A, E, R>(params: {\n readonly kind: EffectOp['kind']\n readonly name: string\n readonly effect: Effect.Effect<A, E, R>\n readonly payload?: unknown\n readonly meta?: EffectOp['meta']\n readonly id?: string\n}): Effect.Effect<EffectOp<A, E, R>, never, any> =>\n Effect.gen(function* () {\n if (params.id) {\n return {\n id: params.id,\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n meta: params.meta,\n effect: params.effect,\n } satisfies EffectOp<A, E, R>\n }\n\n const meta: any = params.meta ?? {}\n const instanceId: string | undefined = meta.instanceId\n\n let opSeq: number\n if (typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq)) {\n opSeq = Math.floor(meta.opSeq)\n } else {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = instanceId ?? 'global'\n opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n } else {\n opSeq = nextOpSeq()\n }\n }\n\n return {\n id: makeId(instanceId, opSeq),\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n meta: meta.opSeq === opSeq ? 
meta : { ...meta, opSeq },\n effect: params.effect,\n } satisfies EffectOp<A, E, R>\n })\n\n/**\n * EffectOp.withMeta:\n * - Append or override meta fields on an existing EffectOp.\n * - Does not change the effect itself.\n */\nexport const withMeta = <A, E, R>(\n op: EffectOp<A, E, R>,\n meta: Partial<NonNullable<EffectOp['meta']>>,\n): EffectOp<A, E, R> => ({\n ...op,\n meta: { ...(op.meta ?? {}), ...meta },\n})\n\n/**\n * EffectOp.run:\n * - Execute an EffectOp using the given MiddlewareStack.\n * - If the stack is empty, return op.effect directly.\n */\nexport const run = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> =>\n Core.runWithMiddleware(op, stack)\n","import { Effect, PubSub } from 'effect'\nimport type { StateChangeWithMeta, StateCommitMeta, StateCommitMode, StateCommitPriority } from './module.js'\nimport * as Debug from './DebugSink.js'\nimport type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport * as ReducerDiagnostics from './ReducerDiagnostics.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport type { RunOperation } from './ModuleRuntime.operation.js'\nimport type { RunWithStateTransaction, SetStateInternal } from './ModuleRuntime.transaction.js'\nimport type { EnqueueTransaction } from './ModuleRuntime.txnQueue.js'\nimport type { ResolvedConcurrencyPolicy } from './ModuleRuntime.concurrencyPolicy.js'\n\nexport const makeDispatchOps = <S, A>(args: {\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly declaredActionTags?: ReadonlySet<string>\n readonly initialReducers?: Readonly<\n Record<string, (state: S, action: A, sink?: (path: StateTransaction.StatePatchPath) => void) => S>\n >\n readonly txnContext: StateTransaction.StateTxnContext<S>\n readonly readState: Effect.Effect<S>\n readonly setStateInternal: SetStateInternal<S>\n readonly recordStatePatch: (\n path: StateTransaction.StatePatchPath | undefined,\n reason: 
StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n readonly actionHub: PubSub.PubSub<A>\n readonly actionCommitHub: PubSub.PubSub<StateChangeWithMeta<A>>\n readonly diagnostics: ConcurrencyDiagnostics\n readonly enqueueTransaction: EnqueueTransaction\n readonly resolveConcurrencyPolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>\n readonly runOperation: RunOperation\n readonly runWithStateTransaction: RunWithStateTransaction\n readonly isDevEnv: () => boolean\n}): {\n readonly registerReducer: (tag: string, fn: (state: S, action: A) => S) => void\n readonly dispatch: (action: A) => Effect.Effect<void>\n readonly dispatchBatch: (actions: ReadonlyArray<A>) => Effect.Effect<void>\n readonly dispatchLowPriority: (action: A) => Effect.Effect<void>\n} => {\n const {\n optionsModuleId,\n instanceId,\n declaredActionTags,\n initialReducers,\n txnContext,\n readState,\n setStateInternal,\n recordStatePatch,\n actionHub,\n actionCommitHub,\n diagnostics,\n enqueueTransaction,\n resolveConcurrencyPolicy,\n runOperation,\n runWithStateTransaction,\n isDevEnv,\n } = args\n\n const resolveActionTag = (action: A): string | undefined => {\n const tag = (action as any)?._tag\n if (typeof tag === 'string' && tag.length > 0) return tag\n const type = (action as any)?.type\n if (typeof type === 'string' && type.length > 0) return type\n if (tag != null) return String(tag)\n if (type != null) return String(type)\n return undefined\n }\n\n // Primary reducer map: initial values come from options.reducers and can be extended at runtime via internal hooks (for $.reducer sugar).\n const reducerMap = new Map<string, (state: S, action: A) => S>()\n if (initialReducers) {\n for (const [key, fn] of Object.entries(initialReducers)) {\n reducerMap.set(key, fn as (state: S, action: A) => S)\n }\n }\n\n // Track whether an Action tag has been dispatched, for diagnosing config issues like late reducer registration.\n const 
dispatchedTags = new Set<string>()\n\n const registerReducer = (tag: string, fn: (state: S, action: A) => S): void => {\n if (reducerMap.has(tag)) {\n // Duplicate registration: throw a config error with extra context; catchAllCause emits diagnostics.\n throw ReducerDiagnostics.makeReducerError('ReducerDuplicateError', tag, optionsModuleId)\n }\n if (dispatchedTags.has(tag)) {\n // Registering after the tag has already been dispatched is a risky config; surfaced via a custom error type for diagnostics.\n throw ReducerDiagnostics.makeReducerError('ReducerLateRegistrationError', tag, optionsModuleId)\n }\n reducerMap.set(tag, fn)\n }\n\n const applyPrimaryReducer = (action: A) => {\n const tag = resolveActionTag(action)\n if (tag == null || reducerMap.size === 0) {\n return Effect.void\n }\n const tagKey = tag.length > 0 ? tag : 'unknown'\n dispatchedTags.add(tagKey)\n const reducer = reducerMap.get(tagKey)\n if (!reducer) {\n return Effect.void\n }\n\n return readState.pipe(\n Effect.flatMap((prev) =>\n Effect.gen(function* () {\n const patchPaths: Array<StateTransaction.StatePatchPath> = []\n const sink = (path: StateTransaction.StatePatchPath): void => {\n if (typeof path === 'string') {\n if (path.length > 0) patchPaths.push(path)\n return\n }\n if (typeof path === 'number') {\n if (Number.isFinite(path)) patchPaths.push(Math.floor(path))\n return\n }\n if (path.length > 0) patchPaths.push(path)\n }\n\n const next = (reducer as any)(prev, action, sink) as S\n\n // No-op reducer: avoid dirty evidence to prevent redundant converge/validate full paths.\n if (Object.is(next, prev)) {\n return\n }\n\n // Prefer the traceable in-transaction path:\n // - If the reducer provides patchPaths (e.g. 
generated by Logix.Module.Reducer.mutate), record field-level patches.\n // - Otherwise deterministically fall back to dirtyAll (path=\"*\") and emit a migration diagnostic in dev mode.\n if (txnContext.current) {\n if (patchPaths.length > 0) {\n StateTransaction.updateDraft(txnContext, next)\n for (const path of patchPaths) {\n recordStatePatch(path, 'reducer')\n }\n return\n }\n\n StateTransaction.updateDraft(txnContext, next)\n recordStatePatch('*', 'reducer', undefined, next)\n\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: txnContext.current?.txnSeq,\n txnId: txnContext.current?.txnId,\n trigger: txnContext.current?.origin,\n code: 'state_transaction::dirty_all_fallback',\n severity: 'warning',\n message:\n 'Reducer writeback did not provide field-level dirty-set evidence; falling back to dirtyAll scheduling.',\n hint: 'Prefer Logix.Module.Reducer.mutate(...) or $.state.mutate(...) inside the transaction to produce field-level patchPaths.',\n kind: 'dirty_all_fallback:reducer',\n })\n }\n\n return\n }\n\n yield* setStateInternal(next, '*', 'reducer', undefined, next)\n }),\n ),\n )\n }\n\n const makeActionOrigin = (originName: string, action: A): StateTransaction.StateTxnOrigin => ({\n kind: 'action',\n name: originName,\n details: {\n _tag: resolveActionTag(action) ?? 'unknown',\n path: typeof (action as any)?.payload?.path === 'string' ? ((action as any).payload.path as string) : undefined,\n op: (() => {\n const tag = resolveActionTag(action) ?? 
''\n if (tag.includes('Remove') || tag.includes('remove')) return 'remove'\n if (\n tag.includes('Append') ||\n tag.includes('Prepend') ||\n tag.includes('Insert') ||\n tag.includes('Swap') ||\n tag.includes('Move') ||\n tag.includes('append') ||\n tag.includes('prepend') ||\n tag.includes('insert') ||\n tag.includes('swap') ||\n tag.includes('move')\n ) {\n return 'insert'\n }\n if (tag.includes('Unset') || tag.includes('unset')) return 'unset'\n return 'set'\n })(),\n },\n })\n\n const dispatchInTransaction = (action: A): Effect.Effect<void> =>\n Effect.gen(function* () {\n // Apply the primary reducer first (may be a no-op).\n yield* applyPrimaryReducer(action)\n\n const actionTag = resolveActionTag(action)\n const actionTagNormalized = typeof actionTag === 'string' && actionTag.length > 0 ? actionTag : 'unknown'\n const unknownAction = declaredActionTags ? !declaredActionTags.has(actionTagNormalized) : false\n\n // Record action dispatch (for Devtools/diagnostics).\n yield* Debug.record({\n type: 'action:dispatch',\n moduleId: optionsModuleId,\n action,\n actionTag: actionTagNormalized,\n ...(unknownAction ? { unknownAction: true } : {}),\n instanceId,\n txnSeq: txnContext.current?.txnSeq,\n txnId: txnContext.current?.txnId,\n })\n\n // actionsWithMeta$: provides stable txnSeq/txnId anchors for higher-level subscriptions (e.g. Process).\n const current = txnContext.current\n if (current) {\n const meta: StateCommitMeta = {\n txnSeq: current.txnSeq,\n txnId: current.txnId,\n commitMode: ((current as any).commitMode ?? 'normal') as StateCommitMode,\n priority: ((current as any).priority ?? 
'normal') as StateCommitPriority,\n originKind: current.origin.kind,\n originName: current.origin.name,\n }\n yield* PubSub.publish(actionCommitHub, { value: action, meta })\n }\n })\n\n const runDispatch = (action: A): Effect.Effect<void> =>\n runOperation(\n 'action',\n 'action:dispatch',\n {\n payload: action,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction(makeActionOrigin('dispatch', action), () => dispatchInTransaction(action)),\n ).pipe(Effect.asVoid)\n\n const runDispatchLowPriority = (action: A): Effect.Effect<void> =>\n runOperation(\n 'action',\n 'action:dispatchLowPriority',\n {\n payload: action,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction(makeActionOrigin('dispatchLowPriority', action), () =>\n Effect.gen(function* () {\n if (txnContext.current) {\n ;(txnContext.current as any).commitMode = 'lowPriority' as StateCommitMode\n ;(txnContext.current as any).priority = 'low' as StateCommitPriority\n }\n yield* dispatchInTransaction(action)\n }),\n ),\n ).pipe(Effect.asVoid)\n\n const runDispatchBatch = (actions: ReadonlyArray<A>): Effect.Effect<void> => {\n if (actions.length === 0) return Effect.void\n\n return runOperation(\n 'action',\n 'action:dispatchBatch',\n {\n payload: actions,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction({ kind: 'action', name: 'dispatchBatch', details: { count: actions.length } }, () =>\n Effect.gen(function* () {\n if (txnContext.current) {\n ;(txnContext.current as any).commitMode = 'batch' as StateCommitMode\n ;(txnContext.current as any).priority = 'normal' as StateCommitPriority\n }\n for (const action of actions) {\n yield* dispatchInTransaction(action)\n }\n }),\n ),\n ).pipe(Effect.asVoid)\n }\n\n const publishWithPressureDiagnostics = (triggerName: string, publish: Effect.Effect<unknown>): Effect.Effect<void> =>\n Effect.gen(function* () {\n const startedAt = Date.now()\n yield* publish\n const elapsedMs = 
Date.now() - startedAt\n\n // fast-path: treat 0ms as \"no backpressure wait observed\" to avoid parsing policy per dispatch.\n if (elapsedMs <= 0) {\n return\n }\n\n const policy = yield* resolveConcurrencyPolicy()\n yield* diagnostics.emitPressureIfNeeded({\n policy,\n trigger: { kind: 'actionHub', name: triggerName },\n saturatedDurationMs: elapsedMs,\n })\n })\n\n return {\n registerReducer,\n // Note: publish is a lossless/backpressure channel and may wait.\n // Must run outside the transaction window (FR-012) and must not block the txnQueue consumer fiber (avoid deadlock).\n dispatch: (action) =>\n enqueueTransaction(runDispatch(action)).pipe(\n Effect.zipRight(publishWithPressureDiagnostics('publish', PubSub.publish(actionHub, action))),\n ),\n dispatchBatch: (actions) =>\n enqueueTransaction(runDispatchBatch(actions)).pipe(\n Effect.zipRight(publishWithPressureDiagnostics('publishAll', PubSub.publishAll(actionHub, actions))),\n ),\n dispatchLowPriority: (action) =>\n enqueueTransaction(runDispatchLowPriority(action)).pipe(\n Effect.zipRight(publishWithPressureDiagnostics('publish', PubSub.publish(actionHub, action))),\n ),\n }\n}\n","import { Cause, Chunk, Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\n\n/**\n * Reducer diagnostic error types:\n * - ReducerDuplicateError: multiple primary reducers registered for the same tag.\n * - ReducerLateRegistrationError: reducer registered after actions with this tag have already been dispatched.\n *\n * These errors are internal to Runtime and are converted into Debug diagnostic events in the catch phase.\n */\nexport interface ReducerDiagnosticError extends Error {\n readonly _tag: 'ReducerDuplicateError' | 'ReducerLateRegistrationError'\n readonly tag: string\n readonly moduleId?: string\n}\n\nexport const makeReducerError = (\n _tag: ReducerDiagnosticError['_tag'],\n tag: string,\n moduleId?: string,\n): ReducerDiagnosticError =>\n Object.assign(\n new Error(\n _tag === 'ReducerDuplicateError'\n 
? `[ModuleRuntime] Duplicate primary reducer for tag \"${tag}\". Each action tag must have at most one primary reducer.`\n : `[ModuleRuntime] Late primary reducer registration for tag \"${tag}\". Reducers must be registered before the first dispatch of this tag.`,\n ),\n {\n _tag,\n tag,\n moduleId,\n },\n ) as ReducerDiagnosticError\n\n/**\n * Extracts Reducer diagnostic errors from a Logic-forked Cause and emits them as Debug events.\n *\n * Notes:\n * - Emits diagnostic events only when ReducerDiagnosticError is present.\n * - moduleId prefers the error object's moduleId, falling back to the caller-provided moduleId.\n */\nexport const emitDiagnosticsFromCause = (\n cause: Cause.Cause<unknown>,\n moduleIdFromContext?: string,\n): Effect.Effect<void, never, any> =>\n Effect.sync(() => {\n const defects = Chunk.toReadonlyArray(Cause.defects(cause))\n\n let duplicate: ReducerDiagnosticError | undefined\n let late: ReducerDiagnosticError | undefined\n\n for (const defect of defects) {\n if (!defect || typeof defect !== 'object') continue\n const error = defect as any\n if (error._tag === 'ReducerDuplicateError') {\n duplicate = error as ReducerDiagnosticError\n } else if (error._tag === 'ReducerLateRegistrationError') {\n late = error as ReducerDiagnosticError\n }\n }\n\n const effects: Array<Effect.Effect<void>> = []\n\n if (duplicate) {\n effects.push(\n Debug.record({\n type: 'diagnostic',\n moduleId: duplicate.moduleId ?? moduleIdFromContext,\n code: 'reducer::duplicate',\n severity: 'error',\n message: `Primary reducer for tag \"${duplicate.tag}\" is already registered and cannot be redefined.`,\n hint: 'Ensure each Action tag defines a single primary reducer. If it is defined in both Module.reducers and $.reducer, keep the Module.reducers version or merge into one definition.',\n actionTag: duplicate.tag,\n }),\n )\n }\n\n if (late) {\n effects.push(\n Debug.record({\n type: 'diagnostic',\n moduleId: late.moduleId ?? 
moduleIdFromContext,\n code: 'reducer::late_registration',\n severity: 'error',\n message: `Primary reducer for tag \"${late.tag}\" was registered after actions with this tag had already been dispatched.`,\n hint: 'Move this reducer to Module.make({ reducers }), or ensure $.reducer(\"tag\", ...) runs before the first dispatch.',\n actionTag: late.tag,\n }),\n )\n }\n\n if (effects.length === 0) {\n return Effect.void\n }\n\n let combined: Effect.Effect<void> = Effect.void\n for (const eff of effects) {\n combined = combined.pipe(Effect.zipRight(eff))\n }\n return combined\n }).pipe(Effect.flatten)\n","import { Effect, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { toSerializableErrorSummary } from './errorSummary.js'\n\ntype Phase = 'setup' | 'run'\n\ntype AnyEffectHandler = (payload: unknown) => Effect.Effect<void, any, any>\n\ntype HandlerEntry = {\n readonly actionTag: string\n readonly sourceKey: string\n readonly handler: AnyEffectHandler\n readonly phase: Phase\n readonly logicUnitId: string\n readonly logicUnitLabel: string\n readonly logicUnitPath?: string\n}\n\ntype LogicUnitState = {\n nextHandlerSeq: number\n handlerIds: WeakMap<AnyEffectHandler, string>\n}\n\ntype ActionTagState = {\n readonly handlers: Map<string, HandlerEntry>\n watcherStarted: boolean\n}\n\nconst resolveActionTag = (action: unknown): string | undefined => {\n const tag = (action as any)?._tag\n if (typeof tag === 'string' && tag.length > 0) return tag\n const type = (action as any)?.type\n if (typeof type === 'string' && type.length > 0) return type\n if (tag != null) return String(tag)\n if (type != null) return String(type)\n return undefined\n}\n\nconst matchesActionTag = (action: unknown, actionTag: string): boolean => {\n const tag = resolveActionTag(action)\n return tag === actionTag\n}\n\nconst getOrCreateLogicUnitState = (states: Map<string, LogicUnitState>, logicUnitId: string): LogicUnitState => {\n const existing = states.get(logicUnitId)\n if 
(existing) return existing\n const next: LogicUnitState = { nextHandlerSeq: 0, handlerIds: new WeakMap() }\n states.set(logicUnitId, next)\n return next\n}\n\nconst getOrAssignHandlerId = (state: LogicUnitState, handler: AnyEffectHandler): string => {\n const existing = state.handlerIds.get(handler)\n if (existing) return existing\n state.nextHandlerSeq += 1\n const id = `h${state.nextHandlerSeq}`\n state.handlerIds.set(handler, id)\n return id\n}\n\nexport type RegisterEffectArgs = {\n readonly actionTag: string\n readonly handler: AnyEffectHandler\n readonly phase: Phase\n readonly logicUnit?: {\n readonly logicUnitId: string\n readonly logicUnitLabel: string\n readonly path?: string\n }\n}\n\nexport const makeEffectsRegistry = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n readonly actions$: Stream.Stream<unknown>\n}): {\n readonly registerEffect: (\n params: RegisterEffectArgs,\n ) => Effect.Effect<{ readonly sourceKey: string; readonly duplicate: boolean }, never, any>\n} => {\n const { moduleId, instanceId, actions$ } = args\n\n const logicUnitStates = new Map<string, LogicUnitState>()\n const tagStates = new Map<string, ActionTagState>()\n\n const getOrCreateTagState = (actionTag: string): ActionTagState => {\n const existing = tagStates.get(actionTag)\n if (existing) return existing\n const next: ActionTagState = { handlers: new Map(), watcherStarted: false }\n tagStates.set(actionTag, next)\n return next\n }\n\n const startWatcherIfNeeded = (actionTag: string, state: ActionTagState): Effect.Effect<void, never, any> => {\n if (state.watcherStarted) return Effect.void\n state.watcherStarted = true\n\n const program = Stream.runForEach(actions$.pipe(Stream.filter((a) => matchesActionTag(a, actionTag))), (action) =>\n Effect.gen(function* () {\n const entries = Array.from(state.handlers.values())\n if (entries.length === 0) return\n\n const payload = (action as any)?.payload\n\n yield* Effect.forEach(\n entries,\n (entry) =>\n 
Effect.forkScoped(\n Effect.gen(function* () {\n const exit = yield* Effect.exit(entry.handler(payload))\n if (exit._tag === 'Success') return\n\n const { errorSummary, downgrade } = toSerializableErrorSummary(exit.cause)\n const downgradeHint = downgrade ? ` (downgrade=${downgrade})` : ''\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::handler_failure',\n severity: 'error',\n message: `Effect handler failed for actionTag=\"${entry.actionTag}\" sourceKey=\"${entry.sourceKey}\".${downgradeHint}`,\n hint: `${errorSummary.name ? `${errorSummary.name}: ` : ''}${errorSummary.message}`,\n actionTag: entry.actionTag,\n kind: 'effect_handler_failure',\n trigger: {\n kind: 'effect',\n name: 'handler',\n details: {\n actionTag: entry.actionTag,\n sourceKey: entry.sourceKey,\n logicUnitId: entry.logicUnitId,\n },\n },\n })\n }),\n ),\n { discard: true },\n )\n }),\n ).pipe(\n Effect.catchAllCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::watcher_crashed',\n severity: 'error',\n message: `Effect watcher crashed for actionTag=\"${actionTag}\".`,\n hint: toSerializableErrorSummary(cause).errorSummary.message,\n actionTag,\n kind: 'effect_watcher_crashed',\n }),\n ),\n )\n\n return Effect.forkScoped(program).pipe(Effect.asVoid)\n }\n\n const registerEffect = (params: RegisterEffectArgs) =>\n Effect.gen(function* () {\n const actionTag = params.actionTag\n const handler = params.handler\n\n const logicUnitId = params.logicUnit?.logicUnitId ?? 'unknown'\n const logicUnitLabel = params.logicUnit?.logicUnitLabel ?? 
`logicUnit:${logicUnitId}`\n const logicUnitPath = params.logicUnit?.path\n\n const unitState = getOrCreateLogicUnitState(logicUnitStates, logicUnitId)\n const handlerId = getOrAssignHandlerId(unitState, handler)\n const sourceKey = `${logicUnitId}::${handlerId}`\n\n const tagState = getOrCreateTagState(actionTag)\n\n const duplicate = tagState.handlers.has(sourceKey)\n if (duplicate) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::duplicate_registration',\n severity: 'warning',\n message: `Duplicate effect registration ignored for actionTag=\"${actionTag}\" sourceKey=\"${sourceKey}\".`,\n hint:\n 'The runtime de-duplicates effect handlers by (actionTag, sourceKey). ' +\n 'If you see this unexpectedly, check repeated setup registration or accidental double-mounting.',\n actionTag,\n kind: 'effect_duplicate_registration',\n trigger: {\n kind: 'effect',\n name: 'register',\n details: {\n actionTag,\n sourceKey,\n phase: params.phase,\n logicUnitId,\n logicUnitLabel,\n logicUnitPath,\n },\n },\n })\n return { sourceKey, duplicate: true } as const\n }\n\n tagState.handlers.set(sourceKey, {\n actionTag,\n sourceKey,\n handler,\n phase: params.phase,\n logicUnitId,\n logicUnitLabel,\n logicUnitPath,\n })\n\n if (params.phase === 'run') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::dynamic_registration',\n severity: 'warning',\n message: `Effect registered in run phase for actionTag=\"${actionTag}\" sourceKey=\"${sourceKey}\".`,\n hint: 'Run-phase registration only affects future actions; prefer registering effects during setup for deterministic behavior.',\n actionTag,\n kind: 'effect_dynamic_registration',\n trigger: {\n kind: 'effect',\n name: 'register:run',\n details: { actionTag, sourceKey, logicUnitId, logicUnitLabel, logicUnitPath },\n },\n })\n }\n\n yield* startWatcherIfNeeded(actionTag, tagState)\n return { sourceKey, duplicate: false } as const\n })\n\n return { 
registerEffect }\n}\n","import { Cause, Effect, Exit, Fiber, FiberRef, Option, PubSub, Queue, SubscriptionRef } from 'effect'\nimport type { StateChangeWithMeta, StateCommitMeta, StateCommitMode, StateCommitPriority } from './module.js'\nimport type {\n StateTraitProgram,\n TraitConvergeGenerationEvidence,\n TraitConvergePlanCacheEvidence,\n} from '../../state-trait/model.js'\nimport type { DirtyAllReason, DirtySet } from '../../field-path.js'\nimport * as Debug from './DebugSink.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport * as StateTraitConverge from '../../state-trait/converge.js'\nimport * as StateTraitValidate from '../../state-trait/validate.js'\nimport * as StateTraitSource from '../../state-trait/source.js'\nimport { getConvergeStaticIrDigest } from '../../state-trait/converge-ir.js'\nimport type * as RowId from '../../state-trait/rowid.js'\nimport type { RunOperation } from './ModuleRuntime.operation.js'\nimport type { ResolvedTraitConvergeConfig } from './ModuleRuntime.traitConvergeConfig.js'\nimport type { EnqueueTransaction } from './ModuleRuntime.txnQueue.js'\nimport { StateTransactionOverridesTag, type StateTransactionOverrides } from './env.js'\n\nconst DIRTY_ALL_SET_STATE_HINT = Symbol.for('@logixjs/core/dirtyAllSetStateHint')\n\nconst readDeferredFlushSlice = (details: unknown): { readonly start: number; readonly end: number } | undefined => {\n if (!details || typeof details !== 'object') return undefined\n const raw = details as any\n const start = raw.sliceStart\n const end = raw.sliceEnd\n if (typeof start !== 'number' || typeof end !== 'number') return undefined\n if (!Number.isFinite(start) || !Number.isFinite(end)) return undefined\n const s = Math.floor(start)\n const e = Math.floor(end)\n if (s < 0 || e <= s) return undefined\n return { start: s, end: e }\n}\n\nexport type RunWithStateTransaction = <E>(\n origin: StateTransaction.StateTxnOrigin,\n body: () => 
Effect.Effect<void, E, never>,\n) => Effect.Effect<void, E, never>\n\nexport type SetStateInternal<S> = (\n next: S,\n path: StateTransaction.StatePatchPath,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n) => Effect.Effect<void>\n\nexport type TraitRuntimeAccess = {\n readonly getProgram: () => StateTraitProgram<any> | undefined\n readonly getConvergePlanCache: () => StateTraitConverge.ConvergePlanCache | undefined\n readonly getConvergeGeneration: () => TraitConvergeGenerationEvidence\n readonly getPendingCacheMissReason: () => TraitConvergePlanCacheEvidence['missReason'] | undefined\n readonly setPendingCacheMissReason: (next: TraitConvergePlanCacheEvidence['missReason'] | undefined) => void\n readonly rowIdStore: RowId.RowIdStore\n readonly getListConfigs: () => ReadonlyArray<RowId.ListConfig>\n}\n\nexport type TraitConvergeTimeSlicingState = {\n readonly signal: Queue.Queue<void>\n readonly backlogDirtyPaths: Set<StateTransaction.StatePatchPath>\n backlogDirtyAllReason?: DirtyAllReason\n firstPendingAtMs: number | undefined\n lastTouchedAtMs: number | undefined\n latestConvergeConfig: ResolvedTraitConvergeConfig | undefined\n capturedContext:\n | {\n readonly runtimeLabel: string | undefined\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n readonly debugSinks: ReadonlyArray<Debug.Sink>\n readonly overrides: StateTransactionOverrides | undefined\n }\n | undefined\n}\n\nexport const makeTransactionOps = <S>(args: {\n readonly moduleId: string\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly stateRef: SubscriptionRef.SubscriptionRef<S>\n readonly commitHub: PubSub.PubSub<StateChangeWithMeta<S>>\n readonly shouldPublishCommitHub?: () => boolean\n readonly recordStatePatch: (\n path: StateTransaction.StatePatchPath | undefined,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => 
void\n readonly onCommit?: (args: {\n readonly state: S\n readonly meta: StateCommitMeta\n readonly dirtySet: DirtySet\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n }) => Effect.Effect<void>\n readonly enqueueTransaction: EnqueueTransaction\n readonly runOperation: RunOperation\n readonly txnContext: StateTransaction.StateTxnContext<S>\n readonly traitConvergeTimeSlicing: TraitConvergeTimeSlicingState\n readonly traitRuntime: TraitRuntimeAccess\n readonly resolveTraitConvergeConfig: () => Effect.Effect<ResolvedTraitConvergeConfig, never, never>\n readonly isDevEnv: () => boolean\n readonly maxTxnHistory: number\n readonly txnHistory: Array<StateTransaction.StateTransaction<S>>\n readonly txnById: Map<string, StateTransaction.StateTransaction<S>>\n}): {\n readonly readState: Effect.Effect<S>\n readonly setStateInternal: SetStateInternal<S>\n readonly runWithStateTransaction: RunWithStateTransaction\n readonly __logixGetExecVmAssemblyEvidence?: () => unknown\n} => {\n const {\n moduleId,\n optionsModuleId,\n instanceId,\n stateRef,\n commitHub,\n shouldPublishCommitHub,\n recordStatePatch,\n onCommit,\n enqueueTransaction,\n runOperation,\n txnContext,\n traitConvergeTimeSlicing,\n traitRuntime,\n resolveTraitConvergeConfig,\n isDevEnv,\n maxTxnHistory,\n txnHistory,\n txnById,\n } = args\n\n /**\n * Read current state:\n * - If a transaction is active, return the transaction draft.\n * - Otherwise, fall back to the underlying SubscriptionRef snapshot.\n */\n const readState: Effect.Effect<S> = Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n const current = txnContext.current\n if (inTxn && current) return current.draft\n return yield* SubscriptionRef.get(stateRef)\n })\n\n /**\n * runWithStateTransaction:\n * - Open a transaction for a single logic entrypoint (dispatch / source-refresh / future extensions).\n * - Aggregate all state writes within body; at the end commit once and emit a state:update debug 
event.\n * - The caller must ensure body does not cross long IO boundaries (see the spec constraints on the transaction window).\n */\n const runWithStateTransaction: RunWithStateTransaction = <E2>(\n origin: StateTransaction.StateTxnOrigin,\n body: () => Effect.Effect<void, E2, never>,\n ): Effect.Effect<void, E2, never> =>\n Effect.locally(\n TaskRunner.inSyncTransactionFiber,\n true,\n )(\n Effect.gen(function* () {\n const baseState = yield* SubscriptionRef.get(stateRef)\n\n StateTransaction.beginTransaction(txnContext, origin, baseState)\n const txnCurrent: any = txnContext.current\n txnCurrent.stateTraitValidateRequests = []\n txnCurrent.commitMode = 'normal' as StateCommitMode\n txnCurrent.priority = 'normal' as StateCommitPriority\n\n const txnId = txnContext.current?.txnId\n const txnSeq = txnContext.current?.txnSeq\n\n TaskRunner.enterSyncTransaction()\n let exit: Exit.Exit<void, E2> | undefined\n\n try {\n exit = yield* Effect.exit(\n Effect.locally(\n Debug.currentTxnId,\n txnId,\n )(\n Effect.gen(function* () {\n // Trait summary inside the transaction window (for devtools/diagnostics).\n let traitSummary: unknown | undefined\n\n // Execute the actual logic inside the transaction window (reducer / watcher writeback / traits, etc.).\n if (isDevEnv()) {\n const bodyFiber = yield* Effect.fork(body())\n\n const YIELD_BUDGET = 5\n let polled = yield* Fiber.poll(bodyFiber)\n for (let i = 0; i < YIELD_BUDGET && Option.isNone(polled); i++) {\n yield* Effect.yieldNow()\n polled = yield* Fiber.poll(bodyFiber)\n }\n\n if (Option.isNone(polled)) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n trigger: origin,\n code: 'state_transaction::async_escape',\n severity: 'error',\n message:\n 'Synchronous StateTransaction body escaped the transaction window (async/await detected).',\n hint: 'No IO/await/sleep/promises inside the transaction window; use run*Task (pending → IO → writeback) or move async logic 
outside the transaction.',\n kind: 'async_in_transaction',\n })\n }\n\n const bodyExit = yield* Fiber.await(bodyFiber)\n yield* Exit.match(bodyExit, {\n onFailure: (cause) => Effect.failCause(cause),\n onSuccess: () => Effect.void,\n })\n } else {\n yield* body()\n }\n\n const stateTraitProgram = traitRuntime.getProgram()\n\n // StateTrait: converge derived fields (computed/link, etc.) before commit to ensure 0/1 commit per window.\n if (stateTraitProgram && txnContext.current) {\n const convergeConfig = yield* resolveTraitConvergeConfig()\n traitConvergeTimeSlicing.latestConvergeConfig = convergeConfig\n const timeSlicingConfig = convergeConfig.traitConvergeTimeSlicing\n const isDeferredFlushTxn = origin.kind === 'trait:deferred_flush'\n const hasDeferredSteps =\n stateTraitProgram.convergeExecIr != null &&\n stateTraitProgram.convergeExecIr.topoOrderDeferredInt32.length > 0\n const canTimeSlice = timeSlicingConfig.enabled === true && hasDeferredSteps\n const schedulingScope: StateTraitConverge.ConvergeContext<any>['schedulingScope'] = isDeferredFlushTxn\n ? 'deferred'\n : canTimeSlice\n ? 'immediate'\n : 'all'\n\n const deferredSlice = isDeferredFlushTxn ? readDeferredFlushSlice(origin.details) : undefined\n const deferredScopeStepIds =\n deferredSlice && stateTraitProgram.convergeExecIr\n ? stateTraitProgram.convergeExecIr.topoOrderDeferredInt32.subarray(\n deferredSlice.start,\n deferredSlice.end,\n )\n : undefined\n\n const convergeExit = yield* Effect.exit(\n StateTraitConverge.convergeInTransaction(\n stateTraitProgram as any,\n {\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n configScope: convergeConfig.configScope,\n now: txnContext.config.now,\n budgetMs: convergeConfig.traitConvergeBudgetMs,\n decisionBudgetMs: convergeConfig.traitConvergeDecisionBudgetMs,\n requestedMode: deferredScopeStepIds ? 'full' : convergeConfig.traitConvergeMode,\n schedulingScope,\n ...(deferredScopeStepIds ? 
{ schedulingScopeStepIds: deferredScopeStepIds } : {}),\n dirtyAllReason: (txnContext.current as any)?.dirtyAllReason,\n dirtyPaths: txnContext.current?.dirtyPathIds,\n allowInPlaceDraft:\n txnContext.current != null &&\n !Object.is(txnContext.current.draft, txnContext.current.baseState),\n planCache: traitRuntime.getConvergePlanCache(),\n generation: traitRuntime.getConvergeGeneration(),\n cacheMissReasonHint: traitRuntime.getPendingCacheMissReason(),\n getDraft: () => txnContext.current!.draft as any,\n setDraft: (next) => {\n StateTransaction.updateDraft(txnContext, next as any)\n },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitConverge.ConvergeContext<any>,\n ),\n )\n\n if (traitRuntime.getPendingCacheMissReason() === 'generation_bumped') {\n traitRuntime.setPendingCacheMissReason(undefined)\n }\n\n if (convergeExit._tag === 'Failure') {\n const errors = [...Cause.failures(convergeExit.cause), ...Cause.defects(convergeExit.cause)]\n const configError = errors.find(\n (err): err is StateTraitConverge.StateTraitConfigError =>\n err instanceof StateTraitConverge.StateTraitConfigError,\n )\n\n if (configError) {\n const fields = configError.fields ?? []\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n trigger: origin,\n code: 'state_trait::config_error',\n severity: 'error',\n message: configError.message,\n hint:\n configError.code === 'CYCLE_DETECTED'\n ? 
`computed/link graph has a cycle: ${fields.join(', ')}`\n : `multiple writers detected for the same field: ${fields.join(', ')}`,\n kind: `state_trait_config_error:${configError.code}`,\n })\n }\n\n return yield* Effect.failCause(convergeExit.cause)\n }\n\n const outcome = convergeExit.value\n\n const dirtyAllReasonForDeferred: DirtyAllReason | undefined = (txnContext.current as any)?.dirtyAllReason\n const dirtyPathsSnapshotForDeferred: ReadonlyArray<StateTransaction.StatePatchPath> | undefined =\n canTimeSlice && !isDeferredFlushTxn && !dirtyAllReasonForDeferred\n ? Array.from(txnContext.current.dirtyPathIds)\n : undefined\n\n if (\n canTimeSlice &&\n !isDeferredFlushTxn &&\n outcome._tag !== 'Degraded' &&\n (dirtyAllReasonForDeferred != null ||\n (dirtyPathsSnapshotForDeferred != null && dirtyPathsSnapshotForDeferred.length > 0))\n ) {\n const nowMs = Date.now()\n traitConvergeTimeSlicing.firstPendingAtMs = traitConvergeTimeSlicing.firstPendingAtMs ?? nowMs\n traitConvergeTimeSlicing.lastTouchedAtMs = nowMs\n\n if (dirtyAllReasonForDeferred != null) {\n traitConvergeTimeSlicing.backlogDirtyAllReason = dirtyAllReasonForDeferred\n traitConvergeTimeSlicing.backlogDirtyPaths.clear()\n } else if (!traitConvergeTimeSlicing.backlogDirtyAllReason && dirtyPathsSnapshotForDeferred) {\n for (const p of dirtyPathsSnapshotForDeferred) {\n traitConvergeTimeSlicing.backlogDirtyPaths.add(p)\n }\n }\n\n const runtimeLabel = yield* FiberRef.get(Debug.currentRuntimeLabel)\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n const debugSinks = yield* FiberRef.get(Debug.currentDebugSinks)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n const overrides = Option.isSome(overridesOpt) ? 
overridesOpt.value : undefined\n\n traitConvergeTimeSlicing.capturedContext = {\n runtimeLabel,\n diagnosticsLevel,\n debugSinks,\n overrides,\n }\n\n yield* Queue.offer(traitConvergeTimeSlicing.signal, undefined)\n }\n\n traitSummary = outcome.decision ? { converge: outcome.decision } : undefined\n\n if (outcome._tag === 'Degraded') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n code: outcome.reason === 'budget_exceeded' ? 'trait::budget_exceeded' : 'trait::runtime_error',\n severity: 'warning',\n message:\n outcome.reason === 'budget_exceeded'\n ? 'Trait converge exceeded budget; derived fields are frozen for this operation window.'\n : 'Trait converge failed at runtime; derived fields are frozen for this operation window.',\n hint:\n outcome.reason === 'budget_exceeded'\n ? 'Check whether computed/check contains heavy computation; move it to source/task or split into cacheable derived pieces.'\n : 'Check computed/link/check for invalid inputs or impure logic; add equals or guards if needed.',\n kind: 'trait_degraded',\n })\n }\n }\n\n // TraitLifecycle scoped validate: flush after converge so validation reads the latest derived state.\n if (stateTraitProgram && txnContext.current) {\n const dedupeScopedValidateRequests = (\n requests: ReadonlyArray<StateTraitValidate.ScopedValidateRequest>,\n ): ReadonlyArray<StateTraitValidate.ScopedValidateRequest> => {\n if (requests.length <= 1) return requests\n\n const priorities: Record<StateTraitValidate.ValidateMode, number> = {\n submit: 4,\n blur: 3,\n valueChange: 2,\n manual: 1,\n }\n\n let bestMode: StateTraitValidate.ValidateMode = 'manual'\n let bestP = priorities[bestMode]\n let hasRoot = false\n\n for (const r of requests) {\n const p = priorities[r.mode]\n if (p > bestP) {\n bestP = p\n bestMode = r.mode\n }\n if (r.target.kind === 'root') {\n hasRoot = true\n }\n }\n\n if (hasRoot) {\n return [{ mode: bestMode, target: { kind: 'root' } }]\n }\n\n const 
makeKey = (target: StateTraitValidate.ValidateTarget): string => {\n switch (target.kind) {\n case 'field':\n return `field:${target.path}`\n case 'list':\n return `list:${target.path}`\n case 'item':\n return `item:${target.path}:${target.index}:${target.field ?? ''}`\n case 'root':\n return 'root'\n }\n }\n\n const order: Array<string> = []\n const byKey = new Map<string, StateTraitValidate.ScopedValidateRequest>()\n\n for (const req of requests) {\n const key = makeKey(req.target)\n const existing = byKey.get(key)\n if (!existing) {\n byKey.set(key, req)\n order.push(key)\n continue\n }\n if (priorities[req.mode] > priorities[existing.mode]) {\n byKey.set(key, { ...existing, mode: req.mode })\n }\n }\n\n return order.map((k) => byKey.get(k)!).filter(Boolean)\n }\n\n const pending = (txnContext.current as any).stateTraitValidateRequests as\n | ReadonlyArray<StateTraitValidate.ScopedValidateRequest>\n | undefined\n\n if (pending && pending.length > 0) {\n const deduped = dedupeScopedValidateRequests(pending)\n yield* StateTraitValidate.validateInTransaction(\n stateTraitProgram as any,\n {\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: txnContext.current!.txnSeq,\n txnId: txnContext.current!.txnId,\n origin: txnContext.current!.origin,\n rowIdStore: traitRuntime.rowIdStore,\n listConfigs: traitRuntime.getListConfigs(),\n getDraft: () => txnContext.current!.draft as any,\n setDraft: (next) => {\n StateTransaction.updateDraft(txnContext, next as any)\n },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitValidate.ValidateContext<any>,\n deduped,\n )\n }\n }\n\n // If a source key becomes empty, synchronously recycle it back to idle (avoid tearing / ghost data).\n if (stateTraitProgram && txnContext.current) {\n yield* StateTraitSource.syncIdleInTransaction(\n stateTraitProgram as any,\n {\n moduleId: optionsModuleId,\n instanceId,\n getDraft: () => 
txnContext.current!.draft as any,\n setDraft: (next) => {\n StateTransaction.updateDraft(txnContext, next as any)\n },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitSource.SourceSyncContext<any>,\n )\n }\n\n // Commit the transaction: write to the underlying state once, and emit a single aggregated state:update event.\n yield* runOperation(\n 'state',\n 'state:update',\n { meta: { moduleId: optionsModuleId, instanceId } },\n Effect.gen(function* () {\n const replayEvent = (txnContext.current as any)?.lastReplayEvent as unknown\n const commitMode = ((txnContext.current as any)?.commitMode ?? 'normal') as StateCommitMode\n const priority = ((txnContext.current as any)?.priority ?? 'normal') as StateCommitPriority\n const dirtyAllSetStateHint = !!(txnContext.current as any)\n ? (txnContext.current as any)[DIRTY_ALL_SET_STATE_HINT] === true\n : false\n const txn = yield* StateTransaction.commit(txnContext, stateRef)\n\n if (txn) {\n const shouldWarnDirtyAllSetState =\n dirtyAllSetStateHint || (txn.origin.kind === 'state' && txn.origin.name === 'setState')\n\n if (shouldWarnDirtyAllSetState && isDevEnv() && (txn.dirtySet as any)?.dirtyAll === true) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n trigger: txn.origin,\n code: 'state_transaction::dirty_all_fallback',\n severity: 'warning',\n message:\n 'setState/state.update did not provide field-level dirty-set evidence; falling back to dirtyAll scheduling.',\n hint: 'Prefer $.state.mutate(...) or Logix.Module.Reducer.mutate(...) 
to produce field-level patchPaths; otherwise converge/validate degrades to full-path scheduling.',\n kind: 'dirty_all_fallback:set_state',\n })\n }\n\n // Record txn history: only for dev/test or explicit full instrumentation (devtools/debugging).\n // In production (default light), keep zero retention to avoid turning \"txn history\" into an implicit memory tax.\n if (isDevEnv() || txnContext.config.instrumentation === 'full') {\n txnHistory.push(txn)\n txnById.set(txn.txnId, txn)\n if (txnHistory.length > maxTxnHistory) {\n const oldest = txnHistory.shift()\n if (oldest) {\n txnById.delete(oldest.txnId)\n }\n }\n }\n\n const nextState =\n txn.finalStateSnapshot !== undefined\n ? txn.finalStateSnapshot\n : yield* SubscriptionRef.get(stateRef)\n\n // RowID virtual identity layer: align mappings after each observable commit\n // so in-flight gates and cache reuse remain stable under insert/remove/reorder.\n const listConfigs = traitRuntime.getListConfigs()\n if (listConfigs.length > 0) {\n traitRuntime.rowIdStore.updateAll(nextState as any, listConfigs)\n }\n\n const meta: StateCommitMeta = {\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n commitMode,\n priority,\n originKind: txn.origin.kind,\n originName: txn.origin.name,\n }\n\n if (!shouldPublishCommitHub || shouldPublishCommitHub()) {\n yield* PubSub.publish(commitHub, {\n value: nextState,\n meta,\n })\n }\n\n // Perf-sensitive ordering:\n // - In diagnostics=off mode (default for production/perf runs), allow selectorGraph notifications to be published\n // before state:update debug recording so React external store subscribers can start flushing earlier.\n // - In diagnostics=light/full, keep the original ordering so any selector eval trace stays after state:update\n // (preserves a more intuitive txn → selector → render causal chain in devtools).\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n if (onCommit && diagnosticsLevel === 'off') {\n yield* onCommit({\n state: nextState,\n 
meta,\n dirtySet: txn.dirtySet,\n diagnosticsLevel,\n })\n }\n\n const debugSinks = yield* FiberRef.get(Debug.currentDebugSinks)\n const shouldRecordStateUpdate = debugSinks.length > 0 && !Debug.isErrorOnlyOnlySinks(debugSinks)\n\n if (shouldRecordStateUpdate) {\n const shouldComputeEvidence = diagnosticsLevel !== 'off'\n\n const staticIrDigest = shouldComputeEvidence\n ? (() => {\n const convergeIr: any = (stateTraitProgram as any)?.convergeIr\n if (!convergeIr || convergeIr.configError) return undefined\n return getConvergeStaticIrDigest(convergeIr)\n })()\n : undefined\n\n const dirtySetEvidence = shouldComputeEvidence\n ? (() => {\n const rootIdsTopK = diagnosticsLevel === 'full' ? 32 : 3\n\n if (txn.dirtySet.dirtyAll) {\n return {\n dirtyAll: true,\n reason: txn.dirtySet.reason ?? 'unknownWrite',\n rootIds: [],\n rootCount: 0,\n keySize: 0,\n keyHash: 0,\n rootIdsTruncated: false,\n }\n }\n\n const fullRootIds = txn.dirtySet.rootIds\n const topK = fullRootIds.slice(0, rootIdsTopK)\n return {\n dirtyAll: false,\n // Keep diff anchors (count/hash/size) for the full set; only truncate the rootIds payload.\n rootIds: topK,\n rootCount: txn.dirtySet.rootCount,\n keySize: txn.dirtySet.keySize,\n keyHash: txn.dirtySet.keyHash,\n rootIdsTruncated: fullRootIds.length > rootIdsTopK,\n }\n })()\n : undefined\n\n yield* Debug.record({\n type: 'state:update',\n moduleId: optionsModuleId,\n state: nextState,\n instanceId,\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n staticIrDigest,\n dirtySet: dirtySetEvidence,\n patchCount: txn.patchCount,\n patchesTruncated: txn.patchesTruncated,\n ...(txn.patchesTruncated ? 
{ patchesTruncatedReason: txn.patchesTruncatedReason } : null),\n commitMode,\n priority,\n originKind: txn.origin.kind,\n originName: txn.origin.name,\n traitSummary,\n replayEvent: replayEvent as any,\n })\n }\n\n if (onCommit && diagnosticsLevel !== 'off') {\n yield* onCommit({\n state: nextState,\n meta,\n dirtySet: txn.dirtySet,\n diagnosticsLevel,\n })\n }\n }\n }),\n )\n }),\n ),\n )\n } finally {\n TaskRunner.exitSyncTransaction()\n }\n\n if (exit!._tag === 'Failure') {\n // Always clear the transaction context on failure to avoid leaking into subsequent entrypoints.\n StateTransaction.abort(txnContext)\n return yield* Effect.failCause(exit!.cause)\n }\n }),\n )\n\n /**\n * setStateInternal:\n * - Inside an active transaction: only update the draft and record patches (whole-State granularity), without writing to the underlying Ref.\n * - Outside a transaction: keep legacy behavior, write to SubscriptionRef directly and emit a state:update Debug event.\n *\n * Notes:\n * - When path=\"*\" and field-level evidence is missing, treat it as a dirtyAll-degrade entrypoint: it triggers full converge/validate paths;\n * - Prefer `$.state.mutate(...)` / `Logix.Module.Reducer.mutate(...)` to produce field-level patchPaths;\n * - Any non-trackable write (including path=\"*\") must explicitly degrade (dirtyAll); do not \"ignore *\" when roots exist.\n */\n const setStateInternal: SetStateInternal<S> = (\n next: S,\n path: StateTransaction.StatePatchPath,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ): Effect.Effect<void> =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn && txnContext.current) {\n const current: any = txnContext.current\n\n StateTransaction.updateDraft(txnContext, next)\n recordStatePatch(path, reason, from, to, traitNodeId, stepId)\n\n if (path === '*') {\n current[DIRTY_ALL_SET_STATE_HINT] = true\n }\n return\n }\n\n 
// Writes from non-transaction fibers must be queued to avoid bypassing txnQueue with concurrent updates.\n yield* enqueueTransaction(\n runOperation(\n 'state',\n 'state:update',\n {\n payload: next,\n meta: { moduleId, instanceId },\n },\n runWithStateTransaction(\n {\n kind: 'state',\n name: 'setState',\n },\n () =>\n Effect.sync(() => {\n // baseState is injected by runWithStateTransaction at txn start; we only need to update the draft here.\n StateTransaction.updateDraft(txnContext, next)\n recordStatePatch(path, reason, from, to, traitNodeId, stepId)\n }),\n ),\n ),\n )\n })\n\n const getExecVmAssemblyEvidence = (): unknown => {\n const program: any = traitRuntime.getProgram()\n const convergeIr: any = program?.convergeIr\n if (!convergeIr || convergeIr.configError) return undefined\n\n const digest = getConvergeStaticIrDigest(convergeIr)\n return {\n convergeStaticIrDigest: digest,\n convergeGeneration: convergeIr.generation,\n }\n }\n\n return {\n readState,\n setStateInternal,\n runWithStateTransaction,\n __logixGetExecVmAssemblyEvidence: getExecVmAssemblyEvidence,\n }\n}\n","import { Cause, Effect, Fiber, FiberRef, Ref, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport type { AnyModuleShape } from './module.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\nimport type { StateTxnOrigin } from './StateTransaction.js'\n\n/**\n * Prevents calling run*Task inside a \"synchronous transaction execution fiber\" (it would deadlock the txnQueue).\n *\n * - ModuleRuntime locally marks it as true while executing each transaction (dispatch/source-refresh/devtools/...).\n * - run*Task checks the flag on start: when true, it emits diagnostics only in dev/test and then no-ops.\n */\nexport const inSyncTransactionFiber = FiberRef.unsafeMake(false)\n\n/**\n * Force source.refresh:\n * - Default: when snapshot keyHash is 
unchanged and a non-idle snapshot already exists, refresh SHOULD be a no-op\n * (avoid redundant IO/writeback).\n * - Exception: explicit refresh (manual refresh) / invalidation-driven refresh needs to \"re-fetch even with the same keyHash\".\n *\n * Note: use a FiberRef to locally pass \"whether this refresh is forced\", avoiding expanding the source refresh handler signature.\n */\nexport const forceSourceRefresh = FiberRef.unsafeMake(false)\n\n/**\n * Synchronous transaction window (process-level) marker:\n * - Used as a hard guard in \"non-Effect API\" entry points (e.g. Promise/async functions).\n * - FiberRef cannot reliably read the \"current fiber\" in such entry points, so we need a synchronous callstack-level marker.\n *\n * Note: if a transaction body incorrectly crosses async boundaries, this marker will be held longer; that is a severe violation.\n */\nlet inSyncTransactionGlobalDepth = 0\n\nexport const enterSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth += 1\n}\n\nexport const exitSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth = Math.max(0, inSyncTransactionGlobalDepth - 1)\n}\n\nexport const isInSyncTransaction = (): boolean => inSyncTransactionGlobalDepth > 0\n\nexport type TaskRunnerMode =\n | 'task' // sequential\n | 'parallel'\n | 'latest'\n | 'exhaust'\n\nexport type TaskStatus = 'idle' | 'pending' | 'running' | 'success' | 'failure' | 'interrupted'\n\nexport interface TaskExecution {\n readonly taskId: number\n readonly status: TaskStatus\n readonly acceptedAt: number\n readonly startedAt?: number\n readonly endedAt?: number\n}\n\nexport interface TaskRunnerOrigins {\n readonly pending?: StateTxnOrigin\n readonly success?: StateTxnOrigin\n readonly failure?: StateTxnOrigin\n}\n\ntype TaskHandler<Payload, Sh extends AnyModuleShape, R> =\n | Logic.Of<Sh, R, void, never>\n | ((payload: Payload) => Logic.Of<Sh, R, void, never>)\n\ntype TaskEffect<Payload, Sh extends AnyModuleShape, R, A, E> =\n | Logic.Of<Sh, R, A, 
E>\n | ((payload: Payload) => Logic.Of<Sh, R, A, E>)\n\nexport interface TaskRunnerConfig<Payload, Sh extends AnyModuleShape, R, A = void, E = never> {\n /**\n * Optional: trigger source name (e.g. actionTag / fieldPath), used as the default pending origin.name.\n * - BoundApiRuntime may fill this in for onAction(\"xxx\") / traits.source.refresh(\"field\"), etc.\n * - Other callers are not required to provide it.\n */\n readonly triggerName?: string\n\n /**\n * pending: synchronous state writes (loading=true / clearing errors, etc.), always a separate transaction entry.\n * - Only executed for tasks that are accepted and actually started (ignored triggers in runExhaustTask do not run pending).\n */\n readonly pending?: TaskHandler<Payload, Sh, R>\n\n /**\n * effect: real IO / async work (must run outside the transaction window).\n */\n readonly effect: TaskEffect<Payload, Sh, R, A, E>\n\n /**\n * success: success writeback (separate transaction entry).\n */\n readonly success?: (result: A, payload: Payload) => Logic.Of<Sh, R, void, never>\n\n /**\n * failure: failure writeback (separate transaction entry).\n *\n * Note: takes a Cause to preserve defect/interrupt semantics; interrupts do not trigger failure writeback by default.\n */\n readonly failure?: (cause: Cause.Cause<E>, payload: Payload) => Logic.Of<Sh, R, void, never>\n\n /**\n * origin: optional override for the three transaction origins.\n * - Default: pending.kind=\"task:pending\"; success/failure.kind=\"service-callback\".\n */\n readonly origin?: TaskRunnerOrigins\n\n /**\n * priority: reserved for future debugging/sorting; does not change transaction boundaries or concurrency semantics.\n */\n readonly priority?: number\n}\n\nexport interface TaskRunnerRuntime {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runWithStateTransaction: (\n origin: StateTxnOrigin,\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>\n readonly 
resolveConcurrencyPolicy?: () => Effect.Effect<RuntimeInternalsResolvedConcurrencyPolicy, never, any>\n}\n\nconst resolve = <Payload, Sh extends AnyModuleShape, R, A, E>(\n eff: TaskEffect<Payload, Sh, R, A, E> | TaskHandler<Payload, Sh, R>,\n payload: Payload,\n): any => (typeof eff === 'function' ? (eff as any)(payload) : eff)\n\nconst defaultOrigins = (triggerName: string | undefined): Required<TaskRunnerOrigins> => ({\n pending: {\n kind: 'task:pending',\n name: triggerName,\n },\n success: {\n kind: 'service-callback',\n name: 'task:success',\n },\n failure: {\n kind: 'service-callback',\n name: 'task:failure',\n },\n})\n\nexport const shouldNoopInSyncTransactionFiber = (options: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n}): Effect.Effect<boolean> =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(inSyncTransactionFiber)\n if (!inTxn) {\n return false\n }\n // Always no-op regardless of env (otherwise we may deadlock); diagnostics are emitted only in dev/test.\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: options.moduleId,\n instanceId: options.instanceId,\n code: options.code,\n severity: options.severity,\n message: options.message,\n hint: options.hint,\n actionTag: options.actionTag,\n kind: options.kind,\n })\n }\n return true\n })\n\nconst resolveConcurrencyLimit = (runtime: TaskRunnerRuntime): Effect.Effect<number | 'unbounded', never, any> =>\n runtime.resolveConcurrencyPolicy\n ? 
runtime.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\nconst runTaskLifecycle = <Payload, Sh extends AnyModuleShape, R, A, E>(\n payload: Payload,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n getCanWriteBack?: Effect.Effect<boolean>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> =>\n Effect.gen(function* () {\n const noop = yield* shouldNoopInSyncTransactionFiber({\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_usage',\n severity: 'error',\n message: 'run*Task is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n 'Call run*Task from the run section of a watcher (e.g. $.onAction/$.onState/$.on); ' +\n 'do not call it directly inside a reducer / trait.run / synchronous transaction body. For long-lived flows, use a multi-entry pattern (pending → IO → writeback).',\n kind: 'run_task_in_transaction',\n })\n if (noop) {\n return\n }\n\n const defaults = defaultOrigins(config.triggerName)\n const origins: Required<TaskRunnerOrigins> = {\n pending: config.origin?.pending ?? defaults.pending,\n success: config.origin?.success ?? defaults.success,\n failure: config.origin?.failure ?? 
defaults.failure,\n }\n\n // 1) pending: separate transaction entry; once started it should not be interrupted by runLatest.\n const pending = config.pending\n if (pending) {\n yield* Effect.uninterruptible(\n runtime.runWithStateTransaction(origins.pending, () => Effect.asVoid(resolve(pending, payload))),\n )\n }\n\n // 2) IO: runs outside the transaction window.\n const io = resolve(config.effect, payload) as Effect.Effect<A, E, Logic.Env<Sh, R>>\n const exit = yield* Effect.exit(io)\n\n // 3) writeback: use the guard to confirm it's still the current task (runLatestTask).\n if (getCanWriteBack) {\n const ok = yield* getCanWriteBack\n if (!ok) {\n return\n }\n }\n\n if (exit._tag === 'Success') {\n const success = config.success\n if (success) {\n yield* runtime.runWithStateTransaction(origins.success, () => Effect.asVoid(success(exit.value, payload)))\n }\n return\n }\n\n // Failure: interruptions do not trigger failure writeback (e.g. runLatestTask cancellation, Scope ending).\n const cause = exit.cause as Cause.Cause<E>\n if (Cause.isInterrupted(cause)) {\n return\n }\n\n const failure = config.failure\n if (failure) {\n yield* runtime.runWithStateTransaction(origins.failure, () => Effect.asVoid(failure(cause, payload)))\n }\n }).pipe(\n // Watchers must not crash as a whole due to a single task failure: swallow errors, but keep them diagnosable.\n Effect.catchAllCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'task_runner::unhandled_failure',\n severity: 'error',\n message: 'TaskRunner encountered an unhandled failure (pending/IO/writeback).',\n hint: 'Add a failure writeback for this task or handle errors explicitly upstream; avoid fire-and-forget swallowing errors.',\n actionTag: config.triggerName,\n kind: 'task_runner_unhandled_failure',\n trigger: {\n kind: 'task',\n name: config.triggerName,\n },\n }).pipe(Effect.zipRight(Effect.logError('TaskRunner error', cause))),\n ),\n 
) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n\n/**\n * makeTaskRunner:\n * - Reuses FlowRuntime concurrency semantics (sequential/parallel/latest/exhaust).\n * - Splits a single trigger into: pending (separate txn) → IO → success/failure (separate txn).\n */\nexport const makeTaskRunner = <Payload, Sh extends AnyModuleShape, R, A = void, E = never>(\n stream: Stream.Stream<Payload>,\n mode: TaskRunnerMode,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> => {\n if (mode === 'latest') {\n return Effect.gen(function* () {\n const taskIdRef = yield* Ref.make(0)\n const currentFiberRef = yield* Ref.make<Fiber.RuntimeFiber<void, never> | undefined>(undefined)\n\n const start = (payload: Payload) =>\n Effect.gen(function* () {\n const taskId = yield* Ref.updateAndGet(taskIdRef, (n) => n + 1)\n\n const prev = yield* Ref.get(currentFiberRef)\n if (prev) {\n // Do not wait for the old fiber to fully end (avoid blocking new triggers); writeback is guarded by taskId.\n yield* Fiber.interruptFork(prev)\n }\n\n const canWriteBack = Ref.get(taskIdRef).pipe(Effect.map((current) => current === taskId))\n\n const fiber = yield* Effect.fork(\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config, canWriteBack),\n )\n\n yield* Ref.set(currentFiberRef, fiber)\n })\n\n return yield* Stream.runForEach(stream, start)\n })\n }\n\n if (mode === 'exhaust') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n const busyRef = yield* Ref.make(false)\n\n const mapper = (payload: Payload) =>\n Effect.gen(function* () {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n // Ignore trigger: no pending transaction is produced.\n return\n }\n try {\n yield* runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config)\n } finally {\n yield* Ref.set(busyRef, false)\n }\n })\n\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(mapper, { concurrency })))\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n if (mode === 'parallel') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n\n return yield* Stream.runDrain(\n stream.pipe(\n Stream.mapEffect((payload) => runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config), {\n concurrency,\n }),\n ),\n )\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n // mode === \"task\"(sequential)\n return Stream.runForEach(stream, (payload) =>\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config),\n ) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n}\n","import { Effect, FiberRef } from 'effect'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport {\n toSerializableErrorSummary,\n} from '../runtime/core/errorSummary.js'\nimport { dirtyPathsToRootIds, type FieldPath } from '../field-path.js'\nimport { getConvergeStaticIrDigest } from './converge-ir.js'\nimport { CowDraft, ShallowInPlaceDraft } from './converge-draft.js'\nimport { emitSchemaMismatch } from './converge-diagnostics.js'\nimport { currentExecVmMode } from './exec-vm-mode.js'\nimport { makeConvergeExecIr } from './converge-exec-ir.js'\nimport { getMiddlewareStack, runWriterStep, runWriterStepOffFast } from './converge-step.js'\nimport {\n StateTraitConfigError,\n type ConvergeContext,\n type ConvergeOutcome,\n type ConvergeStepSummary,\n type ConvergeSummary,\n type ConvergeMode,\n} from './converge.types.js'\nimport type {\n StateTraitEntry,\n StateTraitProgram,\n TraitConvergeConfigScope,\n TraitConvergeDecisionSummary,\n 
TraitConvergeDiagnosticsSamplingSummary,\n TraitConvergeDirtySummary,\n TraitConvergeGenerationEvidence,\n TraitConvergeHotspot,\n TraitConvergeOutcome as TraitConvergeOutcomeTag,\n TraitConvergePlanCacheEvidence,\n TraitConvergeReason,\n TraitConvergeRequestedMode,\n TraitConvergeStaticIrEvidence,\n TraitConvergeStepStats,\n} from './model.js'\n\nconst pickTop3Steps = (steps: ReadonlyArray<ConvergeStepSummary>): ReadonlyArray<ConvergeStepSummary> => {\n let first: ConvergeStepSummary | undefined\n let second: ConvergeStepSummary | undefined\n let third: ConvergeStepSummary | undefined\n\n for (const step of steps) {\n const d = step.durationMs\n if (!first || d > first.durationMs) {\n third = second\n second = first\n first = step\n continue\n }\n if (!second || d > second.durationMs) {\n third = second\n second = step\n continue\n }\n if (!third || d > third.durationMs) {\n third = step\n }\n }\n\n if (!first) return []\n if (!second) return [first]\n if (!third) return [first, second]\n return [first, second, third]\n}\n\nconst normalizePositiveInt = (value: unknown): number | undefined => {\n if (typeof value !== 'number' || !Number.isFinite(value)) return undefined\n const n = Math.floor(value)\n return n > 0 ? 
n : undefined\n}\n\nconst insertTopKHotspot = (args: {\n readonly hotspots: Array<TraitConvergeHotspot>\n readonly next: TraitConvergeHotspot\n readonly topK: number\n}): void => {\n const { hotspots, next, topK } = args\n if (topK <= 0) return\n\n const idx = (() => {\n for (let i = 0; i < hotspots.length; i++) {\n if (next.durationMs > hotspots[i]!.durationMs) return i\n }\n return hotspots.length\n })()\n\n if (idx >= topK) return\n hotspots.splice(idx, 0, next)\n if (hotspots.length > topK) {\n hotspots.length = topK\n }\n}\n\n/**\n * convergeInTransaction:\n * - Execute one derived converge pass within an already-started StateTransaction context.\n * - Currently covers computed/link only (check/source will be added in later phases).\n */\nexport const convergeInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: ConvergeContext<S>,\n): Effect.Effect<ConvergeOutcome> =>\n Effect.gen(function* () {\n yield* emitSchemaMismatch(program, ctx)\n\n const decisionStartedAt = ctx.now()\n let decisionDurationMs: number | undefined\n let executionStartedAt = decisionStartedAt\n const base = ctx.getDraft()\n const requestedMode: TraitConvergeRequestedMode = ctx.requestedMode ?? 'auto'\n const reasons: Array<TraitConvergeReason> = []\n let mode: ConvergeMode = requestedMode === 'dirty' ? 'dirty' : requestedMode === 'full' ? 'full' : 'full'\n\n const ir = program.convergeIr\n if (!ir) {\n return { _tag: 'Noop' } as const\n }\n\n // 049: Exec IR must be tied to the generation lifecycle and should not be rebuilt on every txn window.\n let execIr = program.convergeExecIr\n if (!execIr || execIr.generation !== ir.generation) {\n execIr = makeConvergeExecIr(ir)\n ;(program as any).convergeExecIr = execIr\n }\n\n if (ir.configError) {\n throw new StateTraitConfigError(ir.configError.code, ir.configError.message, ir.configError.fields)\n }\n\n const stepsInTopoOrder = (ir.stepsById ?? 
[]) as ReadonlyArray<StateTraitEntry<any, string>>\n const totalSteps = stepsInTopoOrder.length\n\n if (totalSteps === 0) {\n return { _tag: 'Noop' } as const\n }\n\n const stack = yield* getMiddlewareStack()\n const diagnosticsLevel: Debug.DiagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n const debugSinks = yield* FiberRef.get(Debug.currentDebugSinks)\n // Decision / TraitSummary gate is based on \"will it be consumed\" (sinks), not diagnosticsLevel.\n // diagnosticsLevel only controls exportable/heavy details (trace payload, hotspots, static IR export, etc.).\n const shouldCollectDecision = debugSinks.length > 0 && !Debug.isErrorOnlyOnlySinks(debugSinks)\n const shouldCollectDecisionDetails = shouldCollectDecision\n const shouldCollectDecisionHeavyDetails = shouldCollectDecision && diagnosticsLevel !== 'off'\n const execVmMode = yield* FiberRef.get(currentExecVmMode)\n\n // 044: deterministic sampling for sampled mode (uses txnSeq as a stable anchor by default).\n let diagnosticsSampling: TraitConvergeDiagnosticsSamplingSummary | undefined\n if (diagnosticsLevel === 'sampled') {\n const cfg = yield* FiberRef.get(Debug.currentTraitConvergeDiagnosticsSampling)\n const sampleEveryN = normalizePositiveInt(cfg.sampleEveryN) ?? 32\n const topK = normalizePositiveInt(cfg.topK) ?? 3\n const txnSeq = ctx.txnSeq\n const sampled =\n typeof txnSeq === 'number' && Number.isFinite(txnSeq) && txnSeq > 0\n ? (Math.floor(txnSeq) - 1) % sampleEveryN === 0\n : false\n diagnosticsSampling = {\n strategy: 'txnSeq_interval',\n sampleEveryN,\n topK,\n sampled,\n }\n }\n\n const shouldTimeStepsForHotspots =\n shouldCollectDecision && (diagnosticsLevel === 'full' || diagnosticsSampling?.sampled === true)\n const hotspotsTopK = diagnosticsLevel === 'full' ? 3 : (diagnosticsSampling?.topK ?? 3)\n const hotspots: Array<TraitConvergeHotspot> | undefined = shouldTimeStepsForHotspots ? [] : undefined\n const schedulingScope = ctx.schedulingScope ?? 
'all'\n const scopeStepIds =\n ctx.schedulingScopeStepIds ??\n (schedulingScope === 'immediate'\n ? execIr.topoOrderImmediateInt32\n : schedulingScope === 'deferred'\n ? execIr.topoOrderDeferredInt32\n : execIr.topoOrderInt32)\n const scopeStepCount = scopeStepIds.length\n const immediateStepCount = execIr.topoOrderImmediateInt32.length\n const deferredStepCount = execIr.topoOrderDeferredInt32.length\n const timeSlicingSummary =\n deferredStepCount > 0\n ? {\n scope: schedulingScope,\n immediateStepCount,\n deferredStepCount,\n }\n : undefined\n\n if (deferredStepCount > 0) {\n if (schedulingScope === 'immediate' && !reasons.includes('time_slicing_immediate')) {\n reasons.push('time_slicing_immediate')\n } else if (schedulingScope === 'deferred' && !reasons.includes('time_slicing_deferred')) {\n reasons.push('time_slicing_deferred')\n }\n }\n\n const emitTraitConvergeTraceEvent = (decision: TraitConvergeDecisionSummary): Effect.Effect<void> =>\n !shouldCollectDecision\n ? Effect.void\n : Debug.record({\n type: 'trace:trait:converge',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data: decision as any,\n })\n\n const registry = ir.fieldPathIdRegistry\n const dirtyPaths = ctx.dirtyPaths == null ? [] : Array.isArray(ctx.dirtyPaths) ? ctx.dirtyPaths : ctx.dirtyPaths\n\n const dirtyRootIds = dirtyPathsToRootIds({\n dirtyPaths,\n registry,\n dirtyAllReason: ctx.dirtyAllReason,\n })\n\n const DIRTY_ROOT_IDS_TOP_K = 3\n const AUTO_FLOOR_RATIO = 1.05\n const dirtySummary: TraitConvergeDirtySummary | undefined = !shouldCollectDecisionDetails\n ? undefined\n : dirtyRootIds.dirtyAll\n ? {\n dirtyAll: true,\n reason: dirtyRootIds.reason ?? 
'unknownWrite',\n rootCount: 0,\n rootIds: [],\n rootIdsTruncated: false,\n }\n : {\n dirtyAll: false,\n rootCount: dirtyRootIds.rootCount,\n rootIds: dirtyRootIds.rootIds.slice(0, DIRTY_ROOT_IDS_TOP_K),\n rootIdsTruncated: dirtyRootIds.rootIds.length > DIRTY_ROOT_IDS_TOP_K,\n }\n\n const configScope: TraitConvergeConfigScope = ctx.configScope ?? 'builtin'\n const generationEvidence: TraitConvergeGenerationEvidence = ctx.generation ?? {\n generation: ir.generation,\n }\n const generation = generationEvidence.generation\n const staticIrDigest = !shouldCollectDecisionHeavyDetails ? '' : getConvergeStaticIrDigest(ir)\n const decisionBudgetMs = requestedMode === 'auto' ? ctx.decisionBudgetMs : undefined\n const cacheMissReasonHint = ctx.cacheMissReasonHint\n\n if (cacheMissReasonHint === 'generation_bumped' && !reasons.includes('generation_bumped')) {\n reasons.push('generation_bumped')\n }\n\n const isDecisionBudgetExceeded = (): boolean =>\n typeof decisionBudgetMs === 'number' &&\n Number.isFinite(decisionBudgetMs) &&\n decisionBudgetMs > 0 &&\n ctx.now() - decisionStartedAt > decisionBudgetMs\n\n const markDecisionBudgetCutoff = (): void => {\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n }\n\n const prefixFieldPathIdsByPathId = execIr.prefixFieldPathIdsByPathId\n const prefixOffsetsByPathId = execIr.prefixOffsetsByPathId\n\n const dirtyPrefixBitSet = execIr.scratch.dirtyPrefixBitSet\n const reachableStepBitSet = execIr.scratch.reachableStepBitSet\n const dirtyPrefixQueue = execIr.scratch.dirtyPrefixQueue\n const planScratch = execIr.scratch.planStepIds\n const triggerStepIdsByFieldPathId = execIr.triggerStepIdsByFieldPathId\n const triggerStepOffsetsByFieldPathId = execIr.triggerStepOffsetsByFieldPathId\n\n const addPathPrefixes = (pathId: number): void => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return\n for (let i = start; i < end; i++) {\n 
dirtyPrefixBitSet.add(prefixFieldPathIdsByPathId[i]!)\n }\n }\n\n const hasAnyDirtyPrefix = (pathId: number): boolean => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return false\n for (let i = start; i < end; i++) {\n if (dirtyPrefixBitSet.has(prefixFieldPathIdsByPathId[i]!)) return true\n }\n return false\n }\n\n const shouldRunStepById = (stepId: number): boolean => {\n const outId = execIr.stepOutFieldPathIdByStepId[stepId]\n if (typeof outId === 'number' && hasAnyDirtyPrefix(outId)) {\n return true\n }\n const depsStart = execIr.stepDepsOffsetsByStepId[stepId]\n const depsEnd = execIr.stepDepsOffsetsByStepId[stepId + 1]\n if (depsStart == null || depsEnd == null) return false\n for (let i = depsStart; i < depsEnd; i++) {\n if (hasAnyDirtyPrefix(execIr.stepDepsFieldPathIds[i]!)) return true\n }\n return false\n }\n\n const computePlanStepIds = (\n rootIds: ReadonlyArray<number>,\n options?: { readonly stopOnDecisionBudget?: boolean },\n ): { readonly plan?: Int32Array; readonly budgetCutoff?: true } => {\n // Small graphs and custom step slices are cheap to scan; keep the simpler logic.\n if (totalSteps < 32 || ctx.schedulingScopeStepIds != null) {\n dirtyPrefixBitSet.clear()\n for (let i = 0; i < rootIds.length; i++) {\n addPathPrefixes(rootIds[i]!)\n }\n\n let planLen = 0\n let checks = 0\n for (let i = 0; i < scopeStepIds.length; i++) {\n const stepId = scopeStepIds[i]!\n if (options?.stopOnDecisionBudget) {\n checks += 1\n if (checks >= 32) {\n checks = 0\n if (isDecisionBudgetExceeded()) {\n dirtyPrefixBitSet.clear()\n return { budgetCutoff: true } as const\n }\n }\n }\n\n if (!shouldRunStepById(stepId)) {\n continue\n }\n\n planScratch[planLen] = stepId\n planLen += 1\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n\n const plan = execVmMode ? 
planScratch.subarray(0, planLen) : new Int32Array(planLen)\n if (!execVmMode && planLen > 0) {\n plan.set(planScratch.subarray(0, planLen))\n }\n dirtyPrefixBitSet.clear()\n return { plan } as const\n }\n\n // 059: Typed reachability (prefixId -> stepIds) + queue + bitset.\n dirtyPrefixBitSet.clear()\n reachableStepBitSet.clear()\n\n let queueLen = 0\n const enqueuePathPrefixes = (pathId: number): void => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return\n for (let i = start; i < end; i++) {\n const prefixId = prefixFieldPathIdsByPathId[i]!\n if (dirtyPrefixBitSet.has(prefixId)) continue\n dirtyPrefixBitSet.add(prefixId)\n dirtyPrefixQueue[queueLen] = prefixId\n queueLen += 1\n }\n }\n\n for (let i = 0; i < rootIds.length; i++) {\n enqueuePathPrefixes(rootIds[i]!)\n }\n\n const isStepInScope = (stepId: number): boolean => {\n if (schedulingScope === 'all') return true\n const flag = execIr.stepSchedulingByStepId[stepId]\n return schedulingScope === 'immediate' ? 
flag === 0 : flag === 1\n }\n\n let cursor = 0\n let checks = 0\n while (cursor < queueLen) {\n if (options?.stopOnDecisionBudget) {\n checks += 1\n if (checks >= 32) {\n checks = 0\n if (isDecisionBudgetExceeded()) {\n dirtyPrefixBitSet.clear()\n return { budgetCutoff: true } as const\n }\n }\n }\n\n const prefixId = dirtyPrefixQueue[cursor]!\n cursor += 1\n\n const start = triggerStepOffsetsByFieldPathId[prefixId]\n const end = triggerStepOffsetsByFieldPathId[prefixId + 1]\n if (start == null || end == null) continue\n for (let i = start; i < end; i++) {\n const stepId = triggerStepIdsByFieldPathId[i]!\n if (!isStepInScope(stepId)) continue\n if (reachableStepBitSet.has(stepId)) continue\n reachableStepBitSet.add(stepId)\n enqueuePathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n\n let planLen = 0\n for (let i = 0; i < scopeStepIds.length; i++) {\n const stepId = scopeStepIds[i]!\n if (!reachableStepBitSet.has(stepId)) continue\n planScratch[planLen] = stepId\n planLen += 1\n }\n\n const plan = execVmMode ? planScratch.subarray(0, planLen) : new Int32Array(planLen)\n if (!execVmMode && planLen > 0) {\n plan.set(planScratch.subarray(0, planLen))\n }\n dirtyPrefixBitSet.clear()\n return { plan } as const\n }\n\n const cache = ctx.planCache\n if (\n cacheMissReasonHint === 'generation_bumped' &&\n typeof generationEvidence.generationBumpCount === 'number' &&\n generationEvidence.generationBumpCount >= 3 &&\n cache &&\n !cache.isDisabled()\n ) {\n cache.disable('generation_thrash')\n }\n const canUseCache =\n !!cache &&\n !cache.isDisabled() &&\n ctx.schedulingScopeStepIds == null &&\n !dirtyRootIds.dirtyAll &&\n dirtyRootIds.rootIds.length > 0\n const planKeyHash = dirtyRootIds.keyHash ^ (schedulingScope === 'all' ? 0 : schedulingScope === 'immediate' ? 1 : 2)\n const rootIdsKey = canUseCache ? dirtyRootIds.rootIds : undefined\n\n let cacheEvidence: TraitConvergePlanCacheEvidence | undefined = shouldCollectDecisionHeavyDetails\n ? 
{\n capacity: 0,\n size: 0,\n hits: 0,\n misses: 0,\n evicts: 0,\n hit: false,\n }\n : undefined\n\n let affectedSteps: number | undefined\n let planStepIds: Int32Array | undefined\n\n const getOrComputePlan = (options?: {\n readonly missReason?: TraitConvergePlanCacheEvidence['missReason']\n readonly stopOnDecisionBudget?: boolean\n }): { readonly plan?: Int32Array; readonly hit: boolean; readonly budgetCutoff?: true } => {\n if (dirtyRootIds.dirtyAll) {\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirtyRootIds.keySize,\n missReason: options?.missReason ?? 'unknown',\n })\n }\n const fullPlan = scopeStepIds\n affectedSteps = fullPlan.length\n return { plan: fullPlan, hit: false }\n }\n\n if (canUseCache && cache && rootIdsKey) {\n const cached = cache.get(planKeyHash, rootIdsKey)\n if (cached) {\n if (cacheEvidence) {\n cacheEvidence = cache.evidence({\n hit: true,\n keySize: dirtyRootIds.keySize,\n })\n }\n affectedSteps = cached.length\n return { plan: cached, hit: true }\n }\n }\n\n // Decision budget is designed to cap worst-case plan computation cost.\n // For small graphs (<32 steps), the plan scan is bounded and the early cutoff\n // can introduce flakiness due to sub-ms clock jitter on some platforms.\n if (options?.stopOnDecisionBudget && totalSteps >= 32 && isDecisionBudgetExceeded()) {\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirtyRootIds.keySize,\n missReason: options?.missReason ?? 'unknown',\n })\n }\n return { hit: false, budgetCutoff: true } as const\n }\n\n const computed = computePlanStepIds(dirtyRootIds.rootIds, {\n stopOnDecisionBudget: options?.stopOnDecisionBudget,\n })\n if (computed.budgetCutoff) {\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirtyRootIds.keySize,\n missReason: options?.missReason ?? 
'unknown',\n })\n }\n return { hit: false, budgetCutoff: true } as const\n }\n\n const plan = computed.plan ?? new Int32Array(0)\n if (canUseCache && cache && rootIdsKey) {\n cache.set(planKeyHash, rootIdsKey, execVmMode ? plan.slice() : plan)\n }\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirtyRootIds.keySize,\n missReason: options?.missReason ?? 'not_cached',\n })\n }\n affectedSteps = plan.length\n return { plan, hit: false }\n }\n\n const NEAR_FULL_ROOT_RATIO_THRESHOLD = 0.75\n const NEAR_FULL_PLAN_RATIO_THRESHOLD = 0.9\n\n if (requestedMode === 'auto') {\n if (ctx.txnSeq === 1) {\n mode = 'full'\n reasons.push('cold_start')\n } else if (dirtyRootIds.dirtyAll) {\n mode = 'full'\n reasons.push('dirty_all')\n reasons.push('unknown_write')\n } else if (dirtyRootIds.rootIds.length === 0) {\n mode = 'full'\n reasons.push('unknown_write')\n } else {\n const rootRatio = scopeStepCount > 0 ? dirtyRootIds.rootCount / scopeStepCount : 1\n if (rootRatio >= NEAR_FULL_ROOT_RATIO_THRESHOLD) {\n mode = 'full'\n reasons.push('near_full')\n } else {\n const { plan, hit, budgetCutoff } = getOrComputePlan({\n missReason: cacheMissReasonHint ?? 'not_cached',\n stopOnDecisionBudget: decisionBudgetMs != null,\n })\n if (budgetCutoff || !plan) {\n mode = 'full'\n markDecisionBudgetCutoff()\n } else {\n planStepIds = plan\n reasons.push(hit ? 'cache_hit' : 'cache_miss')\n const ratio = scopeStepCount > 0 ? plan.length / scopeStepCount : 1\n if (ratio >= NEAR_FULL_PLAN_RATIO_THRESHOLD) {\n mode = 'full'\n reasons.push('near_full')\n } else {\n mode = 'dirty'\n }\n }\n }\n }\n } else {\n reasons.push('module_override')\n if (mode === 'dirty') {\n const { plan, hit } = getOrComputePlan({ missReason: cacheMissReasonHint ?? 'not_cached' })\n planStepIds = plan\n if (dirtyRootIds.dirtyAll) {\n reasons.push('dirty_all')\n } else if (cache && dirtyRootIds.rootIds.length > 0) {\n reasons.push(hit ? 
'cache_hit' : 'cache_miss')\n }\n }\n }\n\n if (\n cacheEvidence?.disabled &&\n cacheEvidence.disableReason === 'low_hit_rate' &&\n !reasons.includes('low_hit_rate_protection')\n ) {\n reasons.push('low_hit_rate_protection')\n }\n\n executionStartedAt = ctx.now()\n if (requestedMode === 'auto') {\n decisionDurationMs = Math.max(0, executionStartedAt - decisionStartedAt)\n }\n\n let changedCount = 0\n\n const makeDecisionSummary = (params: {\n readonly outcome: TraitConvergeOutcomeTag\n readonly executedSteps: number\n readonly executionDurationMs: number\n }): TraitConvergeDecisionSummary => {\n const stepStats: TraitConvergeStepStats = {\n totalSteps,\n executedSteps: params.executedSteps,\n skippedSteps: Math.max(0, totalSteps - params.executedSteps),\n changedSteps: changedCount,\n ...(typeof affectedSteps === 'number' ? { affectedSteps } : null),\n }\n\n const base = {\n requestedMode,\n executedMode: mode,\n outcome: params.outcome,\n configScope,\n staticIrDigest,\n executionBudgetMs: ctx.budgetMs,\n executionDurationMs: params.executionDurationMs,\n ...(requestedMode === 'auto' && ctx.decisionBudgetMs != null\n ? { decisionBudgetMs: ctx.decisionBudgetMs }\n : null),\n ...(requestedMode === 'auto' && decisionDurationMs != null ? { decisionDurationMs } : null),\n reasons,\n stepStats,\n } satisfies TraitConvergeDecisionSummary\n\n if (!shouldCollectDecisionDetails) {\n return base\n }\n\n if (!shouldCollectDecisionHeavyDetails) {\n return {\n ...base,\n dirty: dirtySummary,\n } satisfies TraitConvergeDecisionSummary\n }\n\n return {\n ...base,\n thresholds: { floorRatio: AUTO_FLOOR_RATIO },\n dirty: dirtySummary,\n cache: cacheEvidence,\n generation: generationEvidence,\n staticIr: {\n fieldPathCount: ir.fieldPaths.length,\n stepCount: totalSteps,\n buildDurationMs: ir.buildDurationMs,\n },\n ...(timeSlicingSummary ? { timeSlicing: timeSlicingSummary } : {}),\n ...(diagnosticsSampling ? { diagnosticsSampling } : {}),\n ...(hotspots && hotspots.length > 0 ? 
{ top3: hotspots.slice() } : {}),\n } satisfies TraitConvergeDecisionSummary\n }\n\n const steps: Array<ConvergeStepSummary> | undefined = diagnosticsLevel === 'full' ? [] : undefined\n let executedSteps = 0\n const canUseInPlaceDraft = ctx.allowInPlaceDraft === true && execIr.allOutPathsShallow\n const draft = canUseInPlaceDraft ? new ShallowInPlaceDraft(base) : new CowDraft(base)\n let budgetChecks = 0\n const rollbackDraft = (): void => {\n if (draft instanceof ShallowInPlaceDraft) {\n draft.rollback()\n }\n ctx.setDraft(base)\n }\n\n try {\n if (mode === 'dirty' && !planStepIds) {\n const { plan } = getOrComputePlan({ missReason: cacheMissReasonHint ?? 'not_cached' })\n planStepIds = plan\n }\n\n let dirtyPrefixSet: typeof dirtyPrefixBitSet | undefined\n if (mode === 'dirty' && !dirtyRootIds.dirtyAll) {\n dirtyPrefixBitSet.clear()\n const roots = dirtyRootIds.rootIds\n for (let i = 0; i < roots.length; i++) {\n addPathPrefixes(roots[i]!)\n }\n dirtyPrefixSet = dirtyPrefixBitSet\n }\n\n const stepIds = mode === 'dirty' && planStepIds ? 
planStepIds : scopeStepIds\n\n for (let i = 0; i < stepIds.length; i++) {\n const stepId = stepIds[i]!\n const entry = stepsInTopoOrder[stepId]\n if (!entry) continue\n\n const fieldPath = entry.fieldPath\n\n if (steps) {\n if (ctx.now() - executionStartedAt > ctx.budgetMs) {\n // Budget exceeded: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const top3 = pickTop3Steps(steps)\n const summary: ConvergeSummary = {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3,\n }\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'budget_exceeded',\n summary,\n ...(decision ? { decision } : null),\n } as const\n }\n } else {\n budgetChecks += 1\n if (budgetChecks >= 32) {\n budgetChecks = 0\n if (ctx.now() - executionStartedAt > ctx.budgetMs) {\n // Budget exceeded: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'budget_exceeded',\n ...(decision ? 
{ decision } : null),\n } as const\n }\n }\n }\n\n if (mode === 'dirty' && dirtyPrefixSet) {\n const shouldRun = shouldRunStepById(stepId)\n if (!shouldRun) {\n continue\n }\n }\n\n executedSteps += 1\n\n if (steps) {\n const stepStartedAt = ctx.now()\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n const stepEndedAt = ctx.now()\n const durationMs = Math.max(0, stepEndedAt - stepStartedAt)\n const stepKind = entry.kind === 'computed' ? 'computed' : 'link'\n const stepLabel = execIr.stepLabelByStepId[stepId] ?? String(stepId)\n const changed = exit._tag === 'Success' ? exit.value : false\n if (hotspots) {\n insertTopKHotspot({\n hotspots,\n topK: hotspotsTopK,\n next: {\n kind: stepKind,\n stepId,\n outFieldPathId: execIr.stepOutFieldPathIdByStepId[stepId],\n durationMs,\n changed,\n },\n })\n }\n steps.push({\n stepId: stepLabel,\n kind: stepKind,\n fieldPath,\n durationMs,\n changed,\n })\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const top3 = pickTop3Steps(steps)\n const summary: ConvergeSummary = {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3,\n }\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n summary,\n ...(decision ? 
{ decision } : null),\n } as const\n }\n if (exit.value) {\n changedCount += 1\n if (mode === 'dirty' && dirtyPrefixSet) {\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n continue\n }\n\n if (hotspots) {\n const stepStartedAt = ctx.now()\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n const stepEndedAt = ctx.now()\n const durationMs = Math.max(0, stepEndedAt - stepStartedAt)\n const stepKind = entry.kind === 'computed' ? 'computed' : 'link'\n const changed = exit._tag === 'Success' ? exit.value : false\n insertTopKHotspot({\n hotspots,\n topK: hotspotsTopK,\n next: {\n kind: stepKind,\n stepId,\n outFieldPathId: execIr.stepOutFieldPathIdByStepId[stepId],\n durationMs,\n changed,\n },\n })\n\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? 
{ decision } : null),\n } as const\n }\n\n if (changed) {\n changedCount += 1\n if (mode === 'dirty' && dirtyPrefixSet) {\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n\n continue\n }\n\n // Off-fast-path: enabled only when middleware is empty and diagnostics=off, to keep near-zero overhead in off mode.\n // If you need deps tracing / mismatch diagnostics, switch to light/full/sampled explicitly.\n if (diagnosticsLevel === 'off' && stack.length === 0) {\n try {\n if (runWriterStepOffFast(ctx, execIr, draft, stepId, entry)) {\n changedCount += 1\n if (mode === 'dirty' && dirtyPrefixSet) {\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n } catch (e) {\n const error = toSerializableErrorSummary(e)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? { decision } : null),\n } as const\n }\n continue\n }\n\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? 
makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? { decision } : null),\n } as const\n }\n if (exit.value) {\n changedCount += 1\n if (mode === 'dirty' && dirtyPrefixSet) {\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n }\n } catch (e) {\n // Config error: hard fail (let the outer transaction entrypoint block commit).\n if (e instanceof StateTraitConfigError) {\n throw e\n }\n const error = toSerializableErrorSummary(e)\n // Runtime error: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const summary: ConvergeSummary | undefined = steps\n ? {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n }\n : undefined\n const decision = shouldCollectDecision\n ? makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(summary ? { summary } : null),\n ...(decision ? { decision } : null),\n } as const\n }\n\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const outcome: TraitConvergeOutcomeTag = changedCount > 0 ? 'Converged' : 'Noop'\n const decision = shouldCollectDecision\n ? 
makeDecisionSummary({\n outcome,\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n\n return changedCount > 0\n ? ({\n _tag: 'Converged',\n patchCount: changedCount,\n ...(steps\n ? {\n summary: {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n } satisfies ConvergeSummary,\n }\n : null),\n ...(decision ? { decision } : null),\n } as const)\n : ({\n _tag: 'Noop',\n ...(steps\n ? {\n summary: {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n } satisfies ConvergeSummary,\n }\n : null),\n ...(decision ? { decision } : null),\n } as const)\n })\n","import { Effect, FiberRef, Option } from 'effect'\nimport * as SchemaAST from 'effect/SchemaAST'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { isDevEnv } from '../runtime/core/env.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport { normalizeFieldPath } from '../field-path.js'\nimport * as DepsTrace from './deps-trace.js'\nimport type { ConvergeContext } from './converge.types.js'\nimport type { StateTraitProgram, StateTraitSchemaPathRef } from './model.js'\n\nconst onceKeysFallback = new Set<string>()\n\nexport const onceInRunSession = (key: string): Effect.Effect<boolean> =>\n Effect.serviceOption(RunSessionTag).pipe(\n Effect.map((maybe) => {\n if (Option.isSome(maybe)) {\n return maybe.value.local.once(key)\n }\n if (onceKeysFallback.has(key)) return false\n onceKeysFallback.add(key)\n return true\n }),\n )\n\nconst formatList = (items: ReadonlyArray<string>, limit = 10): string => {\n if (items.length === 0) return ''\n if (items.length <= limit) return 
items.join(', ')\n return `${items.slice(0, limit).join(', ')}, …(+${items.length - limit})`\n}\n\nexport const emitDepsMismatch = (params: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'computed' | 'source'\n readonly fieldPath: string\n readonly diff: DepsTrace.DepsDiff\n}): Effect.Effect<void> => {\n return Effect.gen(function* () {\n const key = `${params.moduleId ?? 'unknown'}::${params.instanceId ?? 'unknown'}::${params.kind}::${params.fieldPath}`\n const shouldEmit = yield* onceInRunSession(`deps_mismatch:${key}`)\n if (!shouldEmit) return\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: params.moduleId,\n instanceId: params.instanceId,\n code: 'state_trait::deps_mismatch',\n severity: 'warning',\n message:\n `[deps] ${params.kind} \"${params.fieldPath}\" declared=[${formatList(params.diff.declared)}] ` +\n `reads=[${formatList(params.diff.reads)}] missing=[${formatList(params.diff.missing)}] ` +\n `unused=[${formatList(params.diff.unused)}]`,\n hint:\n 'deps is the single source of truth for dependencies: incremental scheduling / reverse closures / performance optimizations rely on deps only. ' +\n 'Keep deps consistent with actual reads; if you really depend on the whole object, declare a coarser-grained dep (e.g. 
\"profile\") to cover sub-field reads.',\n kind: `deps_mismatch:${params.kind}`,\n })\n })\n}\n\nconst schemaHasPath = (\n ast: SchemaAST.AST,\n segments: ReadonlyArray<string>,\n seen: Set<SchemaAST.AST> = new Set(),\n): boolean => {\n if (segments.length === 0) return true\n\n let current = ast\n\n // unwrap Suspend/Refinement (common for recursive schemas and branded schemas)\n while (true) {\n if (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) {\n // Recursion: if we can't statically decide further, allow conservatively to avoid false positives.\n return true\n }\n seen.add(current)\n current = current.f()\n continue\n }\n if (SchemaAST.isRefinement(current)) {\n current = current.from\n continue\n }\n break\n }\n\n // Transformation: prefer `to` (decoded shape), but also allow `from` to reduce false positives.\n if (SchemaAST.isTransformation(current)) {\n return schemaHasPath(current.to, segments, seen) || schemaHasPath(current.from, segments, seen)\n }\n\n if (SchemaAST.isUnion(current)) {\n return current.types.some((t) => schemaHasPath(t, segments, seen))\n }\n\n if (SchemaAST.isTupleType(current)) {\n const candidates: Array<SchemaAST.AST> = []\n for (const e of current.elements) candidates.push(e.type)\n for (const r of current.rest) candidates.push(r.type)\n if (candidates.length === 0) return true\n return candidates.some((t) => schemaHasPath(t, segments, seen))\n }\n\n if (SchemaAST.isTypeLiteral(current)) {\n const [head, ...tail] = segments\n\n for (const ps of current.propertySignatures) {\n if (String(ps.name) !== head) continue\n return schemaHasPath(ps.type, tail, seen)\n }\n\n // index signature: open objects like Record<string, T> allow any key\n for (const sig of current.indexSignatures) {\n let param: SchemaAST.AST = sig.parameter as unknown as SchemaAST.AST\n while (SchemaAST.isRefinement(param)) {\n param = param.from\n }\n const tag = (param as any)?._tag\n if (tag === 'StringKeyword' || tag === 'TemplateLiteral') {\n return 
schemaHasPath(sig.type, tail, seen)\n }\n }\n\n return false\n }\n\n const tag = (current as any)?._tag\n if (tag === 'AnyKeyword' || tag === 'UnknownKeyword' || tag === 'ObjectKeyword' || tag === 'Declaration') {\n return true\n }\n\n return false\n}\n\nconst schemaHasFieldPath = (stateSchemaAst: SchemaAST.AST, path: string): boolean => {\n if (!path) return true\n if (path === '$root') return true\n\n const normalized = normalizeFieldPath(path)\n if (!normalized) return false\n\n const segs = normalized[0] === '$root' ? normalized.slice(1) : normalized\n return schemaHasPath(stateSchemaAst, segs)\n}\n\nconst formatSchemaMismatchLine = (ref: StateTraitSchemaPathRef): string => {\n if (ref.kind === 'fieldPath') {\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" fieldPath=\"${ref.path}\"`\n }\n if (ref.kind === 'dep') {\n const rule = ref.ruleName ? ` rule=\"${ref.ruleName}\"` : ''\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" deps=\"${ref.path}\"${rule}`\n }\n if (ref.kind === 'link_from') {\n return `- link \"${ref.entryFieldPath}\" from=\"${ref.path}\"`\n }\n if (ref.kind === 'check_writeback') {\n return `- check \"${ref.entryFieldPath}\" writeback=\"${ref.path}\"`\n }\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" path=\"${ref.path}\"`\n}\n\nexport const emitSchemaMismatch = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: Pick<ConvergeContext<S>, 'moduleId' | 'instanceId'>,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (!isDevEnv()) return\n\n const level = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n if (level === 'off') return\n\n const key = `${ctx.moduleId ?? 'unknown'}::${ctx.instanceId}`\n const shouldEmit = yield* onceInRunSession(`schema_mismatch:${key}`)\n if (!shouldEmit) return\n\n const refs = (program.schemaPaths ?? 
[]) as ReadonlyArray<StateTraitSchemaPathRef>\n if (refs.length === 0) return\n\n const stateSchemaAst = program.stateSchema.ast as unknown as SchemaAST.AST\n\n const mismatches: Array<StateTraitSchemaPathRef> = []\n const seen = new Set<string>()\n\n for (const ref of refs) {\n if (schemaHasFieldPath(stateSchemaAst, ref.path)) continue\n const k = `${ref.kind}|${ref.entryKind}|${ref.entryFieldPath}|${ref.ruleName ?? ''}|${ref.path}`\n if (seen.has(k)) continue\n seen.add(k)\n mismatches.push(ref)\n }\n\n if (mismatches.length === 0) return\n\n const limit = level === 'light' ? 8 : 24\n const lines = mismatches.slice(0, limit).map(formatSchemaMismatchLine)\n if (mismatches.length > limit) {\n lines.push(`- …(+${mismatches.length - limit})`)\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n code: 'state_trait::schema_mismatch',\n severity: 'warning',\n message: `[schema] The following paths are not declared in stateSchema (total ${mismatches.length}):\\n${lines.join('\\n')}`,\n hint: 'StateTrait writeback will create missing objects/fields. 
Declare all fieldPath/deps/link.from and errors.* writeback paths in stateSchema, or fix typos in trait paths.',\n kind: 'schema_mismatch',\n })\n })\n","import { Effect, FiberRef, Layer } from 'effect'\n\n/**\n * ExecVmMode:\n * - Allows kernel implementations (core-ng) to switch converge hot-path execution form without changing public semantics.\n * - Currently mainly affects typed-array reuse strategy in converge plan computation.\n * - Disabled by default in core (enable explicitly via Layer for perf/comparison runs).\n */\nexport const currentExecVmMode = FiberRef.unsafeMake<boolean>(false)\n\nexport const withExecVmMode = <A, E, R>(effect: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>\n Effect.locally(currentExecVmMode, true)(effect)\n\nexport const execVmModeLayer = (enabled: boolean): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentExecVmMode, () => enabled) as Layer.Layer<any, never, never>\n","import { Effect, Option } from 'effect'\nimport * as EffectOp from '../effect-op.js'\nimport * as EffectOpCore from '../runtime/core/EffectOpCore.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { isDevEnv } from '../runtime/core/env.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport type { PatchReason } from '../runtime/core/StateTransaction.js'\nimport type { FieldPath } from '../field-path.js'\nimport { emitDepsMismatch, onceInRunSession } from './converge-diagnostics.js'\nimport type { ConvergeContext } from './converge.types.js'\nimport type { ConvergeExecIr } from './converge-exec-ir.js'\nimport * as DepsTrace from './deps-trace.js'\nimport type { StateTraitEntry } from './model.js'\n\nexport const getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.stack : [])),\n )\n\ntype WriterKind = 'computed' | 'link'\n\nconst getWriterKind = (entry: StateTraitEntry<any, string>): WriterKind | undefined =>\n entry.kind === 'computed' ? 'computed' : entry.kind === 'link' ? 'link' : undefined\n\nconst getWriterDeps = (entry: StateTraitEntry<any, string>): ReadonlyArray<string> => {\n if (entry.kind === 'computed') {\n return ((entry.meta as any)?.deps ?? []) as ReadonlyArray<string>\n }\n if (entry.kind === 'link') {\n return [entry.meta.from as string]\n }\n return []\n}\n\nconst shouldSkip = (entry: StateTraitEntry<any, string>, prev: unknown, next: unknown): boolean => {\n if (entry.kind === 'computed') {\n const equals = (entry.meta as any)?.equals as ((a: unknown, b: unknown) => boolean) | undefined\n return equals ? equals(prev, next) : Object.is(prev, next)\n }\n return Object.is(prev, next)\n}\n\nexport const runWriterStepOffFast = <S extends object>(\n ctx: ConvergeContext<S>,\n execIr: ConvergeExecIr,\n draft: {\n readonly getRoot: () => S\n readonly getAt: (path: FieldPath) => unknown\n readonly setAt: (path: FieldPath, value: unknown, prev?: unknown) => void\n },\n stepId: number,\n entry: StateTraitEntry<any, string>,\n): boolean => {\n const kind = getWriterKind(entry)\n if (!kind) return false\n\n const reason: PatchReason = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const from = kind === 'link' ? (entry.meta as any).from : undefined\n const outPathId = execIr.stepOutFieldPathIdByStepId[stepId]\n const outPath = execIr.fieldPathsById[outPathId]!\n const fromPathId = execIr.stepFromFieldPathIdByStepId[stepId]\n const fromPath = fromPathId >= 0 ? 
execIr.fieldPathsById[fromPathId] : undefined\n\n const current = draft.getRoot() as any\n const prev = draft.getAt(outPath)\n\n let next: unknown\n if (kind === 'computed') {\n const derive = (entry.meta as any).derive as (s: any) => unknown\n next = derive(current)\n } else {\n if (!fromPath) {\n throw new Error(`[StateTrait.converge] Missing link.from FieldPathId: from=\"${String(from)}\"`)\n }\n next = draft.getAt(fromPath)\n }\n\n const changed = !shouldSkip(entry, prev, next)\n if (!changed) return false\n\n draft.setAt(outPath, next, prev)\n ctx.setDraft(draft.getRoot())\n ctx.recordPatch(outPathId, reason, prev, next)\n return true\n}\n\nexport const runWriterStep = <S extends object>(\n ctx: ConvergeContext<S>,\n execIr: ConvergeExecIr,\n draft: {\n readonly getRoot: () => S\n readonly getAt: (path: FieldPath) => unknown\n readonly setAt: (path: FieldPath, value: unknown, prev?: unknown) => void\n },\n stepId: number,\n entry: StateTraitEntry<any, string>,\n shouldCollectDecision: boolean,\n diagnosticsLevel: Debug.DiagnosticsLevel,\n stack: EffectOp.MiddlewareStack,\n): Effect.Effect<boolean> => {\n const moduleId = ctx.moduleId\n const instanceId = ctx.instanceId\n const fieldPath = entry.fieldPath\n\n const kind = getWriterKind(entry)\n if (!kind) return Effect.succeed(false)\n\n const reason: PatchReason = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const opKind: EffectOp.EffectOp<any, any, any>['kind'] = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const opName = kind === 'computed' ? 'computed:update' : 'link:propagate'\n\n const deps = getWriterDeps(entry)\n const from = kind === 'link' ? (entry.meta as any).from : undefined\n const outPathId = execIr.stepOutFieldPathIdByStepId[stepId]\n const outPath = execIr.fieldPathsById[outPathId]!\n const fromPathId = execIr.stepFromFieldPathIdByStepId[stepId]\n const fromPath = fromPathId >= 0 ? 
execIr.fieldPathsById[fromPathId] : undefined\n\n return Effect.gen(function* () {\n const stepLabel = diagnosticsLevel === 'off' ? undefined : (execIr.stepLabelByStepId[stepId] ?? String(stepId))\n\n let shouldTraceDeps = false\n if (kind === 'computed' && shouldCollectDecision && isDevEnv()) {\n const traceKey = `${moduleId ?? 'unknown'}::${instanceId ?? 'unknown'}::computed::${fieldPath}`\n shouldTraceDeps = yield* onceInRunSession(`deps_trace:settled:${traceKey}`)\n }\n\n const body: Effect.Effect<boolean> = Effect.sync(() => {\n const current = draft.getRoot() as any\n\n const prev = draft.getAt(outPath)\n\n let next: unknown\n let depsDiff: DepsTrace.DepsDiff | undefined\n\n if (kind === 'computed') {\n const derive = (entry.meta as any).derive as (s: any) => unknown\n if (shouldTraceDeps) {\n const traced = DepsTrace.trace((s) => derive(s), current)\n next = traced.value\n depsDiff = DepsTrace.diffDeps(((entry.meta as any).deps ?? []) as ReadonlyArray<string>, traced.reads)\n } else {\n next = derive(current)\n }\n } else {\n if (!fromPath) {\n throw new Error(`[StateTrait.converge] Missing link.from FieldPathId: from=\"${String(from)}\"`)\n }\n next = draft.getAt(fromPath)\n }\n\n const changed = !shouldSkip(entry, prev, next)\n if (!changed) {\n return { changed: false, depsDiff }\n }\n\n draft.setAt(outPath, next, prev)\n ctx.setDraft(draft.getRoot())\n ctx.recordPatch(outPathId, reason, prev, next, undefined, stepId)\n return { changed: true, depsDiff }\n }).pipe(\n Effect.flatMap(({ changed, depsDiff }) =>\n depsDiff && kind === 'computed'\n ? emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'computed',\n fieldPath,\n diff: depsDiff,\n }).pipe(Effect.as(changed))\n : Effect.succeed(changed),\n ),\n )\n\n const meta: any = {\n moduleId,\n instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n fieldPath,\n deps,\n ...(kind === 'link'\n ? {\n from,\n to: fieldPath,\n }\n : null),\n ...(stepLabel ? 
{ stepId: stepLabel } : null),\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = instanceId ?? 'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n }\n }\n\n const op = EffectOp.make<boolean, never, never>({\n kind: opKind,\n name: opName,\n effect: body,\n meta,\n })\n\n if (stack.length === 0) {\n return yield* body\n }\n\n return yield* EffectOp.run(op, stack)\n })\n}\n","import { Effect, FiberRef } from 'effect'\nimport { create } from 'mutative'\nimport type { PatchReason, StateTxnOrigin } from '../runtime/core/StateTransaction.js'\nimport { normalizeFieldPath, type FieldPath, type FieldPathId } from '../field-path.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { buildDependencyGraph } from './graph.js'\nimport type { StateTraitEntry, StateTraitProgram } from './model.js'\nimport { reverseClosure } from './reverse-closure.js'\nimport type * as RowId from './rowid.js'\n\nexport type ValidateMode = 'submit' | 'blur' | 'valueChange' | 'manual'\n\n/**\n * RULE_SKIP:\n * - Used by rules to indicate \"skip execution for this run\" (e.g. 
validateOn gating).\n * - Distinct from `undefined` (\"no error after execution\"): skip must not clear existing errors.\n */\nconst RULE_SKIP = Symbol.for('logix.state-trait.validate.skip')\n\nexport type ValidateTarget =\n | { readonly kind: 'root' }\n | { readonly kind: 'field'; readonly path: string }\n | {\n readonly kind: 'list'\n readonly path: string\n readonly listIndexPath?: ReadonlyArray<number>\n }\n | {\n readonly kind: 'item'\n readonly path: string\n readonly listIndexPath?: ReadonlyArray<number>\n readonly index: number\n readonly field?: string\n }\n\nexport interface ScopedValidateRequest {\n readonly mode: ValidateMode\n readonly target: ValidateTarget\n}\n\nexport interface ValidateContext<S> {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * External trigger at transaction start: stabilizes validate attribution to the txn origin,\n * preventing in-transaction derived writes from polluting attribution.\n */\n readonly origin?: StateTxnOrigin\n /**\n * RowIdStore: stable row identity for list scopes (later `$rowId` and rowIdMode depend on this).\n */\n readonly rowIdStore?: RowId.RowIdStore\n /**\n * List config hint from StateTraitSpec.list.identityHint (trackBy), used for rowIdMode explanation and degrade diagnostics.\n */\n readonly listConfigs?: ReadonlyArray<RowId.ListConfig>\n readonly getDraft: () => S\n readonly setDraft: (next: S) => void\n readonly recordPatch: (\n path: string | FieldPath | FieldPathId | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\ntype RuleContext = {\n readonly mode: ValidateMode\n readonly state: unknown\n readonly scope: {\n readonly fieldPath: string\n readonly listPath?: string\n readonly listIndexPath?: ReadonlyArray<number>\n readonly index?: number\n }\n}\n\nconst parseSegments = (path: string): ReadonlyArray<string | number> => {\n if (!path) return []\n 
return path.split('.').map((seg) => (/^[0-9]+$/.test(seg) ? Number(seg) : seg))\n}\n\nconst getAtPath = (state: any, path: string): any => {\n if (!path || state == null) return state\n const segments = parseSegments(path)\n let current: any = state\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n}\n\nconst setAtPathMutating = (draft: unknown, path: string, value: unknown): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const nextKey = segments[i + 1]!\n\n const next = current[key as any]\n if (next == null || typeof next !== 'object') {\n current[key as any] = typeof nextKey === 'number' ? [] : {}\n }\n current = current[key as any]\n }\n\n const last = segments[segments.length - 1]!\n current[last as any] = value\n}\n\nconst unsetAtPathMutating = (draft: unknown, path: string): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const next = current[key as any]\n if (next == null || typeof next !== 'object') {\n return\n }\n current = next\n }\n\n const last = segments[segments.length - 1]!\n if (Array.isArray(current) && typeof last === 'number') {\n current[last] = undefined\n return\n }\n if (current && typeof current === 'object') {\n delete current[last as any]\n }\n}\n\nconst isPlainObject = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nconst normalizeErrorValue = (value: unknown): unknown => (value === undefined || value === null ? 
undefined : value)\n\nconst mergeRuleErrors = (errors: ReadonlyArray<unknown>): unknown => {\n if (errors.length === 0) return undefined\n if (errors.length === 1) return errors[0]\n\n // ErrorValue constraint: arrays must not represent \"multiple errors\"; for duplicates on the same field, keep the first deterministically.\n if (errors.every(isPlainObject)) {\n const merged: Record<string, unknown> = {}\n for (const patch of errors as ReadonlyArray<Record<string, unknown>>) {\n for (const key of Object.keys(patch)) {\n const incoming = normalizeErrorValue(patch[key])\n if (incoming === undefined) continue\n if (!(key in merged)) merged[key] = incoming\n }\n }\n return Object.keys(merged).length > 0 ? merged : undefined\n }\n\n return errors[0]\n}\n\ntype ErrorValueLeafObject = {\n readonly message: string\n readonly code?: string\n readonly details?: unknown\n}\n\nconst isErrorValueLeafObject = (value: unknown): value is ErrorValueLeafObject => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return false\n const anyValue = value as Record<string, unknown>\n const msg = anyValue.message\n if (typeof msg !== 'string' || msg.length === 0) return false\n for (const key of Object.keys(anyValue)) {\n if (key !== 'message' && key !== 'code' && key !== 'details') return false\n }\n const code = anyValue.code\n if (code !== undefined && (typeof code !== 'string' || code.length === 0)) return false\n return true\n}\n\nconst countErrorLeaves = (value: unknown): number => {\n if (value === null || value === undefined) return 0\n if (typeof value === 'string') return value.length > 0 ? 
1 : 0\n if (Array.isArray(value)) return value.reduce((acc, v) => acc + countErrorLeaves(v), 0)\n if (typeof value === 'object') {\n if (isErrorValueLeafObject(value)) return 1\n let acc = 0\n for (const [k, v] of Object.entries(value as any)) {\n if (k === '$rowId') continue\n acc += countErrorLeaves(v)\n }\n return acc\n }\n return 1\n}\n\ntype ListScopeResult = {\n readonly listError?: unknown\n readonly rows?: ReadonlyArray<unknown>\n readonly traces?: ReadonlyArray<ListScopeRuleTrace>\n readonly touchedKeys: ReadonlySet<string>\n readonly touchedListError: boolean\n}\n\ntype TraitCheckOp = 'set' | 'unset' | 'insert' | 'remove'\n\ntype ListScopeRuleTrace = {\n readonly ruleId: string\n readonly summary: {\n readonly scannedRows: number\n readonly affectedRows: number\n readonly changedRows: number\n readonly setCount?: number\n readonly clearedCount?: number\n readonly durationMs?: number\n }\n}\n\ntype TraitCheckRowIdMode = 'trackBy' | 'store' | 'index'\n\ntype TraitCheckDegraded = {\n readonly kind: string\n readonly message?: string\n}\n\nconst nowMs = (() => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return () => perf.now()\n }\n return () => Date.now()\n})()\n\nconst isTraitCheckOp = (value: unknown): value is TraitCheckOp =>\n value === 'set' || value === 'unset' || value === 'insert' || value === 'remove'\n\nconst normalizeTraitCheckPath = (path: string): ReadonlyArray<string> => normalizeFieldPath(path) ?? 
['$root']\n\nconst sameFieldPath = (a: ReadonlyArray<string>, b: ReadonlyArray<string>): boolean => {\n if (a.length !== b.length) return false\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false\n }\n return true\n}\n\nconst toTraitCheckTrigger = (\n origin: StateTxnOrigin | undefined,\n fallbackPath: string,\n): { readonly kind: string; readonly path: ReadonlyArray<string>; readonly op: TraitCheckOp } => {\n const details = origin?.details\n const detailsObj = isPlainObject(details) ? (details as Record<string, unknown>) : undefined\n const tag = detailsObj && typeof detailsObj._tag === 'string' ? detailsObj._tag : undefined\n\n const kindBase = origin?.kind && origin.kind.length > 0 ? origin.kind : 'unknown'\n const kind =\n tag && tag.length > 0\n ? `${kindBase}:${tag}`\n : origin?.name && origin.name.length > 0\n ? `${kindBase}:${origin.name}`\n : kindBase\n\n const opRaw = detailsObj?.op\n const op: TraitCheckOp = isTraitCheckOp(opRaw) ? opRaw : 'set'\n\n const pathRaw = detailsObj && typeof detailsObj.path === 'string' ? detailsObj.path : undefined\n const path = normalizeTraitCheckPath(pathRaw ?? 
fallbackPath)\n\n return { kind, path, op }\n}\n\nconst toTraitCheckRowIdMode = (params: {\n readonly trackBy?: string\n readonly rowIdStore?: RowId.RowIdStore\n}): TraitCheckRowIdMode => {\n if (params.trackBy) return 'trackBy'\n if (params.rowIdStore) return 'store'\n return 'index'\n}\n\nconst toTraitCheckDegraded = (\n trigger: { readonly op: TraitCheckOp; readonly path: ReadonlyArray<string> },\n scopeFieldPath: ReadonlyArray<string>,\n rowIdMode: TraitCheckRowIdMode,\n): TraitCheckDegraded | undefined => {\n if (rowIdMode === 'trackBy') return undefined\n if (trigger.op !== 'set') return undefined\n if (!sameFieldPath(trigger.path, scopeFieldPath)) return undefined\n return {\n kind: 'rowId:degraded:no_trackBy_root_replace',\n message: 'list root was replaced without trackBy; rowId stability is degraded',\n }\n}\n\nconst mergeRowPatchPreferFirst = (\n base: Record<string, unknown> | undefined,\n incoming: unknown,\n): Record<string, unknown> | undefined => {\n if (!isPlainObject(incoming)) return base\n const next: Record<string, unknown> = base ? { ...base } : {}\n for (const key of Object.keys(incoming)) {\n const v = normalizeErrorValue(incoming[key])\n if (v === undefined) continue\n if (!(key in next)) next[key] = v\n }\n return Object.keys(next).length > 0 ? 
next : undefined\n}\n\nconst shallowEqualPlainObject = (a: Record<string, unknown>, b: Record<string, unknown>): boolean => {\n const aKeys = Object.keys(a)\n const bKeys = Object.keys(b)\n if (aKeys.length !== bKeys.length) return false\n for (const key of aKeys) {\n if (!Object.prototype.hasOwnProperty.call(b, key)) return false\n if (!Object.is(a[key], b[key])) return false\n }\n return true\n}\n\nconst collectRuleKeysFromDeps = (rule: unknown, listPath: string): ReadonlyArray<string> => {\n if (!rule || typeof rule !== 'object') return []\n const deps = (rule as any).deps\n if (!Array.isArray(deps)) return []\n\n const prefix = `${listPath}[].`\n const keys: Array<string> = []\n for (const dep of deps) {\n if (typeof dep !== 'string') continue\n if (dep.startsWith(prefix)) {\n const key = dep.slice(prefix.length)\n if (key) keys.push(key)\n continue\n }\n if (dep.length > 0 && !dep.includes('.') && !dep.includes('[') && !dep.includes(']')) {\n keys.push(dep)\n }\n }\n\n return Array.from(new Set(keys)).sort()\n}\n\nconst evalListScopeCheck = (\n entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>,\n input: unknown,\n ctx: RuleContext,\n options?: {\n readonly trace?: {\n readonly listPath: string\n readonly errorsBasePath: string\n readonly errorsRoot: unknown\n }\n },\n): ListScopeResult | typeof RULE_SKIP => {\n const rules = entry.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n\n let listError: unknown | undefined = undefined\n let rows: Array<Record<string, unknown> | undefined> | undefined = undefined\n let traces: Array<ListScopeRuleTrace> | undefined = undefined\n let ran = false\n let touchedListError = false\n const touchedKeys = new Set<string>()\n const listPath = ctx.scope.listPath ?? 
ctx.scope.fieldPath\n\n const mergeRows = (incomingRows: ReadonlyArray<unknown>): void => {\n if (!rows) rows = []\n const limit = Math.max(rows.length, incomingRows.length)\n if (rows.length < limit) rows.length = limit\n\n for (let i = 0; i < incomingRows.length; i++) {\n const merged = mergeRowPatchPreferFirst(rows[i], incomingRows[i])\n rows[i] = merged\n }\n }\n\n const summarizeRuleRows = (\n errorsBasePath: string,\n keys: ReadonlyArray<string>,\n scannedRows: number,\n rowsPatch: ReadonlyArray<unknown> | undefined,\n ): {\n readonly affectedRows: number\n readonly changedRows: number\n readonly setCount: number\n readonly clearedCount: number\n } => {\n if (keys.length === 0 || scannedRows <= 0) {\n return { affectedRows: 0, changedRows: 0, setCount: 0, clearedCount: 0 }\n }\n\n let affectedRows = 0\n let changedRows = 0\n let setCount = 0\n let clearedCount = 0\n\n for (let index = 0; index < scannedRows; index++) {\n const prevRow = getAtPath(options?.trace?.errorsRoot as any, `${errorsBasePath}.rows.${index}`)\n const prevObj = isPlainObject(prevRow) ? (prevRow as Record<string, unknown>) : undefined\n const patch = rowsPatch?.[index]\n const patchObj = isPlainObject(patch) ? 
(patch as Record<string, unknown>) : undefined\n\n let hasPrev = false\n let hasNext = false\n let rowChanged = false\n\n for (const key of keys) {\n const prev = normalizeErrorValue(prevObj?.[key])\n const next = normalizeErrorValue(patchObj?.[key])\n if (prev !== undefined) hasPrev = true\n if (next !== undefined) hasNext = true\n if (Object.is(prev, next)) continue\n rowChanged = true\n if (next === undefined) {\n if (prev !== undefined) clearedCount += 1\n } else {\n setCount += 1\n }\n }\n\n if (hasPrev || hasNext) affectedRows += 1\n if (rowChanged) changedRows += 1\n }\n\n return { affectedRows, changedRows, setCount, clearedCount }\n }\n\n for (const name of names) {\n const rule = rules[name]\n const collectTrace = options?.trace?.listPath && options?.trace?.errorsRoot\n const startedAt = collectTrace ? nowMs() : 0\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctx)\n : rule && typeof rule === 'object'\n ? rule.validate(input, ctx)\n : undefined\n\n if (out === RULE_SKIP) continue\n ran = true\n\n for (const key of collectRuleKeysFromDeps(rule, listPath)) {\n touchedKeys.add(key)\n }\n\n if (collectTrace) {\n const traceListPath = options!.trace!.listPath\n const traceErrorsBasePath = options!.trace!.errorsBasePath\n const keys = collectRuleKeysFromDeps(rule, traceListPath)\n const scannedRows = Array.isArray(input) ? input.length : 0\n\n const rowsPatch: ReadonlyArray<unknown> | undefined = Array.isArray(out)\n ? out\n : isPlainObject(out) && Array.isArray((out as any).rows)\n ? 
((out as any).rows as ReadonlyArray<unknown>)\n : undefined\n\n const summary = summarizeRuleRows(traceErrorsBasePath, keys, scannedRows, rowsPatch)\n const durationMs = Math.max(0, nowMs() - startedAt)\n\n if (!traces) traces = []\n traces.push({\n ruleId: `${entry.fieldPath}#${name}`,\n summary: {\n scannedRows,\n affectedRows: summary.affectedRows,\n changedRows: summary.changedRows,\n setCount: summary.setCount,\n clearedCount: summary.clearedCount,\n durationMs,\n },\n })\n }\n\n if (out === undefined) continue\n\n if (Array.isArray(out)) {\n mergeRows(out)\n continue\n }\n\n if (isPlainObject(out)) {\n const maybeRows = (out as any).rows\n const hasListKey = Object.prototype.hasOwnProperty.call(out, '$list')\n if (hasListKey) touchedListError = true\n const maybeListError = normalizeErrorValue((out as any).$list)\n if (maybeListError !== undefined && listError === undefined) {\n listError = maybeListError\n }\n if (Array.isArray(maybeRows)) {\n mergeRows(maybeRows)\n } else if (!hasListKey && maybeListError === undefined) {\n // Allow list-scope rules to return a `$list` error value (string/object) directly without implying a rows structure.\n const v = normalizeErrorValue(out)\n if (v !== undefined && listError === undefined) listError = v\n touchedListError = true\n }\n continue\n }\n\n // Non object/array: treat as a `$list` error value.\n const v = normalizeErrorValue(out)\n if (v !== undefined && listError === undefined) listError = v\n touchedListError = true\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n\n if (!ran) return RULE_SKIP\n return {\n listError,\n rows,\n traces,\n touchedKeys,\n touchedListError,\n }\n}\n\nconst toPatternPath = (path: string): string => {\n if (!path) return path\n const segments = path.split('.').filter(Boolean)\n const out: Array<string> = []\n\n for (const seg of segments) 
{\n if (/^[0-9]+$/.test(seg)) {\n if (out.length === 0) continue\n const last = out[out.length - 1]!\n if (!last.endsWith('[]')) out[out.length - 1] = `${last}[]`\n continue\n }\n out.push(seg)\n }\n\n return out.join('.')\n}\n\nconst toGraphTargets = (target: ValidateTarget): ReadonlyArray<string> => {\n if (target.kind === 'root') {\n return []\n }\n if (target.kind === 'field') {\n return [toPatternPath(target.path)]\n }\n if (target.kind === 'list') {\n // A list target should hit both list-scope check (fieldPath=listPath) and item-scope check (fieldPath=listPath[]).\n return [target.path, `${target.path}[]`]\n }\n // item\n const base = `${target.path}[]`\n const field = target.field ? toPatternPath(target.field) : undefined\n return [field ? `${base}.${field}` : base]\n}\n\nconst normalizeListIndexPath = (listIndexPath: ReadonlyArray<number> | undefined): ReadonlyArray<number> => {\n if (!Array.isArray(listIndexPath) || listIndexPath.length === 0) return []\n const out: Array<number> = []\n for (const n of listIndexPath) {\n if (!Number.isInteger(n) || n < 0) continue\n out.push(n)\n }\n return out\n}\n\nconst toListInstanceKey = (listPath: string, listIndexPath: ReadonlyArray<number> | undefined): string => {\n const p = normalizeListIndexPath(listIndexPath)\n return p.length === 0 ? `${listPath}@@` : `${listPath}@@${p.join(',')}`\n}\n\nconst extractIndexBindings = (requests: ReadonlyArray<ScopedValidateRequest>): Map<string, ReadonlySet<number>> => {\n const map = new Map<string, Set<number>>()\n for (const req of requests) {\n if (req.target.kind !== 'item') continue\n const key = toListInstanceKey(req.target.path, req.target.listIndexPath)\n const set = map.get(key) ?? 
new Set<number>()\n set.add(req.target.index)\n map.set(key, set)\n }\n return map\n}\n\nconst extractListBindings = (\n requests: ReadonlyArray<ScopedValidateRequest>,\n): {\n readonly all: ReadonlySet<string>\n readonly instances: ReadonlySet<string>\n} => {\n const all = new Set<string>()\n const instances = new Set<string>()\n for (const req of requests) {\n if (req.target.kind !== 'list') continue\n if (!req.target.path) continue\n if (req.target.listIndexPath && req.target.listIndexPath.length > 0) {\n instances.add(toListInstanceKey(req.target.path, req.target.listIndexPath))\n continue\n }\n all.add(req.target.path)\n }\n return { all, instances }\n}\n\nconst resolveMode = (requests: ReadonlyArray<ScopedValidateRequest>): ValidateMode => {\n const priorities: Record<ValidateMode, number> = {\n submit: 4,\n blur: 3,\n valueChange: 2,\n manual: 1,\n }\n let best: ValidateMode = 'manual'\n let bestP = priorities[best]\n for (const r of requests) {\n const p = priorities[r.mode]\n if (p > bestP) {\n bestP = p\n best = r.mode\n }\n }\n return best\n}\n\nconst evalCheck = (\n entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>,\n input: unknown,\n ctx: RuleContext,\n): unknown => {\n const rules = entry.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n const results: Array<unknown> = []\n let ran = false\n\n for (const name of names) {\n const rule = rules[name]\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctx)\n : rule && typeof rule === 'object'\n ? 
rule.validate(input, ctx)\n : undefined\n if (out === RULE_SKIP) continue\n ran = true\n const normalized = normalizeErrorValue(out)\n if (normalized !== undefined) results.push(normalized)\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n\n if (!ran) return RULE_SKIP\n return mergeRuleErrors(results)\n}\n\ntype ErrorUpdate = {\n readonly errorPath: string\n readonly prev: unknown\n readonly next: unknown\n readonly stepId: string\n}\n\n/**\n * validateInTransaction:\n * - Execute a batch of scoped validate requests within an already-started StateTransaction.\n * - Compute the minimal check set via ReverseClosure, and write results back to `state.errors.*`.\n * - If no actual error changes occur, do not update the draft (preserve 0-commit semantics).\n */\nexport const validateInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: ValidateContext<S>,\n requests: ReadonlyArray<ScopedValidateRequest>,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n const enableTrace = diagnosticsLevel !== 'off'\n const traceEvents: Array<Debug.Event> | undefined = enableTrace ? 
[] : undefined\n\n yield* Effect.sync(() => {\n if (requests.length === 0) return\n\n const checks = program.entries.filter(\n (e): e is Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }> => (e as any).kind === 'check',\n )\n if (checks.length === 0) return\n\n const hasRoot = requests.some((r) => r.target.kind === 'root')\n const draft = ctx.getDraft() as any\n\n // Compute check scopes to execute (set of field paths).\n const scopesToValidate = (() => {\n if (hasRoot) {\n return new Set<string>(checks.map((c) => c.fieldPath))\n }\n const graph = buildDependencyGraph(program)\n const set = new Set<string>()\n for (const req of requests) {\n for (const t of toGraphTargets(req.target)) {\n for (const node of reverseClosure(graph, t)) {\n set.add(node)\n }\n }\n }\n return set\n })()\n\n const selectedChecks = checks.filter((c) => scopesToValidate.has(c.fieldPath))\n if (selectedChecks.length === 0) return\n\n const mode = resolveMode(requests)\n\n if (enableTrace && traceEvents) {\n traceEvents.push({\n type: 'trace:trait:validate',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data: {\n mode,\n requestCount: requests.length,\n selectedCheckCount: selectedChecks.length,\n hasRoot,\n },\n })\n }\n\n // Item-scope bindings: used only for non-root validate (root validate runs full length by current arrays).\n const indexBindings = extractIndexBindings(requests)\n const listBindings = extractListBindings(requests)\n const listBindingsAll = listBindings.all\n const listBindingsInstances = listBindings.instances\n\n const instanceIndexPathByKey = (() => {\n const map = new Map<string, ReadonlyArray<number>>()\n for (const req of requests) {\n if (req.target.kind !== 'item' && req.target.kind !== 'list') continue\n const key = toListInstanceKey(req.target.path, req.target.listIndexPath)\n map.set(key, normalizeListIndexPath(req.target.listIndexPath))\n }\n return map\n })()\n\n const updates: 
Array<ErrorUpdate> = []\n\n const listConfigByPath = (() => {\n const map = new Map<string, RowId.ListConfig>()\n const configs = ctx.listConfigs ?? []\n for (const cfg of configs) {\n if (!cfg || typeof (cfg as any).path !== 'string') continue\n map.set((cfg as any).path, cfg as any)\n }\n return map\n })()\n\n const readTrackBy = (item: unknown, trackBy: string): unknown => {\n if (!item || typeof item !== 'object') return undefined\n const segments = trackBy.split('.')\n let current: any = item\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n }\n\n const makeStepId = (fieldPath: string, index?: number): string =>\n index === undefined ? `check:${fieldPath}` : `check:${fieldPath}@${index}`\n\n type ListRuntime = {\n readonly listPath: string\n readonly listIndexPath: ReadonlyArray<number>\n readonly valuePath: string\n readonly errorBasePath: string\n readonly parentRowId?: RowId.RowId\n readonly items: ReadonlyArray<unknown>\n readonly trackBy?: string\n readonly rowIds?: ReadonlyArray<string>\n }\n\n const listPaths = Array.from(listConfigByPath.keys())\n const listPathSet = new Set(listPaths)\n\n const parentOf = (path: string): string | undefined => {\n const segments = path.split('.').filter(Boolean)\n let best: string | undefined\n for (let i = 1; i < segments.length; i++) {\n const prefix = segments.slice(0, i).join('.')\n if (listPathSet.has(prefix)) best = prefix\n }\n return best\n }\n\n const parentByPath = new Map<string, string | undefined>()\n const suffixByPath = new Map<string, string>()\n for (const path of listPaths) {\n const parent = parentOf(path)\n parentByPath.set(path, parent)\n const suffix = parent ? 
path.slice(parent.length + 1) : path\n suffixByPath.set(path, suffix)\n }\n\n const normalizeInstanceIndexPath = (\n listPath: string,\n listIndexPath: ReadonlyArray<number> | undefined,\n ): ReadonlyArray<number> | undefined => {\n const normalized = normalizeListIndexPath(listIndexPath)\n let expected = 0\n let p = parentByPath.get(listPath)\n while (p) {\n expected += 1\n p = parentByPath.get(p)\n }\n if (expected === 0) return []\n if (normalized.length !== expected) return undefined\n return normalized\n }\n\n const listRuntimeByKey = new Map<string, ListRuntime>()\n\n const getListRuntime = (listPath: string, listIndexPath: ReadonlyArray<number>): ListRuntime | undefined => {\n const parent = parentByPath.get(listPath)\n const cacheKey = parent ? `${listPath}@@#${listIndexPath.join(',')}` : `${listPath}@@root`\n\n const cached = listRuntimeByKey.get(cacheKey)\n if (cached) return cached\n\n const listCfg = listConfigByPath.get(listPath)\n const trackBy =\n listCfg && typeof (listCfg as any).trackBy === 'string' ? ((listCfg as any).trackBy as string) : undefined\n\n if (!parent) {\n const listValue = getAtPath(draft, listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n const rowIds: ReadonlyArray<string> | undefined = ctx.rowIdStore\n ? ctx.rowIdStore.ensureList(listPath, items, trackBy)\n : undefined\n\n const out: ListRuntime = {\n listPath,\n listIndexPath: [],\n valuePath: listPath,\n errorBasePath: `errors.${listPath}`,\n items,\n trackBy,\n rowIds,\n }\n listRuntimeByKey.set(cacheKey, out)\n return out\n }\n\n if (listIndexPath.length === 0) return undefined\n const parentIndexPath = listIndexPath.slice(0, -1)\n const parentIndex = listIndexPath[listIndexPath.length - 1]!\n const parentRuntime = getListRuntime(parent, parentIndexPath)\n if (!parentRuntime) return undefined\n if (parentIndex < 0 || parentIndex >= parentRuntime.items.length) return undefined\n\n const suffix = suffixByPath.get(listPath) ?? 
''\n if (!suffix) return undefined\n\n const valuePath = `${parentRuntime.valuePath}.${parentIndex}.${suffix}`\n const errorBasePath = `${parentRuntime.errorBasePath}.rows.${parentIndex}.${suffix}`\n\n const listValue = getAtPath(draft, valuePath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n\n const parentRowId =\n (parentRuntime.rowIds?.[parentIndex] as any) ??\n (ctx.rowIdStore ? ctx.rowIdStore.getRowId(parent, parentIndex, parentRuntime.parentRowId) : undefined)\n\n const rowIds: ReadonlyArray<string> | undefined = ctx.rowIdStore\n ? ctx.rowIdStore.ensureList(listPath, items, trackBy, parentRowId)\n : undefined\n\n const out: ListRuntime = {\n listPath,\n listIndexPath,\n valuePath,\n errorBasePath,\n parentRowId,\n items,\n trackBy,\n rowIds,\n }\n listRuntimeByKey.set(cacheKey, out)\n return out\n }\n\n const enumerateAllListInstances = (listPath: string): ReadonlyArray<ListRuntime> => {\n const parent = parentByPath.get(listPath)\n if (!parent) {\n const rt = getListRuntime(listPath, [])\n return rt ? 
[rt] : []\n }\n\n const parentInstances = enumerateAllListInstances(parent)\n const out: Array<ListRuntime> = []\n for (const p of parentInstances) {\n for (let i = 0; i < p.items.length; i++) {\n const childIndexPath = [...p.listIndexPath, i]\n const rt = getListRuntime(listPath, childIndexPath)\n if (rt) out.push(rt)\n }\n }\n return out\n }\n\n type RowDraft = {\n readonly listPath: string\n readonly listIndexPath: ReadonlyArray<number>\n readonly parentRowId?: RowId.RowId\n readonly index: number\n readonly errorBasePath: string\n readonly errorPath: string\n readonly prev: unknown\n readonly next: Record<string, unknown>\n readonly stepId: string\n removed?: boolean\n }\n\n const rowDrafts = new Map<string, RowDraft>()\n\n const getOrCreateRowDraft = (list: ListRuntime, index: number, stepId: string): RowDraft => {\n const errorPath = `${list.errorBasePath}.rows.${index}`\n const existing = rowDrafts.get(errorPath)\n if (existing) return existing\n\n const prev = getAtPath(draft, errorPath)\n const next: Record<string, unknown> = isPlainObject(prev) ? { ...(prev as any) } : {}\n\n const out: RowDraft = {\n listPath: list.listPath,\n listIndexPath: list.listIndexPath,\n parentRowId: list.parentRowId,\n index,\n errorBasePath: list.errorBasePath,\n errorPath,\n prev,\n next,\n stepId,\n removed: false,\n }\n rowDrafts.set(errorPath, out)\n return out\n }\n\n const applyScopedRowPatch = (\n row: RowDraft,\n keysFromDeps: ReadonlySet<string>,\n patchObj: Record<string, unknown> | undefined,\n ): void => {\n if (keysFromDeps.size === 0) return\n\n const patchKeys = patchObj ? 
Object.keys(patchObj) : []\n const existingKeys = Object.keys(row.next).filter((k) => k !== '$rowId')\n\n const keysToApply = new Set<string>()\n for (const key of existingKeys) {\n if (keysFromDeps.has(key)) keysToApply.add(key)\n }\n for (const key of patchKeys) {\n if (keysFromDeps.has(key)) keysToApply.add(key)\n }\n\n if (keysToApply.size === 0) return\n\n for (const key of keysToApply) {\n const v = normalizeErrorValue(patchObj?.[key])\n if (v === undefined) {\n delete row.next[key]\n } else {\n row.next[key] = v\n }\n }\n }\n\n for (const check of selectedChecks) {\n const scopeFieldPath = check.fieldPath\n\n // list-scope check: write back into `$list/rows[]` (errors.<listPath>.$list / errors.<listPath>.rows[i].*).\n const listCfg = listConfigByPath.get(scopeFieldPath)\n if (listCfg) {\n const listPath = scopeFieldPath\n const listInstances = (() => {\n if (hasRoot) return enumerateAllListInstances(listPath)\n if (listBindingsAll.has(listPath)) return enumerateAllListInstances(listPath)\n\n const keys = new Set<string>()\n for (const k of listBindingsInstances) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n for (const k of indexBindings.keys()) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n\n if (keys.size === 0) return enumerateAllListInstances(listPath)\n\n const out: Array<ListRuntime> = []\n for (const k of keys) {\n const indexPath = instanceIndexPathByKey.get(k)\n const normalized = normalizeInstanceIndexPath(listPath, indexPath)\n if (!normalized) continue\n const rt = getListRuntime(listPath, normalized)\n if (rt) out.push(rt)\n }\n return out\n })()\n\n for (const listRuntime of listInstances) {\n const items = listRuntime.items\n\n const trigger = enableTrace ? toTraitCheckTrigger(ctx.origin, listPath) : undefined\n\n const scopeFieldPathSegments = enableTrace ? normalizeTraitCheckPath(listPath) : undefined\n\n const rowIdMode = enableTrace\n ? 
toTraitCheckRowIdMode({\n trackBy: listRuntime.trackBy,\n rowIdStore: ctx.rowIdStore,\n })\n : undefined\n\n const degraded =\n enableTrace && trigger && scopeFieldPathSegments && rowIdMode\n ? toTraitCheckDegraded(trigger, scopeFieldPathSegments, rowIdMode)\n : undefined\n\n const next = evalListScopeCheck(\n check,\n items,\n {\n mode,\n state: draft,\n scope: { fieldPath: scopeFieldPath, listPath, listIndexPath: listRuntime.listIndexPath },\n },\n enableTrace\n ? {\n trace: { listPath, errorsBasePath: listRuntime.errorBasePath, errorsRoot: draft },\n }\n : undefined,\n )\n if (next === RULE_SKIP) continue\n const keysFromDeps = next.touchedKeys\n\n if (\n enableTrace &&\n traceEvents &&\n trigger &&\n scopeFieldPathSegments &&\n rowIdMode &&\n next.traces &&\n next.traces.length > 0\n ) {\n for (const t of next.traces) {\n const data: any = {\n ruleId: t.ruleId,\n scopeFieldPath: scopeFieldPathSegments,\n mode,\n trigger,\n summary: t.summary,\n rowIdMode,\n }\n if (degraded) {\n data.degraded = degraded\n }\n traceEvents.push({\n type: 'trace:trait:check',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data,\n })\n }\n }\n\n const listErrorPath = `${listRuntime.errorBasePath}.$list`\n const prevListError = getAtPath(draft, listErrorPath)\n const nextListError = normalizeErrorValue(next.listError)\n\n if (next.touchedListError && !Object.is(prevListError, nextListError)) {\n updates.push({\n errorPath: listErrorPath,\n prev: prevListError,\n next: nextListError,\n stepId: makeStepId(scopeFieldPath),\n })\n }\n\n const rows = next.rows ?? []\n const prevRows = getAtPath(draft, `${listRuntime.errorBasePath}.rows`)\n const prevLen = Array.isArray(prevRows) ? 
prevRows.length : 0\n const limit = Math.max(items.length, rows.length, prevLen)\n\n for (let index = 0; index < limit; index++) {\n const rowErrorPath = `${listRuntime.errorBasePath}.rows.${index}`\n const existing = rowDrafts.get(rowErrorPath)\n\n if (index >= items.length) {\n const prevRow = existing?.prev ?? getAtPath(draft, rowErrorPath)\n if (prevRow === undefined && !existing) continue\n const row = existing ?? getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index))\n row.removed = true\n for (const key of Object.keys(row.next)) {\n delete row.next[key]\n }\n continue\n }\n\n const patch = rows[index]\n const patchObj = isPlainObject(patch) ? patch : undefined\n\n if (existing) {\n applyScopedRowPatch(existing, keysFromDeps, patchObj)\n continue\n }\n\n const patchHasRelevant = patchObj && Object.keys(patchObj).some((k) => keysFromDeps.has(k))\n\n if (patchHasRelevant) {\n const row = getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index))\n applyScopedRowPatch(row, keysFromDeps, patchObj)\n continue\n }\n\n const prevRow = getAtPath(draft, rowErrorPath)\n const prevHasRelevant =\n isPlainObject(prevRow) && Object.keys(prevRow).some((k) => k !== '$rowId' && keysFromDeps.has(k))\n const prevOnlyRowId =\n isPlainObject(prevRow) && Object.keys(prevRow).length === 1 && Object.keys(prevRow)[0] === '$rowId'\n\n if (prevHasRelevant || prevOnlyRowId) {\n const row = getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index))\n applyScopedRowPatch(row, keysFromDeps, undefined)\n }\n }\n\n continue\n }\n\n continue\n }\n\n // Phase 2: supports list.item scope (\"items[]\" / \"orders.items[]\"), and uses listIndexPath for nested writebacks.\n if (scopeFieldPath.endsWith('[]')) {\n const listPath = scopeFieldPath.slice(0, -2)\n\n const listInstances = (() => {\n if (hasRoot) return enumerateAllListInstances(listPath)\n if (listBindingsAll.has(listPath)) return enumerateAllListInstances(listPath)\n\n const keys = new 
Set<string>()\n for (const k of listBindingsInstances) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n for (const k of indexBindings.keys()) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n\n if (keys.size === 0) return enumerateAllListInstances(listPath)\n\n const out: Array<ListRuntime> = []\n for (const k of keys) {\n const indexPath = instanceIndexPathByKey.get(k)\n const normalized = normalizeInstanceIndexPath(listPath, indexPath)\n if (!normalized) continue\n const rt = getListRuntime(listPath, normalized)\n if (rt) out.push(rt)\n }\n return out\n })()\n\n const rules = check.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n\n for (const listRuntime of listInstances) {\n const instanceKey = toListInstanceKey(listPath, listRuntime.listIndexPath)\n const indices: ReadonlyArray<number> =\n hasRoot || listBindingsAll.has(listPath) || listBindingsInstances.has(instanceKey)\n ? listRuntime.items.map((_, i) => i)\n : Array.from(indexBindings.get(instanceKey) ?? [])\n\n if (indices.length === 0) continue\n\n for (const index of indices) {\n if (index < 0 || index >= listRuntime.items.length) continue\n\n const boundValuePath = `${listRuntime.valuePath}.${index}`\n const input = getAtPath(draft, boundValuePath)\n\n const rowErrorPath = `${listRuntime.errorBasePath}.rows.${index}`\n const prevRow = getAtPath(draft, rowErrorPath)\n const prevObj = isPlainObject(prevRow) ? 
(prevRow as Record<string, unknown>) : undefined\n const prevOnlyRowId =\n isPlainObject(prevRow) && Object.keys(prevRow).length === 1 && Object.keys(prevRow)[0] === '$rowId'\n\n let rowDraft: RowDraft | undefined = undefined\n let lockedKeys: Set<string> | undefined = undefined\n\n const ctxForRule: RuleContext = {\n mode,\n state: draft,\n scope: { fieldPath: scopeFieldPath, listPath, listIndexPath: listRuntime.listIndexPath, index },\n }\n\n const ensureRowDraft = (): RowDraft => {\n if (rowDraft) return rowDraft\n rowDraft = getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index))\n return rowDraft\n }\n\n for (const name of names) {\n const rule = rules[name]\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctxForRule)\n : rule && typeof rule === 'object'\n ? rule.validate(input, ctxForRule)\n : undefined\n\n if (out === RULE_SKIP) continue\n\n const keys = collectRuleKeysFromDeps(rule, listPath)\n if (keys.length === 0) continue\n\n const patchObj = isPlainObject(out) ? (out as Record<string, unknown>) : undefined\n const patchHasRelevant =\n patchObj && Object.keys(patchObj).some((k) => k !== '$rowId' && keys.includes(k))\n const prevHasRelevant =\n prevObj && Object.keys(prevObj).some((k) => k !== '$rowId' && keys.includes(k))\n\n if (!rowDraft && !patchHasRelevant && !prevHasRelevant && !prevOnlyRowId) {\n continue\n }\n\n const row = ensureRowDraft()\n for (const key of keys) {\n if (key === '$rowId') continue\n if (lockedKeys?.has(key)) continue\n const v = normalizeErrorValue(patchObj?.[key])\n if (v === undefined) {\n delete row.next[key]\n } else {\n row.next[key] = v\n if (!lockedKeys) lockedKeys = new Set<string>()\n lockedKeys.add(key)\n }\n }\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n }\n }\n\n continue\n }\n\n const input = scopeFieldPath === '$root' ? 
draft : getAtPath(draft, scopeFieldPath)\n\n const nextError = evalCheck(check, input, {\n mode,\n state: draft,\n scope: { fieldPath: scopeFieldPath },\n })\n if (nextError === RULE_SKIP) continue\n\n const writebackPath = (() => {\n const wb = (check as any)?.meta?.writeback\n const p = wb && typeof wb === 'object' ? (wb as any).path : undefined\n return typeof p === 'string' && p.startsWith('errors.') ? p : undefined\n })()\n\n const errorPath = writebackPath ?? `errors.${scopeFieldPath}`\n const prev = getAtPath(draft, errorPath)\n\n if (!Object.is(prev, nextError)) {\n updates.push({\n errorPath,\n prev,\n next: nextError,\n stepId: makeStepId(scopeFieldPath),\n })\n }\n }\n\n for (const row of rowDrafts.values()) {\n const prevRow = row.prev\n\n const nextRow = (() => {\n if (row.removed) return undefined\n\n delete row.next.$rowId\n const errorKeys = Object.keys(row.next).filter((k) => k !== '$rowId')\n if (errorKeys.length === 0) return undefined\n\n const listRuntime = getListRuntime(row.listPath, row.listIndexPath)\n const item = listRuntime?.items[row.index]\n const rowId = (() => {\n if (listRuntime?.trackBy) {\n const k = readTrackBy(item, listRuntime.trackBy)\n if (k !== undefined) return String(k)\n }\n const fromStore =\n listRuntime?.rowIds?.[row.index] ?? ctx.rowIdStore?.getRowId(row.listPath, row.index, row.parentRowId)\n if (typeof fromStore === 'string' && fromStore.length > 0) return fromStore\n return String(row.index)\n })()\n\n const nextRowRaw: Record<string, unknown> = { $rowId: rowId, ...row.next }\n return isPlainObject(prevRow) && shallowEqualPlainObject(prevRow, nextRowRaw) ? 
prevRow : nextRowRaw\n })()\n\n if (!Object.is(prevRow, nextRow)) {\n updates.push({\n errorPath: row.errorPath,\n prev: prevRow,\n next: nextRow,\n stepId: row.stepId,\n })\n }\n }\n\n if (updates.length === 0) {\n return\n }\n\n const reason: PatchReason = 'unknown'\n\n const prevFormErrorCount =\n draft &&\n typeof draft === 'object' &&\n (draft as any).$form &&\n typeof (draft as any).$form === 'object' &&\n !Array.isArray((draft as any).$form) &&\n typeof (draft as any).$form.errorCount === 'number'\n ? ((draft as any).$form.errorCount as number)\n : undefined\n\n const errorCountDelta =\n prevFormErrorCount === undefined\n ? 0\n : updates.reduce((acc, u) => acc + (countErrorLeaves(u.next) - countErrorLeaves(u.prev)), 0)\n\n const nextState = create(draft, (nextDraft) => {\n for (const u of updates) {\n if (u.next === undefined) {\n unsetAtPathMutating(nextDraft, u.errorPath)\n } else {\n setAtPathMutating(nextDraft, u.errorPath, u.next)\n }\n }\n\n if (prevFormErrorCount !== undefined && errorCountDelta !== 0) {\n const meta = nextDraft.$form\n if (meta && typeof meta === 'object' && !Array.isArray(meta)) {\n meta.errorCount = Math.max(0, prevFormErrorCount + errorCountDelta)\n }\n }\n }) as unknown as S\n\n ctx.setDraft(nextState)\n\n for (const u of updates) {\n const normalized = normalizeFieldPath(u.errorPath) ?? 
[]\n ctx.recordPatch(normalized, reason, u.prev, u.next)\n }\n })\n\n if (traceEvents && traceEvents.length > 0) {\n yield* Effect.forEach(traceEvents, (event) => Debug.record(event), {\n discard: true,\n })\n }\n })\n","import { Effect, Fiber, FiberRef, Option } from 'effect'\nimport { create } from 'mutative'\nimport * as EffectOp from '../effect-op.js'\nimport { Snapshot, internal as ResourceInternal, keyHash as hashKey } from '../resource.js'\nimport * as EffectOpCore from '../runtime/core/EffectOpCore.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport * as TaskRunner from '../runtime/core/TaskRunner.js'\nimport { isDevEnv, ReplayModeConfigTag } from '../runtime/core/env.js'\nimport * as ReplayLog from '../runtime/core/ReplayLog.js'\nimport type { PatchReason } from '../runtime/core/StateTransaction.js'\nimport type { FieldPath, FieldPathId } from '../field-path.js'\nimport { normalizeFieldPath } from '../field-path.js'\nimport type { BoundApi } from '../runtime/core/module.js'\nimport { getBoundInternals } from '../runtime/core/runtimeInternalsAccessor.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport * as DepsTrace from './deps-trace.js'\nimport * as RowId from './rowid.js'\nimport type { StateTraitEntry, StateTraitPlanStep, StateTraitProgram } from './model.js'\n\nexport interface SourceSyncContext<S> {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly getDraft: () => S\n readonly setDraft: (next: S) => void\n readonly recordPatch: (\n path: string | FieldPath | FieldPathId | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\nconst onceInRunSession = (key: string): Effect.Effect<boolean, never, any> =>\n Effect.serviceOption(RunSessionTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.local.once(key) : true)),\n )\n\nconst formatList = (items: ReadonlyArray<string>, limit = 10): string => {\n if (items.length === 0) return ''\n if (items.length <= limit) return items.join(', ')\n return `${items.slice(0, limit).join(', ')}, …(+${items.length - limit})`\n}\n\nconst emitDepsMismatch = (params: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'computed' | 'source'\n readonly fieldPath: string\n readonly diff: DepsTrace.DepsDiff\n}): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const key = `${params.instanceId ?? 'unknown'}::${params.kind}::${params.fieldPath}`\n const shouldEmit = yield* onceInRunSession(`deps_mismatch:${key}`)\n if (!shouldEmit) return\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: params.moduleId,\n instanceId: params.instanceId,\n code: 'state_trait::deps_mismatch',\n severity: 'warning',\n message:\n `[deps] ${params.kind} \"${params.fieldPath}\" declared=[${formatList(params.diff.declared)}] ` +\n `reads=[${formatList(params.diff.reads)}] missing=[${formatList(params.diff.missing)}] ` +\n `unused=[${formatList(params.diff.unused)}]`,\n hint:\n 'deps is the single source of truth for dependencies: incremental scheduling / reverse closures / performance optimizations rely on deps only. ' +\n 'Keep deps consistent with actual reads; if you really depend on the whole object, declare a coarser-grained dep (e.g. \"profile\") to cover sub-field reads.',\n kind: `deps_mismatch:${params.kind}`,\n })\n })\n\nconst getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack, never, any> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? maybe.value.stack : [])),\n )\n\nconst recordTraitPatch = (\n bound: BoundApi<any, any>,\n path: string,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n): void => {\n const normalized = normalizeFieldPath(path) ?? 
[]\n try {\n const internals = getBoundInternals(bound as any)\n internals.txn.recordStatePatch(normalized, reason, from, to, traitNodeId)\n } catch {\n // no-op for legacy/mocked bound\n }\n}\n\nconst recordReplayEvent = (bound: BoundApi<any, any>, event: ReplayLog.ReplayLogEvent): void => {\n try {\n const internals = getBoundInternals(bound as any)\n internals.txn.recordReplayEvent(event)\n } catch {\n // no-op for legacy/mocked bound\n }\n}\n\nconst getBoundScope = (bound: BoundApi<any, any>): { readonly moduleId?: string; readonly instanceId?: string } => {\n try {\n const internals = getBoundInternals(bound as any)\n return { moduleId: internals.moduleId, instanceId: internals.instanceId }\n } catch {\n return { moduleId: undefined, instanceId: undefined }\n }\n}\n\nconst setSnapshotInTxn = (\n bound: BoundApi<any, any>,\n fieldPath: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n traitNodeId?: string,\n): Effect.Effect<boolean, never, any> =>\n Effect.gen(function* () {\n let wrote = false\n yield* bound.state.mutate((draft) => {\n const prev = RowId.getAtPath(draft, fieldPath)\n if (Object.is(prev, next)) return\n wrote = true\n RowId.setAtPathMutating(draft, fieldPath, next)\n recordTraitPatch(bound, fieldPath, reason, prev, next, traitNodeId)\n })\n return wrote\n })\n\nconst writebackIfCurrentKeyHash = (\n bound: BoundApi<any, any>,\n fieldPath: string,\n keyHash: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n traitNodeId?: string,\n replayEvent?: ReplayLog.ReplayLogEvent,\n): Effect.Effect<boolean, never, any> =>\n Effect.gen(function* () {\n let wrote = false\n yield* bound.state.mutate((draft) => {\n const current = RowId.getAtPath(draft, fieldPath)\n const currentKeyHash = current && typeof current === 'object' ? 
(current as any).keyHash : undefined\n if (currentKeyHash !== keyHash) return\n\n const prev = current\n if (Object.is(prev, next)) return\n\n wrote = true\n RowId.setAtPathMutating(draft, fieldPath, next)\n if (replayEvent) {\n recordReplayEvent(bound, replayEvent)\n }\n recordTraitPatch(bound, fieldPath, reason, prev, next, traitNodeId)\n })\n return wrote\n })\n\n/**\n * syncIdleInTransaction:\n * - Synchronously evaluate all source.key(state) within the transaction window.\n * - If a key becomes empty (undefined), synchronously reset the field to an idle snapshot (avoid tearing).\n */\nexport const syncIdleInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: SourceSyncContext<S>,\n): Effect.Effect<void> =>\n Effect.sync(() => {\n const draft = ctx.getDraft() as any\n const updates: Array<{ readonly fieldPath: string; readonly prev: unknown }> = []\n\n for (const entry of program.entries) {\n if (entry.kind !== 'source') continue\n const fieldPath = entry.fieldPath\n const listItem = RowId.parseListItemFieldPath(fieldPath)\n\n if (listItem) {\n // list.item scope: evaluate key per row by index, and synchronously write back idle for inactive rows.\n const listValue = RowId.getAtPath(draft, listItem.listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n\n for (let index = 0; index < items.length; index++) {\n const item = items[index]\n\n let key: unknown\n try {\n key = (entry.meta as any).key(item)\n } catch {\n continue\n }\n\n if (key !== undefined) continue\n\n const concretePath = RowId.toListItemValuePath(listItem.listPath, index, listItem.itemPath)\n const prev = RowId.getAtPath(draft, concretePath)\n const prevStatus = prev && typeof prev === 'object' ? 
(prev as any).status : undefined\n if (prevStatus === 'idle') {\n const data = (prev as any)?.data\n const error = (prev as any)?.error\n if (data === undefined && error === undefined) {\n continue\n }\n }\n\n updates.push({ fieldPath: concretePath, prev })\n }\n\n continue\n }\n\n let key: unknown\n try {\n key = (entry.meta as any).key(draft)\n } catch {\n continue\n }\n\n if (key !== undefined) continue\n\n const prev = RowId.getAtPath(draft, fieldPath)\n const prevStatus = prev && typeof prev === 'object' ? (prev as any).status : undefined\n if (prevStatus === 'idle') {\n // Still ensure data/error are cleared.\n const data = (prev as any)?.data\n const error = (prev as any)?.error\n if (data === undefined && error === undefined) {\n continue\n }\n }\n\n updates.push({ fieldPath, prev })\n }\n\n if (updates.length === 0) return\n\n const reason: PatchReason = 'source-refresh'\n\n const nextDraft = create(draft, (next) => {\n for (const u of updates) {\n RowId.setAtPathMutating(next, u.fieldPath, Snapshot.idle())\n }\n })\n\n ctx.setDraft(nextDraft as S)\n\n for (const u of updates) {\n const normalized = normalizeFieldPath(u.fieldPath) ?? []\n ctx.recordPatch(normalized, reason, u.prev, Snapshot.idle(), `source:${u.fieldPath}:idle`)\n }\n })\n\n/**\n * installSourceRefresh:\n * - Register the refresh implementation for a single source field (ResourceSnapshot + keyHash gate + concurrency).\n */\nexport const installSourceRefresh = <S>(\n bound: BoundApi<any, any>,\n step: StateTraitPlanStep,\n entry: Extract<StateTraitEntry<S, string>, { readonly kind: 'source' }>,\n): Effect.Effect<void, never, any> => {\n if (!step.targetFieldPath) return Effect.void\n\n const fieldPath = step.targetFieldPath\n const resourceId = step.resourceId ?? 
entry.meta.resource\n const listItem = RowId.parseListItemFieldPath(fieldPath)\n\n let internals: ReturnType<typeof getBoundInternals> | undefined\n try {\n internals = getBoundInternals(bound as any)\n } catch {\n return Effect.void\n }\n\n const register = internals.traits.registerSourceRefresh\n\n const recordSnapshot = (\n replayMode: 'live' | 'replay',\n replayLog: ReplayLog.ReplayLogService | undefined,\n input:\n | ReplayLog.ReplayLogEvent\n | {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly concurrency?: string\n readonly phase: ReplayLog.ResourceSnapshotPhase\n readonly snapshot: unknown\n },\n ): Effect.Effect<void, never, any> => {\n if (!replayLog) return Effect.void\n if (replayMode !== 'live') return Effect.void\n const event: ReplayLog.ReplayLogEvent =\n input && typeof input === 'object' && (input as any)._tag === 'ResourceSnapshot'\n ? (input as ReplayLog.ReplayLogEvent)\n : {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: (input as any).fieldPath,\n keyHash: (input as any).keyHash,\n concurrency: (input as any).concurrency,\n phase: (input as any).phase,\n snapshot: (input as any).snapshot,\n timestamp: Date.now(),\n moduleId: (input as any).moduleId,\n instanceId: (input as any).instanceId,\n }\n return replayLog.record(event)\n }\n\n // list.item scope: in-flight gating by RowID (avoid writing to the wrong row under insert/remove/reorder).\n if (listItem) {\n const store = internals.traits.rowIdStore as RowId.RowIdStore | undefined\n if (!store) {\n return Effect.void\n }\n\n const listPath = listItem.listPath\n const itemPath = listItem.itemPath\n if (!itemPath) {\n // Never write the snapshot back to the whole item (it would overwrite business values).\n return Effect.void\n }\n\n const concurrency = (entry.meta as any).concurrency as 'switch' | 'exhaust-trailing' | undefined\n const mode = concurrency ?? 
'switch'\n\n const inFlight = new Map<\n RowId.RowId,\n {\n readonly gen: number\n readonly fiber: Fiber.RuntimeFiber<void, never>\n readonly keyHash: string\n }\n >()\n const trailing = new Map<RowId.RowId, { readonly key: unknown; readonly keyHash: string }>()\n let gen = 0\n\n // When a row is removed: clear trailing/inFlight references to avoid wrong attribution or memory leaks.\n store.onRemoved(listPath, (rowId) => {\n trailing.delete(rowId)\n inFlight.delete(rowId)\n })\n\n const setSnapshotForRowInTxn = (\n rowId: RowId.RowId,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n ): Effect.Effect<string | undefined, never, any> =>\n Effect.gen(function* () {\n let wrotePath: string | undefined\n yield* bound.state.mutate((draft) => {\n const index = store.getIndex(listPath, rowId)\n if (index === undefined) return\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n const prev = RowId.getAtPath(draft, concretePath)\n if (Object.is(prev, next)) return\n wrotePath = concretePath\n RowId.setAtPathMutating(draft, concretePath, next)\n recordTraitPatch(bound, concretePath, reason, prev, next, step.debugInfo?.graphNodeId)\n })\n return wrotePath\n })\n\n const writebackIfCurrentKeyHashForRow = (\n rowId: RowId.RowId,\n keyHash: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n phase?: ReplayLog.ResourceSnapshotPhase,\n ): Effect.Effect<string | undefined, never, any> =>\n Effect.gen(function* () {\n let wrotePath: string | undefined\n yield* bound.state.mutate((draft) => {\n const index = store.getIndex(listPath, rowId)\n if (index === undefined) return\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n\n const current = RowId.getAtPath(draft, concretePath)\n const currentKeyHash = current && typeof current === 'object' ? 
(current as any).keyHash : undefined\n if (currentKeyHash !== keyHash) return\n\n const prev = current\n if (Object.is(prev, next)) return\n\n wrotePath = concretePath\n RowId.setAtPathMutating(draft, concretePath, next)\n if (phase) {\n const { moduleId, instanceId } = getBoundScope(bound)\n recordReplayEvent(bound, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: concretePath,\n keyHash,\n concurrency: mode,\n phase,\n snapshot: next,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n recordTraitPatch(bound, concretePath, reason, prev, next, step.debugInfo?.graphNodeId)\n })\n return wrotePath\n })\n\n const startFetch = (\n rowId: RowId.RowId,\n key: unknown,\n keyHash: string,\n replayMode: 'live' | 'replay',\n replayLog: ReplayLog.ReplayLogService | undefined,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n\n const indexForLog = store.getIndex(listPath, rowId)\n const logFieldPath =\n indexForLog === undefined ? 
undefined : RowId.toListItemValuePath(listPath, indexForLog, itemPath)\n\n let loadingSnapshot: unknown = Snapshot.loading({ keyHash })\n if (replayMode === 'replay' && replayLog && logFieldPath) {\n const replayLoading = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath: logFieldPath,\n keyHash,\n phase: 'loading',\n })\n if (replayLoading) {\n loadingSnapshot = replayLoading.snapshot\n }\n }\n const wroteLoadingPath = yield* setSnapshotForRowInTxn(\n rowId,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:loading`,\n )\n if (wroteLoadingPath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteLoadingPath,\n keyHash,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n\n const io = Effect.gen(function* () {\n if (replayMode === 'replay' && replayLog) {\n // Let loading commit become visible first, then replay the settled phase (preserve the async-resource timeline shape).\n yield* Effect.yieldNow()\n const consumePath = wroteLoadingPath ?? 
logFieldPath\n if (!consumePath) return yield* Effect.void\n\n const replayed = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath: consumePath,\n keyHash,\n })\n if (!replayed) return yield* Effect.void\n\n if (replayed.phase === 'success') {\n yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:success`,\n 'success',\n )\n } else if (replayed.phase === 'error') {\n yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:error`,\n 'error',\n )\n }\n\n return yield* Effect.void\n }\n\n const stack = yield* getMiddlewareStack()\n\n const registryOpt = yield* Effect.serviceOption(ResourceInternal.ResourceRegistryTag)\n const registry = Option.isSome(registryOpt) ? registryOpt.value : undefined\n const spec = registry?.specs.get(resourceId)\n\n if (!spec) {\n return yield* Effect.void\n }\n\n const loadEffect = (spec.load as any)(key) as Effect.Effect<any, any, any>\n\n const meta: any = {\n moduleId,\n instanceId,\n fieldPath,\n resourceId,\n key,\n keyHash,\n rowId,\n traitNodeId: step.debugInfo?.graphNodeId,\n stepId: step.id,\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const seqKey = instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', seqKey)\n }\n }\n\n const op = EffectOp.make<any, any, any>({\n kind: 'service',\n name: resourceId,\n effect: loadEffect,\n meta,\n })\n\n const exit = yield* Effect.exit(EffectOp.run(op, stack))\n\n if (exit._tag === 'Success') {\n const successSnapshot = Snapshot.success({ keyHash, data: exit.value })\n const wroteSuccessPath = yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n successSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:success`,\n 'success',\n )\n if (wroteSuccessPath) {\n yield* recordSnapshot(replayMode, replayLog, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteSuccessPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: successSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n } else {\n const errorSnapshot = Snapshot.error({ keyHash, error: exit.cause })\n const wroteErrorPath = yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n errorSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:error`,\n 'error',\n )\n if (wroteErrorPath) {\n yield* recordSnapshot(replayMode, replayLog, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteErrorPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: errorSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n }\n }).pipe(Effect.catchAllCause(() => Effect.void))\n\n // list.item: IO fibers must detach from the sync-transaction FiberRef; otherwise they'd be misclassified as \"in txn window\"\n // and block subsequent writeback entrypoints.\n const fiber = yield* Effect.forkScoped(Effect.locally(TaskRunner.inSyncTransactionFiber, false)(io))\n const myGen = (gen += 1)\n inFlight.set(rowId, { gen: myGen, fiber, keyHash })\n\n yield* Effect.forkScoped(\n Effect.locally(\n TaskRunner.inSyncTransactionFiber,\n false,\n )(\n Fiber.await(fiber).pipe(\n Effect.zipRight(\n Effect.sync(() => {\n const current = 
inFlight.get(rowId)\n if (current && current.gen === myGen) {\n inFlight.delete(rowId)\n }\n }),\n ),\n Effect.zipRight(\n mode === 'exhaust-trailing'\n ? Effect.gen(function* () {\n const next = trailing.get(rowId)\n trailing.delete(rowId)\n if (next) {\n yield* startFetch(rowId, next.key, next.keyHash, replayMode, replayLog)\n }\n })\n : Effect.void,\n ),\n Effect.catchAllCause(() => Effect.void),\n ),\n ),\n )\n })\n\n register(fieldPath, (state: any) =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n const replayModeOpt = yield* Effect.serviceOption(ReplayModeConfigTag)\n const replayMode = Option.isSome(replayModeOpt) ? replayModeOpt.value.mode : 'live'\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog)\n const replayLog = Option.isSome(replayLogOpt) ? replayLogOpt.value : undefined\n const force = yield* FiberRef.get(TaskRunner.forceSourceRefresh)\n\n const listValue = RowId.getAtPath(state, listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n const ids = store.ensureList(listPath, items)\n\n // dev-mode: trace deps once for the first row (diagnostics only; does not affect execution semantics).\n const traceKey = `${instanceId ?? 'unknown'}::source::${fieldPath}`\n if (isDevEnv() && (yield* onceInRunSession(`deps_trace_settled:${traceKey}`))) {\n try {\n const sample = items[0]\n if (sample !== undefined) {\n const traced = DepsTrace.trace((s) => (entry.meta as any).key(s), sample as any)\n const prefixedReads = traced.reads.map((r) => (r ? `${listPath}[].${r}` : `${listPath}[]`))\n const diff = DepsTrace.diffDeps(((entry.meta as any).deps ?? 
[]) as ReadonlyArray<string>, prefixedReads)\n if (diff) {\n yield* emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'source',\n fieldPath,\n diff,\n })\n }\n }\n } catch {\n // tracing failure should never break refresh flow\n }\n }\n\n for (let index = 0; index < items.length; index++) {\n const rowId = ids[index]\n if (!rowId) continue\n\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n const prevSnapshot = RowId.getAtPath(state, concretePath) as any\n\n let key: unknown\n try {\n key = (entry.meta as any).key(items[index])\n } catch {\n key = undefined\n }\n\n const current = inFlight.get(rowId)\n\n if (key === undefined) {\n trailing.delete(rowId)\n inFlight.delete(rowId)\n\n // If it's already clean idle, avoid redundant writeback (prevents meaningless patches and UI jitter).\n if (\n prevSnapshot &&\n typeof prevSnapshot === 'object' &&\n prevSnapshot.status === 'idle' &&\n prevSnapshot.data === undefined &&\n prevSnapshot.error === undefined\n ) {\n continue\n }\n\n const idleSnapshot = Snapshot.idle()\n const wroteIdlePath = yield* setSnapshotForRowInTxn(\n rowId,\n idleSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:idle`,\n )\n if (wroteIdlePath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteIdlePath,\n keyHash: undefined,\n concurrency: mode,\n phase: 'idle',\n snapshot: idleSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n continue\n }\n\n const h = hashKey(key)\n\n // keyHash unchanged: avoid redundant refresh while keeping in-flight.\n if (!force && current && current.keyHash === h) {\n continue\n }\n\n // Not in-flight: if snapshot.keyHash already matches, treat it as already up-to-date (avoid full refresh and row jitter).\n const prevKeyHash =\n prevSnapshot && typeof prevSnapshot === 'object' ? 
(prevSnapshot as any).keyHash : undefined\n if (!force && !current && prevKeyHash === h) {\n continue\n }\n\n if (mode === 'exhaust-trailing' && current) {\n trailing.set(rowId, { key, keyHash: h })\n const loadingSnapshot = Snapshot.loading({ keyHash: h })\n const wroteLoadingPath = yield* setSnapshotForRowInTxn(\n rowId,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:loading`,\n )\n if (wroteLoadingPath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteLoadingPath,\n keyHash: h,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n continue\n }\n\n if (mode === 'switch' && current) {\n // Do not rely on cancellation correctness: stale writebacks are dropped by the keyHash gate.\n trailing.delete(rowId)\n inFlight.delete(rowId)\n }\n\n yield* startFetch(rowId, key, h, replayMode, replayLog)\n }\n }),\n )\n\n return Effect.void\n }\n\n // in-flight state (per field)\n let inFlight:\n | {\n readonly gen: number\n readonly fiber: Fiber.RuntimeFiber<void, never>\n readonly keyHash: string\n }\n | undefined\n let gen = 0\n let trailing: { readonly key: unknown; readonly keyHash: string } | undefined\n\n const concurrency = (entry.meta as any).concurrency as 'switch' | 'exhaust-trailing' | undefined\n const mode = concurrency ?? 
'switch'\n\n const startFetch = (\n key: unknown,\n keyHash: string,\n replayMode: 'live' | 'replay',\n replayLog: ReplayLog.ReplayLogService | undefined,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n\n // 1) pending: synchronously write a loading snapshot (within the current transaction window).\n let loadingSnapshot: unknown = Snapshot.loading({ keyHash })\n if (replayMode === 'replay' && replayLog) {\n const replayLoading = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath,\n keyHash,\n phase: 'loading',\n })\n if (replayLoading) {\n loadingSnapshot = replayLoading.snapshot\n }\n }\n const wroteLoading = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:loading`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteLoading) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n\n // 2) IO: run in a background fiber (avoid blocking the current transaction).\n const io = Effect.gen(function* () {\n if (replayMode === 'replay' && replayLog) {\n // Let loading commit become visible first, then replay the settled phase (preserve the async-resource timeline shape).\n yield* Effect.yieldNow()\n const replayed = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath,\n keyHash,\n })\n if (!replayed) return yield* Effect.void\n\n if (replayed.phase === 'success') {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: replayed.snapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n yield* 
writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:success`,\n step.debugInfo?.graphNodeId,\n event,\n )\n } else if (replayed.phase === 'error') {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: replayed.snapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:error`,\n step.debugInfo?.graphNodeId,\n event,\n )\n }\n\n return yield* Effect.void\n }\n\n const stack = yield* getMiddlewareStack()\n\n const registryOpt = yield* Effect.serviceOption(ResourceInternal.ResourceRegistryTag)\n const registry = Option.isSome(registryOpt) ? registryOpt.value : undefined\n const spec = registry?.specs.get(resourceId)\n\n if (!spec) {\n return yield* Effect.void\n }\n\n const loadEffect = (spec.load as any)(key) as Effect.Effect<any, any, any>\n\n const meta: any = {\n moduleId,\n instanceId,\n fieldPath,\n resourceId,\n key,\n keyHash,\n traitNodeId: step.debugInfo?.graphNodeId,\n stepId: step.id,\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const seqKey = instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', seqKey)\n }\n }\n\n const op = EffectOp.make<any, any, any>({\n kind: 'trait-source',\n name: resourceId,\n effect: loadEffect,\n meta,\n })\n\n const exit = yield* Effect.exit(EffectOp.run(op, stack))\n\n // 3) writeback: use a keyHash gate to prevent stale results from writing back onto a new key.\n if (exit._tag === 'Success') {\n const successSnapshot = Snapshot.success({ keyHash, data: exit.value })\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: successSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n const wroteSuccess = yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n successSnapshot,\n 'source-refresh',\n `source:${fieldPath}:success`,\n step.debugInfo?.graphNodeId,\n event,\n )\n if (wroteSuccess) {\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n } else {\n const errorSnapshot = Snapshot.error({ keyHash, error: exit.cause })\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: errorSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n const wroteError = yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n errorSnapshot,\n 'source-refresh',\n `source:${fieldPath}:error`,\n step.debugInfo?.graphNodeId,\n event,\n )\n if (wroteError) {\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n }\n }).pipe(Effect.catchAllCause(() => Effect.void))\n\n // Do not wait for IO completion: forkScoped into the runtime scope so unmount will interrupt automatically.\n const fiber = yield* Effect.forkScoped(Effect.locally(TaskRunner.inSyncTransactionFiber, false)(io))\n const myGen = (gen += 1)\n inFlight = { gen: myGen, fiber, keyHash }\n\n // After in-flight completes, clean up; in exhaust-trailing mode, run 
one trailing fetch if present.\n yield* Effect.forkScoped(\n Effect.locally(\n TaskRunner.inSyncTransactionFiber,\n false,\n )(\n Fiber.await(fiber).pipe(\n Effect.zipRight(\n Effect.sync(() => {\n if (inFlight && inFlight.gen === myGen) {\n inFlight = undefined\n }\n }),\n ),\n Effect.zipRight(\n mode === 'exhaust-trailing'\n ? Effect.gen(function* () {\n const next = trailing\n trailing = undefined\n if (next) {\n yield* startFetch(next.key, next.keyHash, replayMode, replayLog)\n }\n })\n : Effect.void,\n ),\n Effect.catchAllCause(() => Effect.void),\n ),\n ),\n )\n })\n\n register(fieldPath, (state: any) =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n const replayModeOpt = yield* Effect.serviceOption(ReplayModeConfigTag)\n const replayMode = Option.isSome(replayModeOpt) ? replayModeOpt.value.mode : 'live'\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog)\n const replayLog = Option.isSome(replayLogOpt) ? replayLogOpt.value : undefined\n const force = yield* FiberRef.get(TaskRunner.forceSourceRefresh)\n\n let key: unknown\n try {\n key = (entry.meta as any).key(state)\n } catch {\n key = undefined\n }\n\n // dev-mode: detect mismatch between actual reads in keySelector and declared deps (diagnostics only; does not affect execution semantics).\n const traceKey = `${instanceId ?? 'unknown'}::source::${fieldPath}`\n if (isDevEnv() && (yield* onceInRunSession(`deps_trace_settled:${traceKey}`))) {\n try {\n const traced = DepsTrace.trace((s) => (entry.meta as any).key(s), state)\n const diff = DepsTrace.diffDeps(((entry.meta as any).deps ?? 
[]) as ReadonlyArray<string>, traced.reads)\n if (diff) {\n yield* emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'source',\n fieldPath,\n diff,\n })\n }\n } catch {\n // tracing failure should never break refresh flow\n }\n }\n\n // Key becomes empty: synchronously clear to idle (and interrupt in-flight).\n if (key === undefined) {\n if (inFlight) {\n yield* Fiber.interruptFork(inFlight.fiber)\n inFlight = undefined\n }\n trailing = undefined\n\n const idleSnapshot = Snapshot.idle()\n const wroteIdle = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n idleSnapshot,\n 'source-refresh',\n `source:${fieldPath}:idle`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteIdle) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash: undefined,\n concurrency: mode,\n phase: 'idle',\n snapshot: idleSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n return\n }\n\n const h = hashKey(key)\n\n // Default semantics: when a non-idle snapshot already exists for the same keyHash, refresh should be a no-op when possible\n // (avoid duplicate IO/writeback). Explicit refresh/invalidate can bypass via force.\n if (!force) {\n if (inFlight && inFlight.keyHash === h) {\n return\n }\n\n const currentSnapshot = RowId.getAtPath(state, fieldPath) as any\n const currentKeyHash =\n currentSnapshot && typeof currentSnapshot === 'object' ? (currentSnapshot as any).keyHash : undefined\n const currentStatus =\n currentSnapshot && typeof currentSnapshot === 'object' ? 
(currentSnapshot as any).status : undefined\n if (currentStatus && currentStatus !== 'idle' && currentKeyHash === h) {\n return\n }\n }\n\n if (mode === 'exhaust-trailing' && inFlight) {\n // Busy: record trailing and update loading immediately; stale in-flight writebacks will be blocked by the keyHash gate.\n trailing = { key, keyHash: h }\n const loadingSnapshot = Snapshot.loading({ keyHash: h })\n const wroteLoading = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:loading`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteLoading) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash: h,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n return\n }\n\n if (mode === 'switch' && inFlight) {\n yield* Fiber.interruptFork(inFlight.fiber)\n inFlight = undefined\n trailing = undefined\n }\n\n // start fetch (pending tx + fork IO)\n yield* startFetch(key, h, replayMode, replayLog)\n }),\n )\n\n return Effect.void\n}\n","import { Context, Layer } from 'effect'\nimport { isDevEnv } from './runtime/core/env.js'\n\nexport interface ResourceSpec<Key, Out, Err, Env> {\n readonly id: string\n readonly keySchema: import('effect').Schema.Schema<Key, any>\n readonly load: (key: Key) => import('effect').Effect.Effect<Out, Err, Env>\n readonly meta?: {\n readonly cacheGroup?: string\n readonly description?: string\n readonly [k: string]: unknown\n }\n}\n\nexport type AnyResourceSpec = ResourceSpec<any, any, any, any>\n\nexport type ResourceStatus = 'idle' | 'loading' | 'success' | 'error'\n\nexport interface ResourceSnapshot<Data = unknown, Err = unknown> {\n readonly status: ResourceStatus\n readonly keyHash?: string\n readonly data?: Data\n readonly error?: Err\n}\n\nconst stableStringify = (value: 
unknown): string => {\n const seen = new WeakSet<object>()\n const encode = (input: unknown): unknown => {\n if (input === null) return null\n if (typeof input === 'string' || typeof input === 'number' || typeof input === 'boolean') {\n return input\n }\n if (typeof input === 'bigint') return input.toString()\n if (typeof input === 'undefined') return '__undefined__'\n if (typeof input === 'symbol') return `__symbol__:${String(input)}`\n if (typeof input === 'function') return '__function__'\n\n if (Array.isArray(input)) {\n return input.map((v) => encode(v))\n }\n if (input instanceof Date) {\n return `__date__:${input.toISOString()}`\n }\n if (input instanceof Error) {\n return {\n _tag: 'Error',\n name: input.name,\n message: input.message,\n }\n }\n if (input && typeof input === 'object') {\n const obj = input as object\n if (seen.has(obj)) return '__cycle__'\n seen.add(obj)\n\n const record = input as Record<string, unknown>\n const keys = Object.keys(record).sort()\n const out: Record<string, unknown> = {}\n for (const k of keys) {\n out[k] = encode(record[k])\n }\n return out\n }\n return String(input)\n }\n\n try {\n return JSON.stringify(encode(value))\n } catch {\n return String(value)\n }\n}\n\nexport const keyHash = (key: unknown): string => stableStringify(key)\n\nexport const Snapshot = {\n idle: <Data = never, Err = never>(): ResourceSnapshot<Data, Err> => ({\n status: 'idle',\n keyHash: undefined,\n data: undefined,\n error: undefined,\n }),\n loading: <Data = never, Err = never>(params: { readonly keyHash: string }): ResourceSnapshot<Data, Err> => ({\n status: 'loading',\n keyHash: params.keyHash,\n data: undefined,\n error: undefined,\n }),\n success: <Data>(params: { readonly keyHash: string; readonly data: Data }): ResourceSnapshot<Data, never> => ({\n status: 'success',\n keyHash: params.keyHash,\n data: params.data,\n error: undefined,\n }),\n error: <Err>(params: { readonly keyHash: string; readonly error: Err }): ResourceSnapshot<never, Err> 
=> ({\n status: 'error',\n keyHash: params.keyHash,\n data: undefined,\n error: params.error,\n }),\n} as const\n\nexport interface ResourceRegistry {\n readonly specs: ReadonlyMap<string, AnyResourceSpec>\n}\n\nexport class ResourceRegistryTag extends Context.Tag('@logixjs/core/ResourceRegistry')<\n ResourceRegistryTag,\n ResourceRegistry\n>() {}\n\nexport const internal = {\n ResourceRegistryTag,\n}\n\nexport type Spec<Key, Out, Err, Env> = ResourceSpec<Key, Out, Err, Env>\n\nexport const make = <Key, Out, Err, Env>(spec: ResourceSpec<Key, Out, Err, Env>): ResourceSpec<Key, Out, Err, Env> =>\n spec\n\nexport const layer = (specs: ReadonlyArray<AnyResourceSpec>): Layer.Layer<ResourceRegistryTag, never, never> =>\n Layer.succeed(\n ResourceRegistryTag,\n (() => {\n const map = new Map<string, AnyResourceSpec>()\n for (const spec of specs) {\n if (isDevEnv() && map.has(spec.id) && map.get(spec.id) !== spec) {\n throw new Error(`[Resource.layer] Duplicate resource id \"${spec.id}\" detected in the same runtime scope`)\n }\n map.set(spec.id, spec)\n }\n return { specs: map }\n })(),\n )\n","import { Context, Effect, Layer } from 'effect'\n\nexport type ResourceSnapshotPhase = 'idle' | 'loading' | 'success' | 'error'\n\nexport type ReplayLogEvent =\n | {\n readonly _tag: 'ResourceSnapshot'\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n /**\n * Optional: source concurrency policy (e.g. 
\"switch\" / \"exhaust-trailing\").\n * - Must remain slim & serializable.\n * - Used by Devtools/replay to explain why old results are dropped / why trailing happens.\n */\n readonly concurrency?: string\n readonly phase: ResourceSnapshotPhase\n readonly snapshot: unknown\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n }\n | {\n readonly _tag: 'InvalidateRequest'\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'resource' | 'query'\n readonly target: string\n readonly meta?: unknown\n }\n\nexport type ResourceSnapshotEvent = Extract<ReplayLogEvent, { readonly _tag: 'ResourceSnapshot' }>\n\nexport interface ReplayLogService {\n readonly record: (event: ReplayLogEvent) => Effect.Effect<void>\n readonly snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>>\n readonly resetCursor: Effect.Effect<void>\n readonly consumeNext: (predicate: (event: ReplayLogEvent) => boolean) => Effect.Effect<ReplayLogEvent | undefined>\n readonly consumeNextResourceSnapshot: (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }) => Effect.Effect<ResourceSnapshotEvent | undefined>\n}\n\nexport class ReplayLog extends Context.Tag('@logixjs/core/ReplayLog')<ReplayLog, ReplayLogService>() {}\n\nexport const make = (initial?: ReadonlyArray<ReplayLogEvent>): ReplayLogService => {\n const events: Array<ReplayLogEvent> = initial ? 
Array.from(initial) : []\n let cursor = 0\n\n const consumeNext = (predicate: (event: ReplayLogEvent) => boolean): Effect.Effect<ReplayLogEvent | undefined> =>\n Effect.sync(() => {\n for (let i = cursor; i < events.length; i++) {\n const event = events[i]\n if (!predicate(event)) continue\n cursor = i + 1\n return event\n }\n return undefined\n })\n\n const consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }): Effect.Effect<ResourceSnapshotEvent | undefined> =>\n consumeNext((event): event is ResourceSnapshotEvent => {\n if (event._tag !== 'ResourceSnapshot') return false\n if (event.resourceId !== params.resourceId) return false\n if (event.fieldPath !== params.fieldPath) return false\n if (params.keyHash !== undefined && event.keyHash !== params.keyHash) {\n return false\n }\n if (params.phase !== undefined && event.phase !== params.phase) {\n return false\n }\n return true\n }).pipe(Effect.map((event) => event as ResourceSnapshotEvent | undefined))\n\n return {\n record: (event) => Effect.sync(() => events.push(event)),\n snapshot: Effect.sync(() => events.slice()),\n resetCursor: Effect.sync(() => {\n cursor = 0\n }),\n consumeNext,\n consumeNextResourceSnapshot,\n }\n}\n\nexport const layer = (initial?: ReadonlyArray<ReplayLogEvent>): Layer.Layer<ReplayLog, never, never> =>\n Layer.succeed(ReplayLog, make(initial))\n\nexport const record = (event: ReplayLogEvent): Effect.Effect<void, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.record(event)\n })\n\nexport const snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.snapshot\n})\n\nexport const resetCursor: Effect.Effect<void, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.resetCursor\n})\n\nexport const 
consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n}): Effect.Effect<ResourceSnapshotEvent | undefined, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.consumeNextResourceSnapshot(params)\n })\n","import { Effect, Option } from 'effect'\nimport type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport {\n ConcurrencyPolicyOverridesTag,\n ConcurrencyPolicyTag,\n type ConcurrencyLimit,\n type ConcurrencyPolicy,\n type ConcurrencyPolicyOverrides,\n type ConcurrencyPolicyPatch,\n} from './env.js'\nimport { normalizeBoolean, normalizePositiveInt, normalizePositiveNumber } from './normalize.js'\n\nexport type ConcurrencyPolicyConfigScope = 'builtin' | 'runtime_default' | 'runtime_module' | 'provider'\n\nexport type ResolvedConcurrencyPolicy = {\n readonly concurrencyLimit: ConcurrencyLimit\n readonly losslessBackpressureCapacity: number\n readonly allowUnbounded: boolean\n readonly pressureWarningThreshold: {\n readonly backlogCount: number\n readonly backlogDurationMs: number\n }\n readonly warningCooldownMs: number\n readonly configScope: ConcurrencyPolicyConfigScope\n /** Field-level scope for the effective concurrency limit. */\n readonly concurrencyLimitScope: ConcurrencyPolicyConfigScope\n /** The originally requested concurrency limit (for explaining the unbounded gate). */\n readonly requestedConcurrencyLimit: ConcurrencyLimit\n /** Field-level scope for the originally requested concurrency limit. */\n readonly requestedConcurrencyLimitScope: ConcurrencyPolicyConfigScope\n /** Field-level scope for allowUnbounded. */\n readonly allowUnboundedScope: ConcurrencyPolicyConfigScope\n}\n\nconst normalizeConcurrencyLimit = (v: unknown): ConcurrencyLimit | undefined =>\n v === 'unbounded' ? 
'unbounded' : normalizePositiveInt(v)\n\nexport const makeResolveConcurrencyPolicy = (args: {\n /** Original options.moduleId (may be undefined); used for module overrides map lookup. */\n readonly moduleId: string | undefined\n /** Optional: one-shot audit diagnostics for unbounded opt-in/blocked. */\n readonly diagnostics?: ConcurrencyDiagnostics\n}): (() => Effect.Effect<ResolvedConcurrencyPolicy>) => {\n const builtinConcurrencyLimit: ConcurrencyLimit = 16\n const builtinLosslessBackpressureCapacity = 4096\n const builtinAllowUnbounded = false\n const builtinThresholdBacklogCount = 1000\n const builtinThresholdBacklogDurationMs = 5000\n const builtinWarningCooldownMs = 30_000\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(ConcurrencyPolicyTag)\n const overridesOpt = yield* Effect.serviceOption(ConcurrencyPolicyOverridesTag)\n\n const runtimeConfig: ConcurrencyPolicy | undefined = Option.isSome(runtimeConfigOpt)\n ? runtimeConfigOpt.value\n : undefined\n const providerOverrides: ConcurrencyPolicyOverrides | undefined = Option.isSome(overridesOpt)\n ? 
overridesOpt.value\n : undefined\n\n let concurrencyLimit: ConcurrencyLimit = builtinConcurrencyLimit\n let concurrencyLimitScope: ConcurrencyPolicyConfigScope = 'builtin'\n let lastBoundedConcurrencyLimit = builtinConcurrencyLimit as number\n let lastBoundedConcurrencyLimitScope: ConcurrencyPolicyConfigScope = 'builtin'\n\n let losslessBackpressureCapacity = builtinLosslessBackpressureCapacity\n let allowUnbounded = builtinAllowUnbounded\n let allowUnboundedScope: ConcurrencyPolicyConfigScope = 'builtin'\n let thresholdBacklogCount = builtinThresholdBacklogCount\n let thresholdBacklogDurationMs = builtinThresholdBacklogDurationMs\n let warningCooldownMs = builtinWarningCooldownMs\n\n let configScope: ConcurrencyPolicyConfigScope = 'builtin'\n\n const applyPatch = (\n patch: ConcurrencyPolicy | ConcurrencyPolicyPatch | ConcurrencyPolicyOverrides | undefined,\n scope: ConcurrencyPolicyConfigScope,\n ): void => {\n if (!patch) return\n let changed = false\n\n const limit = normalizeConcurrencyLimit((patch as any).concurrencyLimit)\n if (limit) {\n concurrencyLimit = limit\n concurrencyLimitScope = scope\n if (limit !== 'unbounded') {\n lastBoundedConcurrencyLimit = limit\n lastBoundedConcurrencyLimitScope = scope\n }\n changed = true\n }\n\n const capacity = normalizePositiveInt((patch as any).losslessBackpressureCapacity)\n if (capacity != null) {\n losslessBackpressureCapacity = capacity\n changed = true\n }\n\n const allow = normalizeBoolean((patch as any).allowUnbounded)\n if (allow != null) {\n allowUnbounded = allow\n allowUnboundedScope = scope\n changed = true\n }\n\n const threshold = (patch as any).pressureWarningThreshold\n if (threshold && typeof threshold === 'object') {\n const count = normalizePositiveInt((threshold as any).backlogCount)\n if (count != null) {\n thresholdBacklogCount = count\n changed = true\n }\n\n const duration = normalizePositiveNumber((threshold as any).backlogDurationMs)\n if (duration != null) {\n thresholdBacklogDurationMs = 
duration\n changed = true\n }\n }\n\n const cooldownMs = normalizePositiveNumber((patch as any).warningCooldownMs)\n if (cooldownMs != null) {\n warningCooldownMs = cooldownMs\n changed = true\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch: ConcurrencyPolicyPatch | undefined =\n moduleId && runtimeConfig?.overridesByModuleId ? runtimeConfig.overridesByModuleId[moduleId] : undefined\n const providerModulePatch: ConcurrencyPolicyPatch | undefined =\n moduleId && providerOverrides?.overridesByModuleId ? providerOverrides.overridesByModuleId[moduleId] : undefined\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n const requestedConcurrencyLimit = concurrencyLimit\n const requestedConcurrencyLimitScope = concurrencyLimitScope\n\n // Unbounded gate: effective unbounded requires an explicit allowUnbounded=true (FR-004).\n if (typeof concurrencyLimit === 'string' && !allowUnbounded) {\n concurrencyLimit = lastBoundedConcurrencyLimit\n concurrencyLimitScope = lastBoundedConcurrencyLimitScope\n }\n\n // NOTE: diagnostics may add implementation-level metrics (e.g. 
\"saturated duration\"); the resolver only decides configuration.\n const resolved: ResolvedConcurrencyPolicy = {\n concurrencyLimit,\n losslessBackpressureCapacity,\n allowUnbounded,\n pressureWarningThreshold: {\n backlogCount: thresholdBacklogCount,\n backlogDurationMs: thresholdBacklogDurationMs,\n },\n warningCooldownMs,\n configScope,\n concurrencyLimitScope,\n requestedConcurrencyLimit,\n requestedConcurrencyLimitScope,\n allowUnboundedScope,\n }\n\n if (args.diagnostics) {\n yield* args.diagnostics.emitUnboundedPolicyIfNeeded({\n policy: resolved,\n trigger: { kind: 'concurrencyPolicy', name: 'resolve' },\n })\n }\n\n return resolved\n })\n}\n","import { Effect, Option } from 'effect'\nimport {\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n type StateTransactionOverrides,\n type TxnLanesPatch,\n} from './env.js'\nimport { normalizeBoolean, normalizeNonNegativeNumber } from './normalize.js'\n\nexport type TxnLanePolicyScope = 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'\n\nexport type TxnLaneQueueMode = 'fifo' | 'lanes'\n\nexport type TxnLaneYieldStrategy = 'baseline' | 'inputPending'\n\nexport type ResolvedTxnLanePolicy = {\n readonly enabled: boolean\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n readonly configScope: TxnLanePolicyScope\n readonly budgetMs: number\n readonly debounceMs: number\n readonly maxLagMs: number\n readonly allowCoalesce: boolean\n readonly yieldStrategy: TxnLaneYieldStrategy\n readonly queueMode: TxnLaneQueueMode\n}\n\ntype ModuleStateTransactionOptions =\n | {\n readonly txnLanes?: TxnLanesPatch\n }\n | undefined\n\nconst normalizeMs = normalizeNonNegativeNumber\nconst normalizeBool = normalizeBoolean\n\nexport const makeResolveTxnLanePolicy = (args: {\n /** Raw options.moduleId (may be undefined), used to query overrides maps. 
*/\n readonly moduleId: string | undefined\n readonly stateTransaction: ModuleStateTransactionOptions\n}): (() => Effect.Effect<ResolvedTxnLanePolicy>) => {\n const builtinEnabled = normalizeBool(args.stateTransaction?.txnLanes?.enabled) ?? true\n const builtinBudgetMs = normalizeMs(args.stateTransaction?.txnLanes?.budgetMs) ?? 1\n const builtinDebounceMs = normalizeMs(args.stateTransaction?.txnLanes?.debounceMs) ?? 0\n const builtinMaxLagMs = normalizeMs(args.stateTransaction?.txnLanes?.maxLagMs) ?? 50\n const builtinAllowCoalesce = normalizeBool(args.stateTransaction?.txnLanes?.allowCoalesce) ?? true\n const builtinYieldStrategy: TxnLaneYieldStrategy =\n args.stateTransaction?.txnLanes?.yieldStrategy === 'inputPending' ? 'inputPending' : 'baseline'\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(StateTransactionConfigTag)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(overridesOpt) ? overridesOpt.value : undefined\n\n let enabled = builtinEnabled\n let budgetMs = builtinBudgetMs\n let debounceMs = builtinDebounceMs\n let maxLagMs = builtinMaxLagMs\n let allowCoalesce = builtinAllowCoalesce\n let yieldStrategy: TxnLaneYieldStrategy = builtinYieldStrategy\n\n let overrideMode: ResolvedTxnLanePolicy['overrideMode'] = undefined\n\n let configScope: TxnLanePolicyScope = 'builtin'\n\n const applyPatch = (\n patch: TxnLanesPatch | StateTransactionOverrides | undefined,\n scope: TxnLanePolicyScope,\n ): void => {\n if (!patch) return\n\n const raw = (patch as any).txnLanes != null ? 
(patch as any).txnLanes : patch\n if (!raw || typeof raw !== 'object') return\n\n let changed = false\n\n const nextEnabled = normalizeBool((raw as any).enabled)\n if (nextEnabled != null) {\n enabled = nextEnabled\n changed = true\n }\n\n const nextOverrideMode = (raw as any).overrideMode\n if (nextOverrideMode === 'forced_off' || nextOverrideMode === 'forced_sync') {\n overrideMode = nextOverrideMode\n changed = true\n }\n\n const nextBudgetMs = normalizeMs((raw as any).budgetMs)\n if (nextBudgetMs != null) {\n budgetMs = nextBudgetMs\n changed = true\n }\n\n const nextDebounceMs = normalizeMs((raw as any).debounceMs)\n if (nextDebounceMs != null) {\n debounceMs = nextDebounceMs\n changed = true\n }\n\n const nextMaxLagMs = normalizeMs((raw as any).maxLagMs)\n if (nextMaxLagMs != null) {\n maxLagMs = nextMaxLagMs\n changed = true\n }\n\n const nextAllowCoalesce = normalizeBool((raw as any).allowCoalesce)\n if (nextAllowCoalesce != null) {\n allowCoalesce = nextAllowCoalesce\n changed = true\n }\n\n const nextYieldStrategy = (raw as any).yieldStrategy\n if (nextYieldStrategy === 'baseline' || nextYieldStrategy === 'inputPending') {\n yieldStrategy = nextYieldStrategy\n changed = true\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch =\n moduleId && runtimeConfig?.txnLanesOverridesByModuleId\n ? runtimeConfig.txnLanesOverridesByModuleId[moduleId]\n : undefined\n const providerModulePatch =\n moduleId && providerOverrides?.txnLanesOverridesByModuleId\n ? providerOverrides.txnLanesOverridesByModuleId[moduleId]\n : undefined\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n const effectiveEnabled = overrideMode ? false : enabled\n const queueMode: TxnLaneQueueMode = effectiveEnabled ? 
'lanes' : 'fifo'\n\n return {\n enabled: effectiveEnabled,\n ...(overrideMode ? { overrideMode } : {}),\n configScope,\n budgetMs,\n debounceMs,\n maxLagMs,\n allowCoalesce,\n yieldStrategy,\n queueMode,\n }\n })\n}\n","import { Effect, Option } from 'effect'\nimport {\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n type StateTransactionOverrides,\n type StateTransactionTraitConvergeOverrides,\n type TraitConvergeTimeSlicingPatch,\n} from './env.js'\nimport { normalizePositiveNumber } from './normalize.js'\nimport type { TraitConvergeConfigScope, TraitConvergeRequestedMode } from '../../state-trait/model.js'\n\nexport type ResolvedTraitConvergeTimeSlicingConfig = {\n readonly enabled: boolean\n readonly debounceMs: number\n readonly maxLagMs: number\n}\n\nexport type ResolvedTraitConvergeConfig = {\n readonly traitConvergeMode: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs: number\n readonly traitConvergeDecisionBudgetMs: number\n readonly traitConvergeTimeSlicing: ResolvedTraitConvergeTimeSlicingConfig\n readonly configScope: TraitConvergeConfigScope\n}\n\ntype ModuleStateTransactionOptions =\n | {\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeMode?: 'auto' | 'full' | 'dirty'\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n }\n | undefined\n\nconst normalizePositiveMs = normalizePositiveNumber\n\nconst normalizeRequestedMode = (mode: unknown): TraitConvergeRequestedMode | undefined =>\n mode === 'auto' || mode === 'full' || mode === 'dirty' ? mode : undefined\n\nconst normalizeBool = (value: unknown): boolean | undefined => (typeof value === 'boolean' ? value : undefined)\n\nexport const makeResolveTraitConvergeConfig = (args: {\n /** Original options.moduleId (may be undefined); used for module overrides map lookup. 
*/\n readonly moduleId: string | undefined\n readonly stateTransaction: ModuleStateTransactionOptions\n}): (() => Effect.Effect<ResolvedTraitConvergeConfig>) => {\n const builtinTraitConvergeBudgetMs: number = normalizePositiveMs(args.stateTransaction?.traitConvergeBudgetMs) ?? 200\n const builtinTraitConvergeDecisionBudgetMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeDecisionBudgetMs) ?? 0.5\n const builtinTraitConvergeMode: TraitConvergeRequestedMode =\n normalizeRequestedMode(args.stateTransaction?.traitConvergeMode) ?? 'auto'\n\n const builtinTimeSlicingEnabled: boolean =\n normalizeBool(args.stateTransaction?.traitConvergeTimeSlicing?.enabled) ?? false\n const builtinTimeSlicingDebounceMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeTimeSlicing?.debounceMs) ?? 16\n const builtinTimeSlicingMaxLagMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeTimeSlicing?.maxLagMs) ?? 200\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(StateTransactionConfigTag)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(overridesOpt) ? 
overridesOpt.value : undefined\n\n let traitConvergeMode = builtinTraitConvergeMode\n let traitConvergeBudgetMs = builtinTraitConvergeBudgetMs\n let traitConvergeDecisionBudgetMs = builtinTraitConvergeDecisionBudgetMs\n let traitConvergeTimeSlicingEnabled = builtinTimeSlicingEnabled\n let traitConvergeTimeSlicingDebounceMs = builtinTimeSlicingDebounceMs\n let traitConvergeTimeSlicingMaxLagMs = builtinTimeSlicingMaxLagMs\n\n let configScope: TraitConvergeConfigScope = 'builtin'\n\n const applyPatch = (\n patch: StateTransactionTraitConvergeOverrides | StateTransactionOverrides | undefined,\n scope: TraitConvergeConfigScope,\n ): void => {\n if (!patch) return\n let changed = false\n\n const mode = normalizeRequestedMode((patch as any).traitConvergeMode)\n if (mode) {\n traitConvergeMode = mode\n changed = true\n }\n\n const budgetMs = normalizePositiveMs((patch as any).traitConvergeBudgetMs)\n if (budgetMs != null) {\n traitConvergeBudgetMs = budgetMs\n changed = true\n }\n\n const decisionBudgetMs = normalizePositiveMs((patch as any).traitConvergeDecisionBudgetMs)\n if (decisionBudgetMs != null) {\n traitConvergeDecisionBudgetMs = decisionBudgetMs\n changed = true\n }\n\n const timeSlicing = (patch as any).traitConvergeTimeSlicing\n if (timeSlicing && typeof timeSlicing === 'object') {\n const enabled = normalizeBool((timeSlicing as any).enabled)\n if (enabled != null) {\n traitConvergeTimeSlicingEnabled = enabled\n changed = true\n }\n\n const debounceMs = normalizePositiveMs((timeSlicing as any).debounceMs)\n if (debounceMs != null) {\n traitConvergeTimeSlicingDebounceMs = debounceMs\n changed = true\n }\n\n const maxLagMs = normalizePositiveMs((timeSlicing as any).maxLagMs)\n if (maxLagMs != null) {\n traitConvergeTimeSlicingMaxLagMs = maxLagMs\n changed = true\n }\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch =\n moduleId && runtimeConfig?.traitConvergeOverridesByModuleId\n ? 
runtimeConfig.traitConvergeOverridesByModuleId[moduleId]\n : undefined\n const providerModulePatch =\n moduleId && providerOverrides?.traitConvergeOverridesByModuleId\n ? providerOverrides.traitConvergeOverridesByModuleId[moduleId]\n : undefined\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n return {\n traitConvergeMode,\n traitConvergeBudgetMs,\n traitConvergeDecisionBudgetMs,\n traitConvergeTimeSlicing: {\n enabled: traitConvergeTimeSlicingEnabled,\n debounceMs: traitConvergeTimeSlicingDebounceMs,\n maxLagMs: traitConvergeTimeSlicingMaxLagMs,\n },\n configScope,\n }\n })\n}\n","import type { Schema } from 'effect'\nimport * as SchemaAST from 'effect/SchemaAST'\nimport {\n type StateTraitProgram,\n type StateTraitSpec,\n type StateTraitEntry,\n type StateTraitGraph,\n type StateTraitGraphEdge,\n type StateTraitGraphNode,\n type StateTraitField,\n type StateTraitFieldTrait,\n type StateTraitPlan,\n type StateTraitPlanStep,\n type StateTraitResource,\n type StateTraitSchemaPathRef,\n collectNodeMeta,\n normalizeSpec,\n} from './model.js'\nimport * as Meta from './meta.js'\nimport {\n compareFieldPath,\n getFieldPathId,\n makeFieldPathIdRegistry,\n normalizeFieldPath,\n type FieldPath,\n type FieldPathId,\n} from '../field-path.js'\nimport { fnv1a32, stableStringify } from '../digest.js'\nimport type { ConvergeStaticIrRegistry } from './converge-ir.js'\n\nconst nowPerf = (): number =>\n typeof globalThis.performance !== 'undefined' && typeof globalThis.performance.now === 'function'\n ? 
globalThis.performance.now()\n : Date.now()\n\ntype ConvergeWriter = Extract<StateTraitEntry<any, string>, { readonly kind: 'computed' | 'link' }>\n\nconst getConvergeWriterDeps = (entry: ConvergeWriter): ReadonlyArray<string> => {\n if (entry.kind === 'computed') {\n return ((entry.meta as any)?.deps ?? []) as ReadonlyArray<string>\n }\n return [entry.meta.from as string]\n}\n\nconst computeConvergeTopoOrder = (\n writers: ReadonlyArray<ConvergeWriter>,\n): { readonly order: ReadonlyArray<string>; readonly configError?: ConvergeStaticIrRegistry['configError'] } => {\n const writerByPath = new Map<string, ConvergeWriter>()\n for (const entry of writers) {\n const existing = writerByPath.get(entry.fieldPath)\n if (existing) {\n return {\n order: [],\n configError: {\n code: 'MULTIPLE_WRITERS',\n message: `[StateTrait.converge] Multiple writers for field \"${entry.fieldPath}\" (${existing.kind} + ${entry.kind}).`,\n fields: [entry.fieldPath],\n },\n }\n }\n writerByPath.set(entry.fieldPath, entry)\n }\n\n const nodes = new Set<string>()\n for (const entry of writers) {\n nodes.add(entry.fieldPath)\n }\n\n const indegree = new Map<string, number>()\n const forward = new Map<string, Array<string>>()\n\n for (const node of nodes) {\n indegree.set(node, 0)\n forward.set(node, [])\n }\n\n for (const entry of writers) {\n const to = entry.fieldPath\n const deps = getConvergeWriterDeps(entry)\n for (const dep of deps) {\n if (!nodes.has(dep)) continue\n forward.get(dep)!.push(to)\n indegree.set(to, (indegree.get(to) ?? 0) + 1)\n }\n }\n\n const queue: Array<string> = []\n for (const [node, deg] of indegree.entries()) {\n if (deg === 0) queue.push(node)\n }\n\n const order: Array<string> = []\n while (queue.length) {\n const n = queue.shift()!\n order.push(n)\n const outs = forward.get(n)!\n for (const to of outs) {\n const next = (indegree.get(to) ?? 
0) - 1\n indegree.set(to, next)\n if (next === 0) queue.push(to)\n }\n }\n\n if (order.length !== nodes.size) {\n const remaining = Array.from(nodes).filter((n) => !order.includes(n))\n return {\n order: [],\n configError: {\n code: 'CYCLE_DETECTED',\n message: `[StateTrait.converge] Cycle detected in computed/link graph: ${remaining.join(', ')}`,\n fields: remaining,\n },\n }\n }\n\n return { order }\n}\n\nconst collectSchemaFieldPaths = (schema: Schema.Schema<any, any>): ReadonlyArray<FieldPath> => {\n const byKey = new Map<string, FieldPath>()\n\n const add = (path: FieldPath): void => {\n const normalized = normalizeFieldPath(path)\n if (!normalized) return\n byKey.set(JSON.stringify(normalized), normalized)\n }\n\n const visit = (ast: SchemaAST.AST, prefix: ReadonlyArray<string>, seen: Set<SchemaAST.AST>): void => {\n let current: SchemaAST.AST = ast\n\n // Unwrap Suspend/Refinement (recursive schema / branded schema).\n while (true) {\n if (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) return\n seen.add(current)\n current = current.f()\n continue\n }\n if (SchemaAST.isRefinement(current)) {\n current = current.from\n continue\n }\n break\n }\n\n if (SchemaAST.isTransformation(current)) {\n visit(current.to, prefix, seen)\n visit(current.from, prefix, seen)\n return\n }\n\n if (SchemaAST.isUnion(current)) {\n for (const t of current.types) {\n visit(t, prefix, seen)\n }\n return\n }\n\n // Array / Tuple: indices do not enter the FieldPathId space; recurse into element types to support `items[0].name -> items.name`.\n if (SchemaAST.isTupleType(current)) {\n for (const e of current.elements) {\n visit(e.type, prefix, seen)\n }\n for (const r of current.rest) {\n visit(r.type, prefix, seen)\n }\n return\n }\n\n if (SchemaAST.isTypeLiteral(current)) {\n for (const ps of current.propertySignatures) {\n const seg = String(ps.name)\n if (!seg) continue\n const next = [...prefix, seg]\n add(next)\n visit(ps.type, next, seen)\n }\n // Index signature 
(Record<string, T>) can't be enumerated statically: avoid generating misaligned dynamic key paths.\n return\n }\n\n // Any / Unknown / Object / Declaration (open types): cannot enumerate nested paths; stop conservatively.\n }\n\n visit(schema.ast as unknown as SchemaAST.AST, [], new Set())\n return Array.from(byKey.values()).sort(compareFieldPath)\n}\n\nconst buildConvergeIr = (\n stateSchema: Schema.Schema<any, any>,\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n): ConvergeStaticIrRegistry => {\n const startedAt = nowPerf()\n const generation = 0\n\n const writers = entries.filter((e): e is ConvergeWriter => e.kind === 'computed' || e.kind === 'link')\n\n const writersKey = writers\n .map((entry) => `${entry.kind}:${entry.fieldPath}`)\n .sort()\n .join('|')\n\n const depsKey = writers\n .map((entry) => {\n const deps = getConvergeWriterDeps(entry).slice().sort().join(',')\n const scheduling = (entry.meta as any)?.scheduling === 'deferred' ? 'd' : 'i'\n return `${entry.kind}:${entry.fieldPath}@${scheduling}=>${deps}`\n })\n .sort()\n .join('|')\n\n const writerByPath = new Map<string, ConvergeWriter>()\n for (const entry of writers) {\n writerByPath.set(entry.fieldPath, entry)\n }\n\n const topo = writers.length > 0 ? computeConvergeTopoOrder(writers) : { order: [] as ReadonlyArray<string> }\n const stepsById: Array<ConvergeWriter> = topo.configError ? 
[] : topo.order.map((path) => writerByPath.get(path)!)\n\n const fieldPathTable = new Map<string, FieldPath>()\n const addPath = (path: FieldPath): void => {\n for (let i = 1; i <= path.length; i++) {\n const prefix = path.slice(0, i)\n const key = JSON.stringify(prefix)\n if (!fieldPathTable.has(key)) fieldPathTable.set(key, prefix)\n }\n }\n\n // 065: FieldPathId semantics must cover all enumerable field paths of stateSchema; otherwise reducer patchPaths can't map and will fall back to dirtyAll.\n for (const schemaPath of collectSchemaFieldPaths(stateSchema)) {\n addPath(schemaPath)\n }\n\n for (const entry of writers) {\n const out = normalizeFieldPath(entry.fieldPath)\n if (out) addPath(out)\n for (const dep of getConvergeWriterDeps(entry)) {\n const depPath = normalizeFieldPath(dep)\n if (depPath) addPath(depPath)\n }\n }\n\n const fieldPaths = Array.from(fieldPathTable.values()).sort(compareFieldPath)\n const fieldPathIdRegistry = makeFieldPathIdRegistry(fieldPaths)\n const fieldPathsKey = fnv1a32(stableStringify(fieldPaths))\n\n const stepOutFieldPathIdByStepId: Array<FieldPathId> = []\n const stepDepsFieldPathIdsByStepId: Array<ReadonlyArray<FieldPathId>> = []\n const stepSchedulingByStepId: Array<'immediate' | 'deferred'> = []\n\n for (const entry of stepsById) {\n const out = normalizeFieldPath(entry.fieldPath)\n const outId = out != null ? 
getFieldPathId(fieldPathIdRegistry, out) : undefined\n if (outId == null) {\n throw new Error(`[StateTrait.build] Failed to map converge output fieldPath \"${entry.fieldPath}\" to FieldPathId.`)\n }\n\n const depIds: Array<FieldPathId> = []\n for (const dep of getConvergeWriterDeps(entry)) {\n const depPath = normalizeFieldPath(dep)\n if (!depPath) continue\n const depId = getFieldPathId(fieldPathIdRegistry, depPath)\n if (depId != null) depIds.push(depId)\n }\n\n stepOutFieldPathIdByStepId.push(outId)\n stepDepsFieldPathIdsByStepId.push(depIds)\n stepSchedulingByStepId.push((entry.meta as any)?.scheduling === 'deferred' ? 'deferred' : 'immediate')\n }\n\n const topoOrder = stepsById.map((_, i) => i)\n const buildDurationMs = Math.max(0, nowPerf() - startedAt)\n\n return {\n generation,\n writersKey,\n depsKey,\n fieldPathsKey,\n fieldPaths,\n fieldPathIdRegistry,\n ...(topo.configError ? { configError: topo.configError } : null),\n stepsById,\n stepOutFieldPathIdByStepId,\n stepDepsFieldPathIdsByStepId,\n stepSchedulingByStepId,\n topoOrder,\n buildDurationMs,\n }\n}\n\n/**\n * Builds a normalized FieldTrait from a StateTraitEntry.\n *\n * - Currently uses explicit deps for computed/source and link edges; deeper dependency analysis is intentionally not performed.\n * - If we evolve explicit dependency declarations further, extend here.\n */\nconst toFieldTrait = (entry: StateTraitEntry<any, string>): StateTraitFieldTrait => {\n const deps: Array<string> = []\n\n if (entry.kind === 'computed') {\n const meta = entry.meta as any\n const list = meta.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n } else if (entry.kind === 'source') {\n const meta = entry.meta as any\n const list = meta.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n } else if (entry.kind === 'link') {\n deps.push(entry.meta.from as string)\n } else if (entry.kind === 'check') {\n const meta = entry.meta as any\n const rules = (meta?.rules ?? 
{}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (rule && typeof rule === 'object') {\n const list = rule.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n }\n }\n }\n\n return {\n fieldId: entry.fieldPath,\n kind: entry.kind,\n // Keep meta identical to Entry.meta at runtime so install can reuse it directly.\n meta: entry.meta as any,\n deps,\n }\n}\n\n/**\n * Builds Field / Node / Edge / Resource sets from normalized entries.\n */\nconst buildGraph = (\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n nodeMetaByFieldPath: ReadonlyMap<string, Meta.TraitMeta>,\n): {\n readonly graph: StateTraitGraph\n readonly plan: StateTraitPlan\n} => {\n const fieldMap = new Map<string, StateTraitField>()\n const nodes: Array<StateTraitGraphNode> = []\n const edges: Array<StateTraitGraphEdge> = []\n const resourcesById = new Map<string, StateTraitResource>()\n const planSteps: Array<StateTraitPlanStep> = []\n\n const ensureField = (fieldPath: string): StateTraitField => {\n let field = fieldMap.get(fieldPath)\n if (!field) {\n field = {\n id: fieldPath,\n path: fieldPath,\n traits: [],\n }\n fieldMap.set(fieldPath, field)\n }\n return field\n }\n\n for (const entry of entries) {\n const fieldPath = entry.fieldPath\n const field = ensureField(fieldPath)\n const trait = toFieldTrait(entry)\n\n ;(field.traits as Array<StateTraitFieldTrait>).push(trait)\n\n // Build Graph edges and Plan steps by kind.\n if (entry.kind === 'computed') {\n const stepId = `computed:${fieldPath}`\n planSteps.push({\n id: stepId,\n kind: 'computed-update',\n targetFieldPath: fieldPath,\n // Note: the current version does not statically analyze computed dependencies; sourceFieldPaths remains empty.\n })\n // If deps is explicitly declared, add Graph edges (for diagnostics / reverse-closure computation).\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps) {\n for (const dep of deps) 
{\n ensureField(dep)\n edges.push({\n id: `computed:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'computed',\n })\n }\n }\n } else if (entry.kind === 'link') {\n const from = entry.meta.from as string\n ensureField(from)\n\n const edgeId = `link:${from}->${fieldPath}`\n edges.push({\n id: edgeId,\n from,\n to: fieldPath,\n kind: 'link',\n })\n\n planSteps.push({\n id: `link:${fieldPath}`,\n kind: 'link-propagate',\n targetFieldPath: fieldPath,\n sourceFieldPaths: [from],\n debugInfo: {\n graphEdgeId: edgeId,\n },\n })\n } else if (entry.kind === 'source') {\n const resourceId = entry.meta.resource\n const resourceMeta = Meta.sanitize((entry.meta as any).meta)\n\n const existing = resourcesById.get(resourceId)\n if (existing) {\n const ownerFields = [...existing.ownerFields, fieldPath]\n let meta = existing.meta\n let metaOrigin = existing.metaOrigin\n let metaConflicts = existing.metaConflicts\n\n if (resourceMeta) {\n const merged = Meta.mergeCanonical(\n { meta, origin: metaOrigin, conflicts: metaConflicts },\n { origin: fieldPath, meta: resourceMeta },\n )\n meta = merged.meta\n metaOrigin = merged.origin\n metaConflicts = merged.conflicts\n }\n\n resourcesById.set(resourceId, {\n ...existing,\n ownerFields,\n meta,\n metaOrigin,\n metaConflicts,\n })\n } else {\n resourcesById.set(resourceId, {\n resourceId,\n // Use a simple identifier string for now; may evolve into a structured form based on key rules.\n keySelector: `StateTrait.source@${fieldPath}`,\n ownerFields: [fieldPath],\n meta: resourceMeta,\n metaOrigin: resourceMeta ? 
fieldPath : undefined,\n })\n }\n\n planSteps.push({\n id: `source:${fieldPath}`,\n kind: 'source-refresh',\n targetFieldPath: fieldPath,\n resourceId,\n keySelectorId: `StateTrait.source@${fieldPath}`,\n })\n\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps) {\n for (const dep of deps) {\n ensureField(dep)\n edges.push({\n id: `source-dep:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'source-dep',\n })\n }\n }\n } else if (entry.kind === 'check') {\n planSteps.push({\n id: `check:${fieldPath}`,\n kind: 'check-validate',\n targetFieldPath: fieldPath,\n })\n\n // If the rule explicitly declares deps, add Graph edges (for ReverseClosure scoped validate).\n if (trait.deps.length > 0) {\n for (const dep of trait.deps) {\n ensureField(dep)\n edges.push({\n id: `check-dep:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'check-dep',\n })\n }\n }\n }\n }\n\n for (const field of fieldMap.values()) {\n nodes.push({\n id: field.id,\n field,\n traits: field.traits,\n meta: nodeMetaByFieldPath.get(field.id),\n })\n }\n\n const graph: StateTraitGraph = {\n _tag: 'StateTraitGraph',\n nodes,\n edges,\n resources: Array.from(resourcesById.values()),\n }\n\n const plan: StateTraitPlan = {\n _tag: 'StateTraitPlan',\n steps: planSteps,\n }\n\n return { graph, plan }\n}\n\n/**\n * Performs a simple cycle detection for link edges.\n *\n * - Only considers edges with kind = 'link'; computed/source do not participate in the first version.\n * - On cycle detection, throws an error with path context to avoid infinite updates at runtime.\n */\nconst assertNoLinkCycles = (edges: ReadonlyArray<StateTraitGraphEdge>): void => {\n const adjacency = new Map<string, string[]>()\n\n for (const edge of edges) {\n if (edge.kind !== 'link') continue\n const list = adjacency.get(edge.from) ?? 
[]\n list.push(edge.to)\n adjacency.set(edge.from, list)\n }\n\n const visited = new Set<string>()\n const stack = new Set<string>()\n\n const dfs = (node: string): void => {\n if (stack.has(node)) {\n throw new Error(\n `[StateTrait.build] link cycle detected at field \"${node}\". ` +\n 'Please check link traits for circular dependencies.',\n )\n }\n if (visited.has(node)) return\n visited.add(node)\n stack.add(node)\n\n const nexts = adjacency.get(node)\n if (nexts) {\n for (const to of nexts) {\n dfs(to)\n }\n }\n\n stack.delete(node)\n }\n\n for (const node of adjacency.keys()) {\n if (!visited.has(node)) {\n dfs(node)\n }\n }\n}\n\nconst collectSchemaPaths = (\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n): ReadonlyArray<StateTraitSchemaPathRef> => {\n const byKey = new Map<string, StateTraitSchemaPathRef>()\n\n const add = (ref: StateTraitSchemaPathRef): void => {\n if (!ref.path) return\n const k = `${ref.kind}|${ref.entryKind}|${ref.entryFieldPath}|${ref.ruleName ?? ''}|${ref.path}`\n byKey.set(k, ref)\n }\n\n const getCheckWritebackPath = (entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>): string => {\n const wb = (entry.meta as any)?.writeback\n const p = wb && typeof wb === 'object' ? (wb as any).path : undefined\n const writebackPath = typeof p === 'string' && p.startsWith('errors.') ? p : undefined\n\n if (writebackPath) return writebackPath\n\n const fieldPath = entry.fieldPath\n if (fieldPath.endsWith('[]')) {\n return `errors.${fieldPath.slice(0, -2)}`\n }\n return `errors.${fieldPath}`\n }\n\n for (const entry of entries) {\n add({\n kind: 'fieldPath',\n entryKind: entry.kind,\n entryFieldPath: entry.fieldPath,\n path: entry.fieldPath,\n })\n\n if (entry.kind === 'computed' || entry.kind === 'source') {\n const deps = ((entry.meta as any)?.deps ?? 
[]) as ReadonlyArray<string>\n for (const dep of deps) {\n add({\n kind: 'dep',\n entryKind: entry.kind,\n entryFieldPath: entry.fieldPath,\n path: dep,\n })\n }\n }\n\n if (entry.kind === 'link') {\n add({\n kind: 'link_from',\n entryKind: 'link',\n entryFieldPath: entry.fieldPath,\n path: entry.meta.from as string,\n })\n }\n\n if (entry.kind === 'check') {\n add({\n kind: 'check_writeback',\n entryKind: 'check',\n entryFieldPath: entry.fieldPath,\n path: getCheckWritebackPath(entry),\n })\n\n const rules = ((entry.meta as any)?.rules ?? {}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (!rule || typeof rule !== 'object') continue\n const deps = (rule.deps ?? []) as ReadonlyArray<string>\n for (const dep of deps) {\n add({\n kind: 'dep',\n entryKind: 'check',\n entryFieldPath: entry.fieldPath,\n ruleName: name,\n path: dep,\n })\n }\n }\n }\n }\n\n return Array.from(byKey.entries())\n .sort((a, b) => a[0].localeCompare(b[0]))\n .map(([, v]) => v)\n}\n\n/**\n * Builds a StateTraitProgram from the given stateSchema and trait spec.\n *\n * - Pure function: does not depend on external Env / global state.\n * - Current implementation focuses on:\n * - Normalizing Spec into entries.\n * - Building a lightweight Graph / Plan from entries.\n * - Running basic cycle detection for link edges.\n *\n * If we later need finer-grained dependency analysis (e.g. 
static analysis for computed/key),\n * evolve it inside this module without changing the public API surface.\n */\nexport const build = <S extends object>(\n stateSchema: Schema.Schema<S, any>,\n spec: StateTraitSpec<S>,\n): StateTraitProgram<S> => {\n const entries = normalizeSpec(spec) as ReadonlyArray<StateTraitEntry<S, string>>\n const nodeMetaByFieldPath = collectNodeMeta(spec)\n\n // Phase 4 (US2): require explicit deps (Graph/diagnostics/replay treat deps as the single dependency source of truth).\n for (const entry of entries) {\n if (entry.kind === 'computed') {\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for computed \"${entry.fieldPath}\". ` +\n 'Please use StateTrait.computed({ deps: [...], get: ... }).',\n )\n }\n }\n if (entry.kind === 'source') {\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for source \"${entry.fieldPath}\". ` +\n 'Please provide meta.deps for StateTrait.source({ deps: [...], ... }).',\n )\n }\n }\n if (entry.kind === 'check') {\n const rules = ((entry.meta as any)?.rules ?? {}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (typeof rule === 'function' || !rule || typeof rule !== 'object') {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for check \"${entry.fieldPath}\" rule \"${name}\". ` +\n 'Please use { deps: [...], validate: ... } form.',\n )\n }\n if ((rule as any).deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for check \"${entry.fieldPath}\" rule \"${name}\". 
` +\n 'Please provide deps: [...].',\n )\n }\n }\n }\n }\n\n const { graph, plan } = buildGraph(entries, nodeMetaByFieldPath)\n\n // Run a cycle check for link edges to avoid obvious configuration errors.\n assertNoLinkCycles(graph.edges)\n\n return {\n stateSchema,\n spec,\n entries: entries as ReadonlyArray<StateTraitEntry<any, string>>,\n graph,\n plan,\n convergeIr: buildConvergeIr(stateSchema as any, entries as ReadonlyArray<StateTraitEntry<any, string>>),\n schemaPaths: collectSchemaPaths(entries as ReadonlyArray<StateTraitEntry<any, string>>),\n }\n}\n","import { Effect } from 'effect'\nimport type { ModuleRuntime as PublicModuleRuntime } from './module.js'\nimport type {\n StateTraitProgram,\n TraitConvergeGenerationEvidence,\n TraitConvergePlanCacheEvidence,\n} from '../../state-trait/model.js'\nimport type * as StateTraitConverge from '../../state-trait/converge.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport { setRuntimeInternals } from './runtimeInternalsAccessor.js'\nimport type * as RowId from '../../state-trait/rowid.js'\n\nexport type TraitState = {\n program: StateTraitProgram<any> | undefined\n convergePlanCache: StateTraitConverge.ConvergePlanCache | undefined\n convergeGeneration: TraitConvergeGenerationEvidence\n pendingCacheMissReason: TraitConvergePlanCacheEvidence['missReason'] | undefined\n lastConvergeIrKeys: { readonly writersKey: string; readonly depsKey: string } | undefined\n listConfigs: ReadonlyArray<RowId.ListConfig>\n}\n\nexport const installInternalHooks = <S, A>(args: {\n readonly runtime: PublicModuleRuntime<S, A>\n readonly runtimeInternals: RuntimeInternals\n}): Effect.Effect<void, never, never> =>\n Effect.sync(() => {\n const { runtime, runtimeInternals } = args\n\n setRuntimeInternals(runtime as any, runtimeInternals)\n })\n","import { Context, Deferred } from 'effect'\n\nexport interface RootContext {\n context: Context.Context<any> | undefined\n readonly ready: 
Deferred.Deferred<Context.Context<any>, never>\n readonly appId?: string\n readonly appModuleIds?: ReadonlyArray<string>\n}\n\nclass RootContextTagImpl extends Context.Tag('@logixjs/core/RootContext')<RootContextTagImpl, RootContext>() {}\n\nexport const RootContextTag = RootContextTagImpl\n","import {\n Cause,\n Context,\n Deferred,\n Duration,\n Effect,\n Fiber,\n FiberRef,\n Layer,\n Option,\n PubSub,\n Queue,\n Ref,\n Scope,\n Stream,\n} from 'effect'\nimport * as Debug from '../DebugSink.js'\nimport { toSerializableErrorSummary } from '../errorSummary.js'\nimport * as TaskRunner from '../TaskRunner.js'\nimport { isDevEnv } from '../env.js'\nimport { getRuntimeInternals } from '../runtimeInternalsAccessor.js'\nimport * as Identity from './identity.js'\nimport * as ProcessConcurrency from './concurrency.js'\nimport * as ProcessEvents from './events.js'\nimport * as Meta from './meta.js'\nimport { makeSchemaSelector, resolveSchemaAst } from './selectorSchema.js'\nimport type {\n ProcessControlRequest,\n ProcessDefinition,\n ProcessEvent,\n ProcessInstallation,\n ProcessInstanceIdentity,\n ProcessInstanceStatus,\n ProcessPlatformEvent,\n ProcessScope,\n ProcessTrigger,\n ProcessTriggerSpec,\n SerializableErrorSummary,\n} from './protocol.js'\nimport * as Supervision from './supervision.js'\n\ntype InstallationKey = string\ntype ProcessInstanceId = string\n\ntype ProcessInstallMode = 'switch' | 'exhaust'\n\ntype InstallationState = {\n readonly identity: {\n readonly processId: string\n readonly scope: ProcessScope\n }\n readonly scopeKey: string\n readonly definition: ProcessDefinition\n env: Context.Context<any>\n forkScope: Scope.Scope\n readonly process: Effect.Effect<void, any, unknown>\n readonly kind: Meta.ProcessMeta['kind']\n enabled: boolean\n installedAt?: string\n nextRunSeq: number\n supervision: Supervision.SupervisionState\n currentInstanceId?: ProcessInstanceId\n pendingStart?: { readonly forkScope: Scope.Scope }\n}\n\ntype InstanceState = {\n 
readonly installationKey: InstallationKey\n readonly processInstanceId: ProcessInstanceId\n readonly identity: ProcessInstanceIdentity\n readonly processId: string\n readonly scope: ProcessScope\n readonly forkScope: Scope.Scope\n readonly platformTriggersQueue: Queue.Queue<ProcessTrigger>\n status: ProcessInstanceStatus\n nextEventSeq: number\n nextTriggerSeq: number\n fiber?: Fiber.RuntimeFiber<unknown, unknown>\n}\n\nexport interface ProcessRuntime {\n readonly install: <E, R>(\n process: Effect.Effect<void, E, R>,\n options: {\n readonly scope: ProcessScope\n readonly enabled?: boolean\n readonly installedAt?: string\n readonly mode?: ProcessInstallMode\n },\n ) => Effect.Effect<ProcessInstallation | undefined, never, R>\n readonly listInstallations: (filter?: {\n readonly scopeType?: ProcessScope['type']\n readonly scopeKey?: string\n }) => Effect.Effect<ReadonlyArray<ProcessInstallation>>\n readonly getInstanceStatus: (processInstanceId: string) => Effect.Effect<ProcessInstanceStatus | undefined>\n readonly controlInstance: (processInstanceId: string, request: ProcessControlRequest) => Effect.Effect<void>\n readonly deliverPlatformEvent: (event: ProcessPlatformEvent) => Effect.Effect<void>\n readonly events: Stream.Stream<ProcessEvent>\n readonly getEventsSnapshot: () => Effect.Effect<ReadonlyArray<ProcessEvent>>\n}\n\nexport class ProcessRuntimeTag extends Context.Tag('@logixjs/core/ProcessRuntime')<ProcessRuntimeTag, ProcessRuntime>() {}\n\nconst currentProcessTrigger = FiberRef.unsafeMake<ProcessTrigger | undefined>(undefined)\nconst currentProcessEventBudget = FiberRef.unsafeMake<Ref.Ref<ProcessEvents.ProcessRunEventBudgetState> | undefined>(\n undefined,\n)\nconst RUNTIME_BOOT_EVENT = 'runtime:boot' as const\n\nconst deriveDebugModuleId = (processId: string): string => `process:${processId}`\n\ntype NonPlatformTriggerSpec = Exclude<ProcessTriggerSpec, { readonly kind: 'platformEvent' }>\n\nconst deriveTxnAnchor = (event: ProcessEvent): { readonly 
txnSeq?: number; readonly txnId?: string } => {\n const trigger: any = event.trigger\n if (!trigger) return {}\n if (\n (trigger.kind === 'moduleAction' || trigger.kind === 'moduleStateChange') &&\n typeof trigger.instanceId === 'string' &&\n typeof trigger.txnSeq === 'number' &&\n Number.isFinite(trigger.txnSeq) &&\n trigger.txnSeq >= 1\n ) {\n const txnSeq = Math.floor(trigger.txnSeq)\n return {\n txnSeq,\n txnId: `${trigger.instanceId}::t${txnSeq}`,\n }\n }\n return {}\n}\n\nconst shouldNoopDueToSyncTxn = (scope: ProcessScope, kind: string): Effect.Effect<boolean> => {\n const moduleId = scope.type === 'moduleInstance' ? scope.moduleId : undefined\n const instanceId = scope.type === 'moduleInstance' ? scope.instanceId : undefined\n return TaskRunner.shouldNoopInSyncTransactionFiber({\n moduleId,\n instanceId,\n code: 'process::invalid_usage',\n severity: 'error',\n message:\n 'ProcessRuntime scheduling is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n \"Trigger/schedule Process outside the transaction window (e.g. 
in a watcher's run section or a separate fiber); \" +\n 'do not trigger Process directly inside a reducer / synchronous transaction body.',\n kind,\n })\n}\n\nconst resolveRuntimeStateSchemaAst = (runtime: unknown): ReturnType<typeof resolveSchemaAst> => {\n try {\n const internals = getRuntimeInternals(runtime as any)\n return resolveSchemaAst(internals.stateSchema)\n } catch {\n return undefined\n }\n}\n\nconst withModuleHint = (error: Error, moduleId: string): Error => {\n const hint = (error as any).hint\n if (typeof hint === 'string' && hint.length > 0) {\n if (!hint.includes('moduleId=')) {\n ;(error as any).hint = `moduleId=${moduleId}\\n${hint}`\n }\n return error\n }\n ;(error as any).hint = `moduleId=${moduleId}`\n return error\n}\n\nconst actionIdFromUnknown = (action: unknown): string | undefined => {\n if (!action || typeof action !== 'object') return undefined\n const anyAction = action as any\n if (typeof anyAction._tag === 'string' && anyAction._tag.length > 0) return anyAction._tag\n if (typeof anyAction.type === 'string' && anyAction.type.length > 0) return anyAction.type\n return undefined\n}\n\nexport const make = (options?: {\n readonly maxEventHistory?: number\n}): Effect.Effect<ProcessRuntime, never, Scope.Scope> =>\n Effect.gen(function* () {\n const runtimeScope = yield* Effect.scope\n const maxEventHistory =\n typeof options?.maxEventHistory === 'number' &&\n Number.isFinite(options.maxEventHistory) &&\n options.maxEventHistory >= 0\n ? 
Math.floor(options.maxEventHistory)\n : 500\n\n const installations = new Map<InstallationKey, InstallationState>()\n const instances = new Map<ProcessInstanceId, InstanceState>()\n\n const eventsBuffer: ProcessEvent[] = []\n const eventsHub = yield* PubSub.sliding<ProcessEvent>(Math.max(1, Math.min(2048, maxEventHistory)))\n\n const trimEvents = () => {\n if (maxEventHistory <= 0) {\n eventsBuffer.length = 0\n return\n }\n if (eventsBuffer.length <= maxEventHistory) return\n const excess = eventsBuffer.length - maxEventHistory\n eventsBuffer.splice(0, excess)\n }\n\n const recordDebugEvent = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n\n // diagnostics=off: avoid entering Debug sinks (near-zero cost); error cases are exposed via diagnostic events.\n if (diagnosticsLevel === 'off') {\n return\n }\n\n const processId = event.identity.identity.processId\n const processInstanceId = Identity.processInstanceIdFromIdentity(event.identity)\n const moduleId = deriveDebugModuleId(processId)\n const { txnSeq, txnId } = deriveTxnAnchor(event)\n\n yield* Debug.record({\n type: event.type,\n moduleId,\n instanceId: processInstanceId,\n identity: event.identity,\n severity: event.severity,\n eventSeq: event.eventSeq,\n timestampMs: event.timestampMs,\n trigger: event.trigger,\n dispatch: event.dispatch,\n error: event.error,\n txnSeq,\n txnId,\n } as any)\n })\n\n const publishEvent = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n eventsBuffer.push(event)\n trimEvents()\n yield* PubSub.publish(eventsHub, event)\n yield* recordDebugEvent(event)\n })\n\n const emit = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n const budgetRef = yield* FiberRef.get(currentProcessEventBudget)\n if (budgetRef) {\n const decision = yield* Ref.modify(budgetRef, (state) => {\n const [nextDecision, nextState] = 
ProcessEvents.applyProcessRunEventBudget(state, event)\n return [nextDecision, nextState] as const\n })\n\n if (decision._tag === 'emit' || decision._tag === 'emitSummary') {\n yield* publishEvent(decision.event)\n }\n return\n }\n\n const enforced = ProcessEvents.enforceProcessEventMaxBytes(event)\n yield* publishEvent(enforced.event)\n })\n\n const emitErrorDiagnostic = (\n scope: ProcessScope,\n processId: string,\n code: string,\n message: string,\n hint?: string,\n ): Effect.Effect<void> => {\n if (!isDevEnv()) {\n return Effect.void\n }\n const moduleId = scope.type === 'moduleInstance' ? scope.moduleId : undefined\n const instanceId = scope.type === 'moduleInstance' ? scope.instanceId : undefined\n return Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code,\n severity: 'error',\n message,\n hint,\n actionTag: processId,\n kind: 'process_runtime',\n })\n }\n\n const resolveMissingDependencies = (installation: InstallationState): ReadonlyArray<string> => {\n const declared = installation.definition.requires ?? 
[]\n const implicitFromTriggers: string[] = []\n for (const trigger of installation.definition.triggers) {\n if (trigger.kind === 'moduleAction' || trigger.kind === 'moduleStateChange') {\n implicitFromTriggers.push(trigger.moduleId)\n }\n }\n\n const requires = Array.from(new Set([...declared, ...implicitFromTriggers]))\n if (requires.length === 0) return []\n\n const missing: string[] = []\n for (const dep of requires) {\n if (typeof dep !== 'string' || dep.length === 0) continue\n\n // ModuleTag key convention: `@logixjs/Module/${id}`; Tag identity is derived from the key, so we can construct it on demand.\n const tag = Context.Tag(`@logixjs/Module/${dep}`)() as Context.Tag<any, any>\n const found = Context.getOption(installation.env, tag)\n if (Option.isNone(found)) {\n missing.push(dep)\n }\n }\n return missing\n }\n\n const stopInstance = (\n instance: InstanceState,\n reason: ProcessInstanceStatus['stoppedReason'],\n ): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (\n instance.status.status === 'stopped' ||\n instance.status.status === 'failed' ||\n instance.status.status === 'stopping'\n ) {\n return\n }\n\n const fiber = instance.fiber\n instance.status = {\n ...instance.status,\n status: 'stopping',\n stoppedReason: reason,\n }\n\n if (fiber) {\n yield* Fiber.interrupt(fiber)\n }\n\n yield* Queue.shutdown(instance.platformTriggersQueue)\n\n instance.status = {\n ...instance.status,\n status: 'stopped',\n stoppedReason: reason,\n }\n instance.fiber = undefined\n\n const evt: ProcessEvent = {\n type: 'process:stop',\n identity: instance.identity,\n severity: 'info',\n eventSeq: instance.nextEventSeq++,\n timestampMs: Date.now(),\n }\n yield* emit(evt)\n\n const installation = installations.get(instance.installationKey)\n if (installation?.pendingStart) {\n installation.pendingStart = undefined\n yield* startInstallation(instance.installationKey)\n }\n })\n\n const startInstallation: (installationKey: InstallationKey) => Effect.Effect<void> = 
(installationKey) =>\n Effect.gen(function* () {\n const installation = installations.get(installationKey)\n if (!installation) return\n installation.pendingStart = undefined\n\n const noop = yield* shouldNoopDueToSyncTxn(installation.identity.scope, 'process_start_in_transaction')\n if (noop) return\n\n // Do not start again if an active instance already exists.\n const currentId = installation.currentInstanceId\n if (currentId) {\n const current = instances.get(currentId)\n if (current && (current.status.status === 'running' || current.status.status === 'starting')) {\n return\n }\n }\n\n const runSeq = installation.nextRunSeq++\n const identity: ProcessInstanceIdentity = {\n identity: installation.identity,\n runSeq,\n }\n const processInstanceId = Identity.processInstanceIdFromIdentity(identity)\n\n const platformTriggersQueue = yield* Queue.sliding<ProcessTrigger>(64)\n\n const instanceState: InstanceState = {\n installationKey,\n processInstanceId,\n identity,\n processId: installation.identity.processId,\n scope: installation.identity.scope,\n forkScope: installation.forkScope,\n platformTriggersQueue,\n status: {\n identity,\n status: 'starting',\n },\n nextEventSeq: 1,\n nextTriggerSeq: 1,\n }\n\n instances.set(processInstanceId, instanceState)\n installation.currentInstanceId = processInstanceId\n\n // When forkScope is disposed (e.g. 
uiSubtree unmount), ensure the instance transitions to stopped and emits a stop event.\n // - Do not rely on unstable \"whether interruption reaches catchAllCause\" behavior.\n // - Do not double-register on runtimeScope; the runtime finalizer already stops all instances.\n if (installation.forkScope !== runtimeScope) {\n yield* Scope.addFinalizer(\n installation.forkScope as Scope.CloseableScope,\n Effect.suspend(() => {\n const status = instanceState.status.status\n if (status === 'stopped' || status === 'failed' || status === 'stopping') {\n return Effect.void\n }\n return stopInstance(instanceState, 'scopeDisposed')\n }).pipe(Effect.catchAllCause(() => Effect.void)),\n )\n }\n\n // start event: indicates the instance has entered the start flow (fiber has been forked).\n yield* emit({\n type: 'process:start',\n identity,\n severity: 'info',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n })\n\n const missing = resolveMissingDependencies(installation)\n if (missing.length > 0) {\n const hint = isDevEnv()\n ? [\n 'Strict scope dependency resolution: missing required modules in the current scope.',\n `missing: ${missing.join(', ')}`,\n '',\n 'fix:',\n '- Provide the missing module implementation(s) in the same scope via imports.',\n ` Example: RootModule.implement({ imports: [${missing[0]}.implement(...).impl], processes: [...] 
})`,\n '- Do not rely on cross-scope fallbacks / guessing instances.',\n ].join('\\n')\n : undefined\n\n const error: SerializableErrorSummary = {\n message: `Missing dependencies in scope: ${missing.join(', ')}`,\n code: 'process::missing_dependency',\n hint,\n }\n\n instanceState.status = {\n ...instanceState.status,\n status: 'failed',\n stoppedReason: 'failed',\n lastError: error,\n }\n\n yield* emit({\n type: 'process:error',\n identity,\n severity: 'error',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n\n yield* emitErrorDiagnostic(\n installation.identity.scope,\n installation.identity.processId,\n 'process::missing_dependency',\n error.message,\n hint,\n )\n return\n }\n\n const shouldRecordChainEvents = installation.definition.diagnosticsLevel !== 'off'\n\n const baseEnv = installation.env\n\n const makeWrappedEnv = (): Context.Context<any> => {\n if (!shouldRecordChainEvents) {\n return baseEnv\n }\n\n const requires = installation.definition.requires ?? []\n if (requires.length === 0) {\n return baseEnv\n }\n\n const ids = Array.from(new Set(requires))\n let nextEnv = baseEnv\n\n for (const moduleId of ids) {\n if (typeof moduleId !== 'string' || moduleId.length === 0) continue\n const tag = Context.Tag(`@logixjs/Module/${moduleId}`)() as Context.Tag<any, any>\n const found = Context.getOption(baseEnv, tag)\n if (Option.isNone(found)) continue\n const runtime = found.value as any\n\n const recordDispatch = (action: unknown) =>\n Effect.gen(function* () {\n const trigger = yield* FiberRef.get(currentProcessTrigger)\n if (!trigger) return\n\n const actionId = actionIdFromUnknown(action) ?? 'unknown'\n const dispatchModuleId = typeof runtime.moduleId === 'string' ? runtime.moduleId : moduleId\n const dispatchInstanceId = typeof runtime.instanceId === 'string' ? 
runtime.instanceId : 'unknown'\n\n const evt: ProcessEvent = {\n type: 'process:dispatch',\n identity,\n trigger,\n dispatch: {\n moduleId: dispatchModuleId,\n instanceId: dispatchInstanceId,\n actionId,\n },\n severity: 'info',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n }\n\n yield* emit(evt)\n })\n\n const wrapped = {\n ...runtime,\n dispatch: (action: unknown) => runtime.dispatch(action).pipe(Effect.tap(() => recordDispatch(action))),\n dispatchLowPriority: (action: unknown) =>\n runtime.dispatchLowPriority(action).pipe(Effect.tap(() => recordDispatch(action))),\n dispatchBatch: (actions: ReadonlyArray<unknown>) =>\n runtime\n .dispatchBatch(actions)\n .pipe(Effect.tap(() => Effect.forEach(actions, recordDispatch, { discard: true }))),\n }\n\n nextEnv = Context.add(tag, wrapped)(nextEnv)\n }\n\n return nextEnv\n }\n\n const wrappedEnv = makeWrappedEnv()\n const providedProcess = Effect.provide(installation.process, wrappedEnv)\n\n const makeTriggerStream = (spec: NonPlatformTriggerSpec): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n if (spec.kind === 'timer') {\n const interval = Duration.decodeUnknown(spec.timerId)\n if (Option.isNone(interval)) {\n const err = new Error(`[ProcessRuntime] invalid timerId (expected DurationInput): ${spec.timerId}`)\n ;(err as any).code = 'process::invalid_timer_id'\n ;(err as any).hint =\n \"timerId must be a valid DurationInput string, e.g. 
'10 millis', '1 seconds', '5 minutes'.\"\n return yield* Effect.fail(err)\n }\n\n return Stream.tick(interval.value).pipe(\n Stream.map(\n () =>\n ({\n kind: 'timer',\n name: spec.name,\n timerId: spec.timerId,\n }) satisfies ProcessTrigger,\n ),\n )\n }\n\n if (spec.kind === 'moduleAction') {\n const tag = Context.Tag(`@logixjs/Module/${spec.moduleId}`)() as Context.Tag<any, any>\n const found = Context.getOption(baseEnv, tag)\n if (Option.isNone(found)) {\n return yield* Effect.fail(new Error(`Missing module runtime in scope: ${spec.moduleId}`))\n }\n\n const runtime = found.value as any\n\n // perf: when diagnostics=off, avoid subscribing to actionsWithMeta$ (published inside txns; more subscribers hurt hot paths).\n // diagnostics=light/full needs txnSeq/txnId anchors, so only use actionsWithMeta$ when chain events are enabled.\n if (!shouldRecordChainEvents) {\n const stream = runtime.actions$ as Stream.Stream<any> | undefined\n if (!stream) {\n const err = new Error('ModuleRuntime does not provide actions$ (required for moduleAction trigger).')\n ;(err as any).code = 'process::missing_action_stream'\n ;(err as any).hint = `moduleId=${spec.moduleId}`\n return yield* Effect.fail(err)\n }\n\n return stream.pipe(\n Stream.filter((action: any) => actionIdFromUnknown(action) === spec.actionId),\n Stream.map(\n () =>\n ({\n kind: 'moduleAction',\n name: spec.name,\n moduleId: spec.moduleId,\n instanceId: runtime.instanceId as string,\n actionId: spec.actionId,\n txnSeq: 1,\n }) satisfies ProcessTrigger,\n ),\n )\n }\n\n const stream = runtime.actionsWithMeta$ as Stream.Stream<any> | undefined\n if (!stream) {\n const err = new Error(\n 'ModuleRuntime does not provide actionsWithMeta$ (required for moduleAction trigger).',\n )\n ;(err as any).code = 'process::missing_action_meta_stream'\n ;(err as any).hint = `moduleId=${spec.moduleId}`\n return yield* Effect.fail(err)\n }\n\n return stream.pipe(\n Stream.filter((evt: any) => actionIdFromUnknown(evt.value) === 
spec.actionId),\n Stream.map((evt: any) => {\n const txnSeq = evt?.meta?.txnSeq\n return {\n kind: 'moduleAction',\n name: spec.name,\n moduleId: spec.moduleId,\n instanceId: runtime.instanceId as string,\n actionId: spec.actionId,\n txnSeq: typeof txnSeq === 'number' ? txnSeq : 1,\n } satisfies ProcessTrigger\n }),\n )\n }\n\n // moduleStateChange\n const tag = Context.Tag(`@logixjs/Module/${spec.moduleId}`)() as Context.Tag<any, any>\n const found = Context.getOption(baseEnv, tag)\n if (Option.isNone(found)) {\n return yield* Effect.fail(new Error(`Missing module runtime in scope: ${spec.moduleId}`))\n }\n\n const runtime = found.value as any\n const schemaAst = resolveRuntimeStateSchemaAst(runtime)\n const selectorResult = makeSchemaSelector(spec.path, schemaAst)\n if (!selectorResult.ok) {\n return yield* Effect.fail(withModuleHint(selectorResult.error, spec.moduleId))\n }\n const selectorBase = selectorResult.selector\n const prevRef = yield* Ref.make<Option.Option<unknown>>(Option.none())\n\n const enableSelectorDiagnostics = shouldRecordChainEvents\n\n const selectorDiagnosticsRef = enableSelectorDiagnostics\n ? yield* Ref.make({\n windowStartedMs: Date.now(),\n triggersInWindow: 0,\n lastWarningAtMs: 0,\n })\n : undefined\n\n const sampleEveryMask = 0x7f // sample every 128 calls\n const slowSampleThresholdMs = 4\n const triggerWindowMs = 1000\n const triggerWarningThreshold = isDevEnv() ? 20 : 200\n const warningCooldownMs = 30_000\n\n let selectorCalls = 0\n let selectorSamples = 0\n let selectorSlowSamples = 0\n let selectorMaxSampleMs = 0\n\n const nowMs = (): number => {\n if (typeof performance !== 'undefined' && typeof performance.now === 'function') {\n return performance.now()\n }\n return Date.now()\n }\n\n const selector = enableSelectorDiagnostics\n ? 
(state: unknown): unknown => {\n selectorCalls += 1\n if ((selectorCalls & sampleEveryMask) !== 0) {\n return selectorBase(state)\n }\n\n const t0 = nowMs()\n const value = selectorBase(state)\n const dt = nowMs() - t0\n\n selectorSamples += 1\n if (dt >= slowSampleThresholdMs) {\n selectorSlowSamples += 1\n }\n if (dt > selectorMaxSampleMs) {\n selectorMaxSampleMs = dt\n }\n\n return value\n }\n : selectorBase\n\n const maybeWarnSelector = (trigger: ProcessTrigger): Effect.Effect<void> => {\n if (!selectorDiagnosticsRef) {\n return Effect.void\n }\n\n return Effect.gen(function* () {\n const now = Date.now()\n\n const decision = yield* Ref.modify(selectorDiagnosticsRef, (s) => {\n const windowExpired = now - s.windowStartedMs >= triggerWindowMs\n const windowStartedMs = windowExpired ? now : s.windowStartedMs\n const triggersInWindow = windowExpired ? 1 : s.triggersInWindow + 1\n\n const shouldCooldown = now - s.lastWarningAtMs < warningCooldownMs\n const tooFrequent = triggersInWindow >= triggerWarningThreshold\n const tooSlow = selectorMaxSampleMs >= slowSampleThresholdMs && selectorSamples > 0\n const shouldWarn = !shouldCooldown && (tooFrequent || tooSlow)\n\n const next = shouldWarn\n ? {\n windowStartedMs: now,\n triggersInWindow: 0,\n lastWarningAtMs: now,\n }\n : {\n ...s,\n windowStartedMs,\n triggersInWindow,\n }\n\n return [\n {\n shouldWarn,\n tooFrequent,\n tooSlow,\n triggersInWindow,\n },\n next,\n ] as const\n })\n\n if (!decision.shouldWarn) {\n return\n }\n\n const code = decision.tooFrequent ? 
'process::selector_high_frequency' : 'process::selector_slow'\n\n const hint = [\n `moduleId=${spec.moduleId}`,\n `path=${spec.path}`,\n `windowMs=${triggerWindowMs}`,\n `triggersInWindow=${decision.triggersInWindow}`,\n `threshold=${triggerWarningThreshold}`,\n `cooldownMs=${warningCooldownMs}`,\n '',\n 'selector sampling:',\n `calls=${selectorCalls}`,\n `sampled=${selectorSamples}`,\n `slowSamples(>=${slowSampleThresholdMs}ms)=${selectorSlowSamples}`,\n `maxSampleMs=${selectorMaxSampleMs.toFixed(2)}`,\n '',\n 'notes:',\n '- Ensure the selected value is stable (prefer primitive/tuple; avoid returning fresh objects).',\n '- Narrow the path to reduce change frequency; avoid selecting large objects.',\n ].join('\\n')\n\n selectorSamples = 0\n selectorSlowSamples = 0\n selectorMaxSampleMs = 0\n\n yield* emit({\n type: 'process:trigger',\n identity,\n trigger,\n severity: 'warning',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error: {\n message: 'moduleStateChange selector diagnostics warning',\n code,\n hint,\n },\n } satisfies ProcessEvent)\n })\n }\n\n const baseStream = (runtime.changesWithMeta(selector) as Stream.Stream<any>).pipe(\n Stream.mapEffect((evt: any) =>\n Ref.get(prevRef).pipe(\n Effect.flatMap((prev) => {\n if (Option.isSome(prev) && Object.is(prev.value, evt.value)) {\n return Effect.succeed(Option.none())\n }\n return Ref.set(prevRef, Option.some(evt.value)).pipe(Effect.as(Option.some(evt)))\n }),\n ),\n ),\n Stream.filterMap((opt) => opt),\n Stream.map((evt: any) => {\n const txnSeq = evt?.meta?.txnSeq\n return {\n kind: 'moduleStateChange',\n name: spec.name,\n moduleId: spec.moduleId,\n instanceId: runtime.instanceId as string,\n path: spec.path,\n txnSeq: typeof txnSeq === 'number' ? txnSeq : 1,\n } satisfies ProcessTrigger\n }),\n )\n\n return enableSelectorDiagnostics ? 
baseStream.pipe(Stream.tap(maybeWarnSelector)) : baseStream\n })\n\n const makeRun = (trigger: ProcessTrigger, fatal: Deferred.Deferred<Cause.Cause<any>>): Effect.Effect<void> =>\n Effect.locally(\n currentProcessTrigger,\n trigger,\n )(\n providedProcess.pipe(\n Effect.catchAllCause((cause) => {\n if (Cause.isInterruptedOnly(cause)) {\n return Effect.void\n }\n return Deferred.succeed(fatal, cause).pipe(\n Effect.asVoid,\n Effect.catchAll(() => Effect.void),\n )\n }),\n ),\n )\n\n const makeChainRun = (\n trigger: ProcessTrigger,\n fatal: Deferred.Deferred<Cause.Cause<any>>,\n ): Effect.Effect<void> => {\n if (!shouldRecordChainEvents) {\n return makeRun(trigger, fatal)\n }\n\n return Effect.gen(function* () {\n const budgetRef = yield* Ref.make(ProcessEvents.makeProcessRunEventBudgetState())\n return yield* Effect.locally(\n currentProcessEventBudget,\n budgetRef,\n )(emitTriggerEvent(trigger, 'info').pipe(Effect.zipRight(makeRun(trigger, fatal))))\n })\n }\n\n const assignTriggerSeq = (trigger: ProcessTrigger): ProcessTrigger => {\n if (!shouldRecordChainEvents) {\n return trigger\n }\n\n return {\n ...trigger,\n triggerSeq: instanceState.nextTriggerSeq++,\n }\n }\n\n const emitTriggerEvent = (trigger: ProcessTrigger, severity: ProcessEvent['severity']): Effect.Effect<void> => {\n if (!shouldRecordChainEvents) {\n return Effect.void\n }\n\n const evt: ProcessEvent = {\n type: 'process:trigger',\n identity,\n trigger,\n severity,\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n }\n return emit(evt)\n }\n\n const policy = installation.definition.concurrency\n const autoStart = installation.definition.triggers.some(\n (t) => t.kind === 'platformEvent' && t.platformEvent === RUNTIME_BOOT_EVENT,\n )\n const bootTriggerSpec = installation.definition.triggers.find(\n (t): t is Extract<ProcessTriggerSpec, { readonly kind: 'platformEvent' }> =>\n t.kind === 'platformEvent' && t.platformEvent === RUNTIME_BOOT_EVENT,\n )\n\n const instanceProgram = 
Effect.gen(function* () {\n const fatal = yield* Deferred.make<Cause.Cause<any>>()\n\n const platformEventStream: Stream.Stream<ProcessTrigger> = Stream.fromQueue(\n instanceState.platformTriggersQueue,\n )\n\n const nonPlatformTriggers = installation.definition.triggers.filter(\n (t): t is NonPlatformTriggerSpec => t.kind !== 'platformEvent',\n )\n\n const streams = yield* Effect.forEach(nonPlatformTriggers, makeTriggerStream)\n\n const triggerStream = Stream.mergeAll([platformEventStream, ...streams], {\n concurrency: 'unbounded',\n })\n\n const reportQueueOverflow = (\n info: ProcessConcurrency.ProcessTriggerQueueOverflowInfo,\n ): Effect.Effect<void> => {\n const err = new Error('Process trigger queue overflow (serial maxQueue guard).')\n ;(err as any).code = 'process::serial_queue_overflow'\n ;(err as any).hint = [\n `mode=${info.mode}`,\n `queue: current=${info.currentLength} peak=${info.peak}`,\n `maxQueue: configured=${info.limit.configured} guard=${info.limit.guard}`,\n `policy: ${JSON.stringify(info.policy)}`,\n '',\n 'fix:',\n '- Configure concurrency.maxQueue (serial) to a finite value, or switch to mode=latest/drop to avoid unbounded backlog.',\n ].join('\\n')\n return Deferred.succeed(fatal, Cause.fail(err)).pipe(\n Effect.asVoid,\n Effect.catchAll(() => Effect.void),\n )\n }\n\n const runnerFiber = yield* Effect.forkScoped(\n ProcessConcurrency.runProcessTriggerStream({\n stream: triggerStream,\n policy,\n assignTriggerSeq,\n run: (trigger) => makeChainRun(trigger, fatal),\n onDrop: (trigger) => emitTriggerEvent(trigger, 'warning'),\n onQueueOverflow: reportQueueOverflow,\n }),\n )\n\n if (autoStart) {\n yield* Queue.offer(instanceState.platformTriggersQueue, {\n kind: 'platformEvent',\n name: bootTriggerSpec?.name,\n platformEvent: RUNTIME_BOOT_EVENT,\n })\n }\n\n const cause = yield* Deferred.await(fatal)\n yield* Fiber.interrupt(runnerFiber)\n return yield* Effect.failCause(cause)\n })\n\n const fiber = yield* 
Effect.forkIn(installation.forkScope)(\n Effect.scoped(instanceProgram).pipe(\n Effect.catchAllCause((cause) =>\n Effect.gen(function* () {\n // Interruptions (typically from scope dispose / manual stop) should not be treated as process failures.\n // Otherwise we emit process:error/diagnostic during scope shutdown and may deadlock disposal.\n if (Cause.isInterruptedOnly(cause)) {\n // If stopInstance already advanced the status to stopping, stopInstance owns the stop event and final state.\n if (instanceState.status.status === 'stopping') {\n return\n }\n\n // Otherwise treat as a natural stop due to scope disposal (e.g. moduleInstance scope closing).\n instanceState.status = {\n ...instanceState.status,\n status: 'stopped',\n stoppedReason: 'scopeDisposed',\n }\n instanceState.fiber = undefined\n\n yield* Effect.uninterruptible(\n emit({\n type: 'process:stop',\n identity,\n severity: 'info',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n }),\n )\n\n const installation = installations.get(installationKey)\n if (installation?.pendingStart) {\n installation.pendingStart = undefined\n yield* startInstallation(installationKey)\n }\n return\n }\n\n const primary = Option.getOrElse(Cause.failureOption(cause), () =>\n Option.getOrElse(Cause.dieOption(cause), () => cause),\n )\n const summary = toSerializableErrorSummary(primary)\n const error: SerializableErrorSummary = summary.errorSummary as any\n\n instanceState.status = {\n ...instanceState.status,\n status: 'failed',\n stoppedReason: 'failed',\n lastError: error,\n }\n\n yield* emit({\n type: 'process:error',\n identity,\n severity: 'error',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n\n const decision = Supervision.onFailure(\n installation.definition.errorPolicy,\n installation.supervision,\n Date.now(),\n )\n installation.supervision = decision.nextState\n\n if (decision.decision === 'restart') {\n // supervise: controlled restart (runSeq 
increments) and emit a restart event.\n yield* emit({\n type: 'process:restart',\n identity,\n severity: 'warning',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n yield* startInstallation(installationKey)\n } else {\n yield* emitErrorDiagnostic(\n installation.identity.scope,\n installation.identity.processId,\n 'process::failed_stop',\n 'Process failed and stopped (failStop / restart limit reached).',\n `processId=${installation.identity.processId} scopeKey=${installation.scopeKey} failures=${decision.withinWindowFailures} maxRestarts=${decision.maxRestarts}`,\n )\n }\n }),\n ),\n ),\n )\n\n instanceState.fiber = fiber as Fiber.RuntimeFiber<unknown, unknown>\n instanceState.status = {\n ...instanceState.status,\n status: 'running',\n }\n\n // Best-effort: ensure the instance fiber starts subscribing to trigger streams before install/start returns,\n // avoiding lost moduleAction/moduleStateChange triggers right after env is built and dispatch happens.\n yield* Effect.yieldNow()\n })\n\n const install = <E, R>(\n process: Effect.Effect<void, E, R>,\n options: {\n readonly scope: ProcessScope\n readonly enabled?: boolean\n readonly installedAt?: string\n readonly mode?: ProcessInstallMode\n },\n ): Effect.Effect<ProcessInstallation | undefined, never, R> =>\n Effect.gen(function* () {\n const meta = Meta.getMeta(process)\n if (!meta) {\n return undefined\n }\n\n const env = yield* Effect.context<R>()\n const forkScopeOpt = yield* Effect.serviceOption(Scope.Scope)\n const forkScope = Option.isSome(forkScopeOpt) ? 
forkScopeOpt.value : runtimeScope\n\n const scopeKey = Identity.scopeKeyFromScope(options.scope)\n const identity = {\n processId: meta.definition.processId,\n scope: options.scope,\n } as const\n\n const installationKey = Identity.installationKeyFromIdentity(identity)\n const existing = installations.get(installationKey)\n if (existing) {\n existing.env = env as Context.Context<any>\n existing.forkScope = forkScope\n existing.enabled = options.enabled ?? true\n existing.installedAt = options.installedAt ?? existing.installedAt\n if (!existing.enabled) {\n existing.pendingStart = undefined\n return {\n identity,\n enabled: existing.enabled,\n installedAt: existing.installedAt,\n } satisfies ProcessInstallation\n }\n\n const currentId = existing.currentInstanceId\n const current = currentId ? instances.get(currentId) : undefined\n const status = current?.status.status\n\n if (status === 'running' || status === 'starting') {\n const mode: ProcessInstallMode = options.mode ?? 'switch'\n if (mode === 'switch' && current && current.forkScope !== forkScope) {\n existing.pendingStart = { forkScope }\n yield* Scope.addFinalizer(\n forkScope,\n Effect.sync(() => {\n const installation = installations.get(installationKey)\n if (!installation) return\n if (installation.pendingStart?.forkScope === forkScope) {\n installation.pendingStart = undefined\n }\n }),\n )\n } else {\n existing.pendingStart = undefined\n }\n return {\n identity,\n enabled: existing.enabled,\n installedAt: existing.installedAt,\n } satisfies ProcessInstallation\n }\n\n if (status === 'stopping') {\n const mode: ProcessInstallMode = options.mode ?? 
'switch'\n if (mode === 'switch') {\n existing.pendingStart = { forkScope }\n yield* Scope.addFinalizer(\n forkScope,\n Effect.sync(() => {\n const installation = installations.get(installationKey)\n if (!installation) return\n if (installation.pendingStart?.forkScope === forkScope) {\n installation.pendingStart = undefined\n }\n }),\n )\n } else {\n existing.pendingStart = undefined\n }\n return {\n identity,\n enabled: existing.enabled,\n installedAt: existing.installedAt,\n } satisfies ProcessInstallation\n }\n\n existing.pendingStart = undefined\n yield* startInstallation(installationKey)\n return {\n identity,\n enabled: existing.enabled,\n installedAt: existing.installedAt,\n } satisfies ProcessInstallation\n }\n\n // Derive an effect for this installation to avoid overwriting meta on the original Effect (reused across scopes).\n // Note: do not provide env eagerly; we may need to layer additional context per-trigger execution (e.g. dispatch chain diagnostics).\n const derived = Effect.suspend(() => process)\n Meta.attachMeta(derived, {\n ...meta,\n installationScope: options.scope,\n })\n\n const installation: InstallationState = {\n identity,\n scopeKey,\n definition: meta.definition,\n env: env as Context.Context<any>,\n forkScope,\n process: derived as unknown as Effect.Effect<void, any, unknown>,\n kind: meta.kind ?? 'process',\n enabled: options.enabled ?? 
true,\n installedAt: options.installedAt,\n nextRunSeq: 1,\n supervision: Supervision.initialState(),\n pendingStart: undefined,\n }\n\n installations.set(installationKey, installation)\n\n if (installation.enabled) {\n yield* startInstallation(installationKey)\n }\n\n return {\n identity,\n enabled: installation.enabled,\n installedAt: installation.installedAt,\n } satisfies ProcessInstallation\n })\n\n const listInstallations: ProcessRuntime['listInstallations'] = (filter) =>\n Effect.sync(() => {\n const scopeType = filter?.scopeType\n const scopeKey = filter?.scopeKey\n const out: ProcessInstallation[] = []\n for (const installation of installations.values()) {\n if (scopeType && installation.identity.scope.type !== scopeType) continue\n if (scopeKey && installation.scopeKey !== scopeKey) continue\n out.push({\n identity: installation.identity,\n enabled: installation.enabled,\n installedAt: installation.installedAt,\n })\n }\n return out\n })\n\n const getInstanceStatus: ProcessRuntime['getInstanceStatus'] = (processInstanceId) =>\n Effect.sync(() => instances.get(processInstanceId)?.status)\n\n const controlInstance: ProcessRuntime['controlInstance'] = (processInstanceId, request) =>\n Effect.suspend(() => {\n const instance = instances.get(processInstanceId)\n if (!instance) {\n return Effect.void\n }\n\n return shouldNoopDueToSyncTxn(instance.scope, 'process_control_in_transaction').pipe(\n Effect.flatMap((noop) => {\n if (noop) {\n return Effect.void\n }\n\n if (request.action === 'stop') {\n return stopInstance(instance, 'manualStop')\n }\n\n if (request.action === 'restart') {\n return stopInstance(instance, 'manualStop').pipe(\n Effect.flatMap(() => {\n const installation = installations.get(instance.installationKey)\n if (!installation) {\n return Effect.void\n }\n installation.currentInstanceId = undefined\n return startInstallation(instance.installationKey)\n }),\n )\n }\n\n // start: only applies to stopped instances; reuses current runSeq without 
incrementing.\n if (request.action === 'start') {\n if (instance.status.status === 'running' || instance.status.status === 'starting') {\n return Effect.void\n }\n\n const installation = installations.get(instance.installationKey)\n if (!installation) {\n return Effect.void\n }\n installation.currentInstanceId = undefined\n return startInstallation(instance.installationKey)\n }\n\n return Effect.void\n }),\n )\n })\n\n const deliverPlatformEvent: ProcessRuntime['deliverPlatformEvent'] = (event) =>\n Effect.gen(function* () {\n const noop = yield* TaskRunner.shouldNoopInSyncTransactionFiber({\n code: 'process::invalid_usage',\n severity: 'error',\n message:\n 'ProcessRuntime platform events are not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint: 'Deliver platformEvent outside the transaction window.',\n kind: 'process_platform_event_in_transaction',\n })\n if (noop) return\n\n const targets = Array.from(instances.values())\n const eventName = event.eventName\n\n yield* Effect.forEach(\n targets,\n (instance) =>\n Effect.suspend(() => {\n if (instance.status.status !== 'starting' && instance.status.status !== 'running') {\n return Effect.void\n }\n\n const installation = installations.get(instance.installationKey)\n if (!installation) {\n return Effect.void\n }\n\n const specs = installation.definition.triggers.filter(\n (t): t is Extract<ProcessTriggerSpec, { readonly kind: 'platformEvent' }> =>\n t.kind === 'platformEvent' && t.platformEvent === eventName,\n )\n if (specs.length === 0) {\n return Effect.void\n }\n\n return Effect.forEach(\n specs,\n (spec) =>\n Queue.offer(instance.platformTriggersQueue, {\n kind: 'platformEvent',\n name: spec.name,\n platformEvent: spec.platformEvent,\n } satisfies ProcessTrigger),\n { discard: true },\n )\n }),\n { discard: true },\n )\n })\n\n const eventsStream: ProcessRuntime['events'] = Stream.fromPubSub(eventsHub)\n\n const getEventsSnapshot: ProcessRuntime['getEventsSnapshot'] = 
() => Effect.sync(() => eventsBuffer.slice())\n\n yield* Effect.addFinalizer(() =>\n Effect.gen(function* () {\n for (const installation of installations.values()) {\n installation.pendingStart = undefined\n }\n for (const instance of instances.values()) {\n if (instance.fiber) {\n yield* stopInstance(instance, 'scopeDisposed')\n }\n }\n }).pipe(\n Effect.catchAllCause((cause) =>\n Effect.sync(() => {\n // Finalizers must not throw; best-effort logging only.\n if (isDevEnv()) {\n // eslint-disable-next-line no-console\n console.warn('[ProcessRuntime] finalizer failed', Cause.pretty(cause))\n }\n }),\n ),\n ),\n )\n\n return {\n install,\n listInstallations,\n getInstanceStatus,\n controlInstance,\n deliverPlatformEvent,\n events: eventsStream,\n getEventsSnapshot,\n } satisfies ProcessRuntime\n })\n\nexport const layer = (options?: { readonly maxEventHistory?: number }): Layer.Layer<ProcessRuntimeTag, never, never> =>\n Layer.scoped(ProcessRuntimeTag, make(options))\n","import { Effect, Fiber, Option, Ref, Scope, Stream } from 'effect'\nimport type { TaskRunnerMode } from '../TaskRunner.js'\nimport type { ProcessConcurrencyPolicy, ProcessTrigger } from './protocol.js'\n\nexport const DEFAULT_SERIAL_QUEUE_GUARD_LIMIT = 4096\nexport const DEFAULT_PARALLEL_LIMIT = 16\n\nexport const toTaskRunnerMode = (policy: ProcessConcurrencyPolicy): TaskRunnerMode => {\n switch (policy.mode) {\n case 'latest':\n return 'latest'\n case 'serial':\n return 'task'\n case 'drop':\n return 'exhaust'\n case 'parallel':\n return 'parallel'\n }\n}\n\nexport type ResolvedQueueLimit = {\n /** User-configured limit; treated as unlimited when omitted (still bounded by the guard). */\n readonly configured: number | 'unbounded'\n /** Runtime-enforced guard limit (prevents unbounded memory growth). 
*/\n readonly guard: number\n}\n\nexport const resolveQueueLimit = (\n maxQueue: unknown,\n options?: {\n readonly defaultGuard?: number\n },\n): ResolvedQueueLimit => {\n const defaultGuard = options?.defaultGuard ?? DEFAULT_SERIAL_QUEUE_GUARD_LIMIT\n\n const configured =\n typeof maxQueue === 'number' && Number.isFinite(maxQueue) && maxQueue >= 0 ? Math.floor(maxQueue) : 'unbounded'\n\n return {\n configured,\n guard: configured === 'unbounded' ? defaultGuard : configured,\n }\n}\n\nexport type ProcessTriggerQueueOverflowInfo = {\n readonly mode: 'serial' | 'parallel'\n readonly currentLength: number\n readonly peak: number\n readonly limit: ResolvedQueueLimit\n readonly policy: ProcessConcurrencyPolicy\n}\n\nexport const runProcessTriggerStream = (args: {\n readonly stream: Stream.Stream<ProcessTrigger>\n readonly policy: ProcessConcurrencyPolicy\n readonly assignTriggerSeq: (trigger: ProcessTrigger) => ProcessTrigger\n /** run a trigger to completion (the caller decides what a \\\"run\\\" means). */\n readonly run: (trigger: ProcessTrigger) => Effect.Effect<void, never, Scope.Scope>\n /** invoked when a trigger is dropped (only for mode=drop). */\n readonly onDrop: (trigger: ProcessTrigger) => Effect.Effect<void>\n /** invoked when internal queue guard is exceeded (fail-stop by default). */\n readonly onQueueOverflow: (info: ProcessTriggerQueueOverflowInfo) => Effect.Effect<void>\n readonly defaultParallelLimit?: number\n readonly defaultQueueGuard?: number\n}): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const policy = args.policy\n const defaultQueueGuard = args.defaultQueueGuard ?? 
DEFAULT_SERIAL_QUEUE_GUARD_LIMIT\n\n if (policy.mode === 'latest') {\n const stateRef = yield* Ref.make<{\n readonly fiber?: Fiber.RuntimeFiber<void, never>\n readonly runningId: number\n readonly nextId: number\n }>({ fiber: undefined, runningId: 0, nextId: 0 })\n\n const onTrigger = (trigger0: ProcessTrigger): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const trigger = args.assignTriggerSeq(trigger0)\n\n const [prevFiber, prevRunningId, runId] = yield* Ref.modify(stateRef, (s) => {\n const nextId = s.nextId + 1\n return [[s.fiber, s.runningId, nextId] as const, { ...s, nextId, runningId: nextId }] as const\n })\n\n if (prevFiber && prevRunningId !== 0) {\n const done = yield* Fiber.poll(prevFiber)\n if (Option.isNone(done)) {\n yield* Fiber.interruptFork(prevFiber)\n }\n }\n\n const fiber = yield* Effect.forkScoped(\n args\n .run(trigger)\n .pipe(Effect.ensuring(Ref.update(stateRef, (s) => (s.runningId === runId ? { ...s, runningId: 0 } : s)))),\n )\n\n yield* Ref.update(stateRef, (s) => ({ ...s, fiber }))\n })\n\n return yield* Stream.runForEach(args.stream, onTrigger)\n }\n\n const busyRef = yield* Ref.make(false)\n const serialStateRef = yield* Ref.make({\n running: false,\n queue: [] as ProcessTrigger[],\n peak: 0,\n })\n const parallelStateRef = yield* Ref.make({\n active: 0,\n queue: [] as ProcessTrigger[],\n peak: 0,\n })\n\n const serialQueueLimit = resolveQueueLimit(policy.maxQueue, { defaultGuard: defaultQueueGuard })\n const parallelQueueLimit = resolveQueueLimit(undefined, { defaultGuard: defaultQueueGuard })\n const parallelLimit =\n typeof policy.maxParallel === 'number' && Number.isFinite(policy.maxParallel) && policy.maxParallel >= 1\n ? Math.floor(policy.maxParallel)\n : (args.defaultParallelLimit ?? 
DEFAULT_PARALLEL_LIMIT)\n\n const drainSerial = (): Effect.Effect<void, never, Scope.Scope> =>\n Effect.suspend(() =>\n Ref.modify(serialStateRef, (state) => {\n if (state.running || state.queue.length === 0) {\n return [Option.none(), state] as const\n }\n const [next, ...rest] = state.queue\n return [Option.some(next), { ...state, running: true, queue: rest }] as const\n }).pipe(\n Effect.flatMap((next) =>\n Option.match(next, {\n onNone: () => Effect.void,\n onSome: (trigger) =>\n Effect.forkScoped(\n args\n .run(trigger)\n .pipe(\n Effect.ensuring(Ref.update(serialStateRef, (s) => ({ ...s, running: false }))),\n Effect.zipRight(drainSerial()),\n ),\n ).pipe(Effect.asVoid),\n }),\n ),\n ),\n )\n\n const drainParallel = (): Effect.Effect<void, never, Scope.Scope> =>\n Effect.suspend(() =>\n Ref.modify(parallelStateRef, (state) => {\n if (state.active >= parallelLimit || state.queue.length === 0) {\n return [Option.none(), state] as const\n }\n const [next, ...rest] = state.queue\n return [Option.some(next), { ...state, active: state.active + 1, queue: rest }] as const\n }).pipe(\n Effect.flatMap((next) =>\n Option.match(next, {\n onNone: () => Effect.void,\n onSome: (trigger) =>\n Effect.forkScoped(\n args.run(trigger).pipe(\n Effect.ensuring(\n Ref.update(parallelStateRef, (s) => ({\n ...s,\n active: Math.max(0, s.active - 1),\n })),\n ),\n Effect.zipRight(drainParallel()),\n ),\n ).pipe(Effect.asVoid, Effect.zipRight(drainParallel())),\n }),\n ),\n ),\n )\n\n const onTrigger = (trigger0: ProcessTrigger): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const trigger = args.assignTriggerSeq(trigger0)\n\n if (policy.mode === 'drop') {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n yield* args.onDrop(trigger)\n return\n }\n\n yield* Effect.forkScoped(args.run(trigger).pipe(Effect.ensuring(Ref.set(busyRef, false))))\n return\n }\n\n if (policy.mode === 'parallel') {\n const nextSize = yield* Ref.modify(parallelStateRef, (state) => {\n const queue = [...state.queue, trigger]\n return [queue.length, { ...state, queue, peak: Math.max(state.peak, queue.length) }] as const\n })\n\n if (nextSize > parallelQueueLimit.guard) {\n const state = yield* Ref.get(parallelStateRef)\n yield* args.onQueueOverflow({\n mode: 'parallel',\n currentLength: nextSize,\n peak: state.peak,\n limit: parallelQueueLimit,\n policy,\n })\n return\n }\n\n yield* drainParallel()\n return\n }\n\n // serial\n const nextSize = yield* Ref.modify(serialStateRef, (state) => {\n const queue = [...state.queue, trigger]\n return [queue.length, { ...state, queue, peak: Math.max(state.peak, queue.length) }] as const\n })\n\n if (nextSize > serialQueueLimit.guard) {\n const state = yield* Ref.get(serialStateRef)\n yield* args.onQueueOverflow({\n mode: 'serial',\n currentLength: nextSize,\n peak: state.peak,\n limit: serialQueueLimit,\n policy,\n })\n return\n }\n\n yield* drainSerial()\n })\n\n return yield* Stream.runForEach(args.stream, onTrigger)\n })\n","import type { ProcessEvent } from './protocol.js'\n\nexport const PROCESS_EVENT_MAX_BYTES = 4 * 1024\nexport const PROCESS_EVENT_MAX_EVENTS_PER_RUN = 50\nexport const PROCESS_EVENT_RESERVED_EVENTS_FOR_SUMMARY = 1\n\nexport type ProcessRunEventBudgetState = {\n readonly maxEvents: number\n readonly maxBytes: number\n readonly emitted: number\n readonly dropped: number\n readonly downgraded: number\n readonly summaryEmitted: boolean\n}\n\nexport const makeProcessRunEventBudgetState = (options?: {\n readonly maxEvents?: number\n readonly maxBytes?: number\n}): ProcessRunEventBudgetState => ({\n maxEvents:\n typeof options?.maxEvents === 'number' && 
Number.isFinite(options.maxEvents) && options.maxEvents >= 0\n ? Math.floor(options.maxEvents)\n : PROCESS_EVENT_MAX_EVENTS_PER_RUN,\n maxBytes:\n typeof options?.maxBytes === 'number' && Number.isFinite(options.maxBytes) && options.maxBytes >= 0\n ? Math.floor(options.maxBytes)\n : PROCESS_EVENT_MAX_BYTES,\n emitted: 0,\n dropped: 0,\n downgraded: 0,\n summaryEmitted: false,\n})\n\nexport type ProcessRunEventBudgetDecision =\n | {\n readonly _tag: 'emit'\n readonly event: ProcessEvent\n }\n | {\n readonly _tag: 'emitSummary'\n readonly event: ProcessEvent\n }\n | {\n readonly _tag: 'drop'\n }\n\nconst makeBudgetSummaryEvent = (args: {\n readonly sourceEvent: ProcessEvent\n readonly maxEvents: number\n readonly maxBytes: number\n readonly emitted: number\n readonly dropped: number\n readonly downgraded: number\n}): ProcessEvent => ({\n type: 'process:trigger',\n identity: args.sourceEvent.identity,\n trigger: args.sourceEvent.trigger,\n severity: 'warning',\n eventSeq: args.sourceEvent.eventSeq,\n timestampMs: args.sourceEvent.timestampMs,\n error: {\n message: 'Process run event budget exceeded; further trigger/dispatch events are suppressed.',\n code: 'process::event_budget_exceeded',\n hint: `maxEvents=${args.maxEvents} maxBytes=${args.maxBytes} emitted=${args.emitted} dropped=${args.dropped} downgraded=${args.downgraded}`,\n },\n})\n\nexport const applyProcessRunEventBudget = (\n state: ProcessRunEventBudgetState,\n event: ProcessEvent,\n): readonly [ProcessRunEventBudgetDecision, ProcessRunEventBudgetState] => {\n const maxEvents = Math.max(0, state.maxEvents)\n const maxBytes = Math.max(0, state.maxBytes)\n\n if (state.summaryEmitted) {\n return [\n { _tag: 'drop' },\n {\n ...state,\n dropped: state.dropped + 1,\n },\n ]\n }\n\n const reserve = PROCESS_EVENT_RESERVED_EVENTS_FOR_SUMMARY\n const allowedRegular = Math.max(0, maxEvents - reserve)\n\n if (state.emitted < allowedRegular) {\n const enforced = enforceProcessEventMaxBytes(event, { maxBytes })\n return 
[\n { _tag: 'emit', event: enforced.event },\n {\n ...state,\n emitted: state.emitted + 1,\n downgraded: state.downgraded + (enforced.downgraded ? 1 : 0),\n },\n ]\n }\n\n const dropped = state.dropped + 1\n const summary = makeBudgetSummaryEvent({\n sourceEvent: event,\n maxEvents,\n maxBytes,\n emitted: state.emitted,\n dropped,\n downgraded: state.downgraded,\n })\n const enforcedSummary = enforceProcessEventMaxBytes(summary, { maxBytes })\n\n return [\n { _tag: 'emitSummary', event: enforcedSummary.event },\n {\n ...state,\n emitted: Math.min(maxEvents, state.emitted + 1),\n dropped,\n downgraded: state.downgraded + (enforcedSummary.downgraded ? 1 : 0),\n summaryEmitted: true,\n },\n ]\n}\n\nexport const estimateEventBytes = (event: ProcessEvent): number => {\n const json = JSON.stringify(event)\n return typeof Buffer !== 'undefined' ? Buffer.byteLength(json, 'utf8') : new TextEncoder().encode(json).length\n}\n\nconst truncateChars = (value: string, maxLen: number): string =>\n value.length <= maxLen ? value : value.slice(0, maxLen)\n\nconst normalizeErrorSummary = (error: NonNullable<ProcessEvent['error']>): NonNullable<ProcessEvent['error']> => {\n const message = typeof error.message === 'string' && error.message.length > 0 ? error.message : 'Error'\n\n const hint = typeof error.hint === 'string' && error.hint.length > 0 ? truncateChars(error.hint, 1024) : undefined\n\n return {\n name: typeof error.name === 'string' && error.name.length > 0 ? error.name : undefined,\n message: truncateChars(message, 256),\n code: typeof error.code === 'string' && error.code.length > 0 ? error.code : undefined,\n hint,\n }\n}\n\nexport const enforceProcessEventMaxBytes = (\n event: ProcessEvent,\n options?: {\n readonly maxBytes?: number\n },\n): { readonly event: ProcessEvent; readonly downgraded: boolean } => {\n const maxBytes = options?.maxBytes ?? 
PROCESS_EVENT_MAX_BYTES\n\n let downgraded = false\n let next: ProcessEvent = event\n\n if (event.error) {\n const normalized = normalizeErrorSummary(event.error)\n if (\n normalized.message !== event.error.message ||\n normalized.hint !== event.error.hint ||\n normalized.code !== event.error.code ||\n normalized.name !== event.error.name\n ) {\n downgraded = true\n next = { ...event, error: normalized }\n }\n }\n\n if (estimateEventBytes(next) <= maxBytes) {\n return { event: next, downgraded }\n }\n\n // Further trimming is applied to error.hint only (common trigger: multi-line hints).\n if (!next.error?.hint) {\n // Nothing left to trim; best-effort return.\n return { event: next, downgraded: true }\n }\n\n const hint = next.error.hint\n const steps = [512, 256, 128, 64, 32, 0]\n\n for (const maxLen of steps) {\n const trimmed = maxLen === 0 ? undefined : truncateChars(hint, maxLen)\n const candidate: ProcessEvent = {\n ...next,\n error: {\n ...next.error,\n hint: trimmed,\n },\n }\n if (estimateEventBytes(candidate) <= maxBytes) {\n return { event: candidate, downgraded: true }\n }\n }\n\n // Fallback: remove hint and shorten message (process:error must still have a message).\n const fallback: ProcessEvent = next.error\n ? 
({\n ...next,\n error: {\n ...next.error,\n message: truncateChars(next.error.message, 96),\n hint: undefined,\n },\n } satisfies ProcessEvent)\n : next\n\n return { event: fallback, downgraded: true }\n}\n","import type { Effect } from 'effect'\nimport type { ProcessDefinition, ProcessScope } from './protocol.js'\n\nexport type ProcessMeta = {\n readonly definition: ProcessDefinition\n /**\n * installationScope: part of Static IR.\n * - For instance-scope / subtree-scope scenarios, the runtime overwrites this field during installation with a derived effect.\n * - This field is for export/diagnostics only; it does not participate in stable identity derivation (see identity.ts).\n */\n readonly installationScope?: ProcessScope\n readonly kind?: 'process' | 'link'\n}\n\nexport const PROCESS_META = Symbol.for('@logixjs/core/processMeta')\n\nexport type ProcessEffect<E = never, R = never> = Effect.Effect<void, E, R> & {\n readonly [PROCESS_META]?: ProcessMeta\n}\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nexport const getMeta = (effect: Effect.Effect<void, any, any>): ProcessMeta | undefined =>\n (effect as ProcessEffect)[PROCESS_META]\n\nexport const getDefinition = (effect: Effect.Effect<void, any, any>): ProcessDefinition | undefined =>\n getMeta(effect)?.definition\n\nexport const attachMeta = <E, R>(effect: Effect.Effect<void, E, R>, meta: ProcessMeta): ProcessEffect<E, R> => {\n defineHidden(effect as any, PROCESS_META, meta)\n return effect as ProcessEffect<E, R>\n}\n","import * as SchemaAST from 'effect/SchemaAST'\n\nexport type DotPathSegment = string | number\n\ntype DotPathParseResult =\n | { readonly ok: true; readonly segments: ReadonlyArray<DotPathSegment> }\n | { readonly ok: false; readonly error: Error }\n\nexport type DotPathSelectorResult =\n | {\n readonly ok: true\n readonly selector: (state: 
unknown) => unknown\n readonly segments: ReadonlyArray<DotPathSegment>\n }\n | { readonly ok: false; readonly error: Error }\n\nconst isRecord = (value: unknown): value is Record<string, unknown> => typeof value === 'object' && value !== null\n\nexport const resolveSchemaAst = (schema: unknown): SchemaAST.AST | undefined => {\n if (!isRecord(schema)) return undefined\n const ast = (schema as any).ast as SchemaAST.AST | undefined\n if (!ast) return undefined\n if (typeof ast !== 'object' && typeof ast !== 'function') return undefined\n return ast\n}\n\nconst makeDotPathError = (path: string, message: string, hint?: string): Error => {\n const err = new Error(message)\n ;(err as any).code = 'process::invalid_dot_path'\n ;(err as any).hint =\n hint ??\n [\n \"Expected dot-path syntax: segments separated by '.', numeric segments represent array indices.\",\n `path: ${path}`,\n '',\n 'examples:',\n '- count',\n '- user.name',\n '- items.0.id',\n ].join('\\n')\n return err\n}\n\nconst makeSchemaMismatchError = (path: string): Error =>\n makeDotPathError(\n path,\n 'Invalid dot-path: path does not match the state schema.',\n [\n 'The module state schema does not contain the requested dot-path.',\n `path: ${path}`,\n '',\n 'fix:',\n '- Ensure the path exists in the state schema.',\n '- Use numeric segments for array indices (e.g. 
items.0.id).',\n ].join('\\n'),\n )\n\nconst parseDotPath = (path: string): DotPathParseResult => {\n if (typeof path !== 'string' || path.length === 0) {\n return { ok: false, error: makeDotPathError(String(path), 'dot-path must be a non-empty string') }\n }\n\n const raw = path.split('.')\n if (raw.length === 0) {\n return { ok: false, error: makeDotPathError(path, 'dot-path must contain at least one segment') }\n }\n\n const segments: DotPathSegment[] = []\n for (let i = 0; i < raw.length; i++) {\n const seg = raw[i]!\n if (seg.length === 0) {\n return {\n ok: false,\n error: makeDotPathError(\n path,\n `Invalid dot-path: empty segment at index ${i}.`,\n [\n \"Expected dot-path syntax: segments separated by '.', numeric segments represent array indices.\",\n `path: ${path}`,\n '',\n 'examples:',\n '- count',\n '- user.name',\n '- items.0.id',\n '',\n 'fix:',\n '- Remove consecutive dots or trailing dots.',\n ].join('\\n'),\n ),\n }\n }\n\n if (/^[0-9]+$/.test(seg)) {\n const n = Number(seg)\n if (!Number.isFinite(n) || n < 0) {\n return {\n ok: false,\n error: makeDotPathError(path, `Invalid array index segment \"${seg}\" at index ${i}.`),\n }\n }\n segments.push(Math.floor(n))\n continue\n }\n\n segments.push(seg)\n }\n\n return { ok: true, segments }\n}\n\nconst resolveAstForPath = (\n ast: SchemaAST.AST,\n segments: ReadonlyArray<DotPathSegment>,\n seen: Set<SchemaAST.AST>,\n): SchemaAST.AST | undefined => {\n if (segments.length === 0) return ast\n\n let current = ast\n while (true) {\n if (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) return undefined\n seen.add(current)\n current = current.f()\n continue\n }\n if (SchemaAST.isRefinement(current)) {\n current = current.from\n continue\n }\n break\n }\n\n if (SchemaAST.isTransformation(current)) {\n const from = resolveAstForPath(current.from, segments, seen)\n if (from) return from\n return resolveAstForPath(current.to, segments, seen)\n }\n\n if (SchemaAST.isUnion(current)) {\n for (const node 
of current.types) {\n const resolved = resolveAstForPath(node, segments, seen)\n if (resolved) return resolved\n }\n return undefined\n }\n\n if (SchemaAST.isTupleType(current)) {\n const [head, ...tail] = segments\n if (typeof head !== 'number') return undefined\n const element =\n head < current.elements.length ? current.elements[head] : current.rest.length > 0 ? current.rest[0] : undefined\n return element ? resolveAstForPath(element.type, tail, seen) : undefined\n }\n\n if (SchemaAST.isTypeLiteral(current)) {\n const [head, ...tail] = segments\n if (head === undefined) return undefined\n\n if (typeof head === 'string') {\n for (const ps of current.propertySignatures) {\n if (String(ps.name) !== head) continue\n return resolveAstForPath(ps.type, tail, seen)\n }\n }\n\n for (const sig of current.indexSignatures) {\n let param: SchemaAST.AST = sig.parameter as unknown as SchemaAST.AST\n while (SchemaAST.isRefinement(param)) {\n param = param.from\n }\n const tag = (param as any)?._tag\n const acceptsString = tag === 'StringKeyword' || tag === 'TemplateLiteral'\n const acceptsNumber = tag === 'NumberKeyword'\n if (typeof head === 'string' && acceptsString) {\n return resolveAstForPath(sig.type, tail, seen)\n }\n if (typeof head === 'number' && acceptsNumber) {\n return resolveAstForPath(sig.type, tail, seen)\n }\n }\n }\n\n const tag = (current as any)?._tag\n if (tag === 'AnyKeyword' || tag === 'UnknownKeyword' || tag === 'ObjectKeyword' || tag === 'Declaration') {\n return current\n }\n\n return undefined\n}\n\nconst selectBySegments =\n (segments: ReadonlyArray<DotPathSegment>) =>\n (state: unknown): unknown => {\n let current: unknown = state\n for (const seg of segments) {\n if (current == null) return undefined\n if (typeof seg === 'number') {\n if (Array.isArray(current)) {\n current = current[seg]\n continue\n }\n if (isRecord(current)) {\n current = current[String(seg)]\n continue\n }\n return undefined\n }\n if (isRecord(current)) {\n current = 
current[seg]\n continue\n }\n return undefined\n }\n return current\n }\n\nexport const makeSchemaSelector = (path: string, schemaAst?: SchemaAST.AST): DotPathSelectorResult => {\n const parsed = parseDotPath(path)\n if (!parsed.ok) return { ok: false, error: parsed.error }\n\n if (schemaAst) {\n const resolved = resolveAstForPath(schemaAst, parsed.segments, new Set())\n if (!resolved) {\n return { ok: false, error: makeSchemaMismatchError(path) }\n }\n }\n\n return {\n ok: true,\n selector: selectBySegments(parsed.segments),\n segments: parsed.segments,\n }\n}\n","import { Effect, PubSub, Scope } from 'effect'\nimport { isPrefixOf, normalizeFieldPath, type DirtySet, type FieldPath, type FieldPathIdRegistry } from '../../field-path.js'\nimport type { ReadQueryCompiled } from './ReadQuery.js'\nimport type { StateChangeWithMeta, StateCommitMeta } from './module.js'\nimport * as Debug from './DebugSink.js'\n\ntype ReadRootKey = string\n\ntype SelectorEntry<S, V> = {\n readonly selectorId: string\n readonly readQuery: ReadQueryCompiled<S, V>\n readonly reads: ReadonlyArray<FieldPath>\n readonly readRootKeys: ReadonlyArray<ReadRootKey>\n readonly hub: PubSub.PubSub<StateChangeWithMeta<V>>\n subscriberCount: number\n cachedAtTxnSeq: number\n hasValue: boolean\n cachedValue: V | undefined\n}\n\nexport interface SelectorGraph<S> {\n readonly ensureEntry: <V>(\n readQuery: ReadQueryCompiled<S, V>,\n ) => Effect.Effect<SelectorEntry<S, V>, never, Scope.Scope>\n readonly releaseEntry: (selectorId: string) => void\n readonly onCommit: (\n state: S,\n meta: StateCommitMeta,\n dirtySet: DirtySet,\n diagnosticsLevel: Debug.DiagnosticsLevel,\n ) => Effect.Effect<void, never, never>\n}\n\nconst getReadRootKeyFromPath = (path: FieldPath): ReadRootKey => path[0] ?? 
''\n\nconst overlaps = (a: FieldPath, b: FieldPath): boolean => isPrefixOf(a, b) || isPrefixOf(b, a)\n\nconst equalsShallowStruct = (a: unknown, b: unknown): boolean => {\n if (Object.is(a, b)) return true\n if (!a || !b) return false\n if (typeof a !== 'object' || typeof b !== 'object') return false\n if (Array.isArray(a) || Array.isArray(b)) return false\n\n const aObj = a as Record<string, unknown>\n const bObj = b as Record<string, unknown>\n const aKeys = Object.keys(aObj)\n const bKeys = Object.keys(bObj)\n if (aKeys.length !== bKeys.length) return false\n for (const k of aKeys) {\n if (!Object.prototype.hasOwnProperty.call(bObj, k)) return false\n if (!Object.is(aObj[k], bObj[k])) return false\n }\n return true\n}\n\nconst equalsValue = <V>(query: ReadQueryCompiled<any, V>, a: V, b: V): boolean => {\n if (query.equalsKind === 'custom' && typeof query.equals === 'function') {\n return query.equals(a, b)\n }\n if (query.equalsKind === 'shallowStruct') {\n return equalsShallowStruct(a, b)\n }\n return Object.is(a, b)\n}\n\nconst nowMs = (): number => {\n const perf = (globalThis as any).performance as { now?: () => number } | undefined\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n}\n\nexport const make = <S>(args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n}): SelectorGraph<S> => {\n const { moduleId, instanceId, getFieldPathIdRegistry } = args\n\n const selectorsById = new Map<string, SelectorEntry<S, any>>()\n const indexByReadRoot = new Map<ReadRootKey, Set<string>>()\n\n const ensureEntry: SelectorGraph<S>['ensureEntry'] = (readQuery) => {\n const existing = selectorsById.get(readQuery.selectorId)\n if (existing) {\n return Effect.succeed(existing as any)\n }\n\n return Effect.gen(function* () {\n const hub = yield* PubSub.unbounded<StateChangeWithMeta<any>>()\n\n const reads = readQuery.reads\n .filter((x): x is string 
=> typeof x === 'string')\n .map((raw) => normalizeFieldPath(raw))\n .filter((x): x is FieldPath => x != null)\n const readRootKeys = Array.from(new Set(reads.map(getReadRootKeyFromPath)))\n\n for (const rootKey of readRootKeys) {\n const set = indexByReadRoot.get(rootKey)\n if (set) {\n set.add(readQuery.selectorId)\n } else {\n indexByReadRoot.set(rootKey, new Set([readQuery.selectorId]))\n }\n }\n\n const entry: SelectorEntry<S, any> = {\n selectorId: readQuery.selectorId,\n readQuery: readQuery as any,\n reads,\n readRootKeys,\n hub,\n subscriberCount: 0,\n cachedAtTxnSeq: 0,\n hasValue: false,\n cachedValue: undefined,\n }\n selectorsById.set(readQuery.selectorId, entry)\n return entry as any\n })\n }\n\n const releaseEntry: SelectorGraph<S>['releaseEntry'] = (selectorId) => {\n const entry = selectorsById.get(selectorId)\n if (!entry) return\n entry.subscriberCount = Math.max(0, entry.subscriberCount - 1)\n if (entry.subscriberCount > 0) return\n\n selectorsById.delete(selectorId)\n for (const rootKey of entry.readRootKeys) {\n const set = indexByReadRoot.get(rootKey)\n if (!set) continue\n set.delete(selectorId)\n if (set.size === 0) {\n indexByReadRoot.delete(rootKey)\n }\n }\n }\n\n const onCommit: SelectorGraph<S>['onCommit'] = (state, meta, dirtySet, diagnosticsLevel) =>\n Effect.gen(function* () {\n if (selectorsById.size === 0) return\n\n const emitEvalEvent =\n diagnosticsLevel === 'light' || diagnosticsLevel === 'full' || diagnosticsLevel === 'sampled'\n\n const registry: FieldPathIdRegistry | undefined =\n dirtySet.dirtyAll || dirtySet.rootIds.length === 0 ? undefined : getFieldPathIdRegistry?.()\n\n const getDirtyRootPath = (id: number): FieldPath | undefined => {\n if (!registry) return undefined\n if (!Number.isFinite(id)) return undefined\n const idx = Math.floor(id)\n if (idx < 0) return undefined\n const path = registry.fieldPaths[idx]\n return path && Array.isArray(path) ? 
path : undefined\n }\n\n if (selectorsById.size === 1) {\n const entry = selectorsById.values().next().value\n if (!entry || entry.subscriberCount === 0) return\n\n const isDirty = (() => {\n if (dirtySet.dirtyAll) return true\n if (entry.reads.length === 0) return true\n if (!registry) return true\n for (const dirtyRootId of dirtySet.rootIds) {\n const dirtyRoot = getDirtyRootPath(dirtyRootId)\n if (!dirtyRoot) return true\n const dirtyRootKey = getReadRootKeyFromPath(dirtyRoot)\n if (entry.readRootKeys.length > 0 && !entry.readRootKeys.includes(dirtyRootKey)) continue\n for (const read of entry.reads) {\n if (overlaps(dirtyRoot, read)) {\n return true\n }\n }\n }\n return false\n })()\n\n if (!isDirty) {\n return\n }\n\n let next: any\n const evalStartedAt = emitEvalEvent ? nowMs() : undefined\n try {\n next = entry.readQuery.select(state)\n } catch (err) {\n if (emitEvalEvent) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n txnSeq: meta.txnSeq,\n txnId: meta.txnId,\n code: 'read_query::eval_error',\n severity: 'error',\n message: 'ReadQuery selector threw during evaluation.',\n hint: 'Selectors must be pure and not throw; check the selector implementation and inputs.',\n kind: 'read_query_eval_error',\n trigger: { kind: 'read_query', name: 'selector:eval', details: { selectorId: entry.selectorId } },\n })\n }\n return\n }\n const evalMs = emitEvalEvent && evalStartedAt != null ? Math.max(0, nowMs() - evalStartedAt) : undefined\n\n const hadValue = entry.hasValue\n const prev = entry.cachedValue as any\n const equal = hadValue ? 
equalsValue(entry.readQuery as any, prev, next) : false\n const changed = !hadValue || !equal\n\n if (changed) {\n entry.cachedValue = next\n entry.hasValue = true\n entry.cachedAtTxnSeq = meta.txnSeq\n\n yield* PubSub.publish(entry.hub as any, {\n value: entry.cachedValue,\n meta,\n } satisfies StateChangeWithMeta<any>)\n }\n\n if (emitEvalEvent) {\n yield* Debug.record({\n type: 'trace:selector:eval',\n moduleId,\n instanceId,\n txnSeq: meta.txnSeq,\n txnId: meta.txnId,\n data: {\n selectorId: entry.selectorId,\n lane: entry.readQuery.lane,\n producer: entry.readQuery.producer,\n fallbackReason: entry.readQuery.fallbackReason,\n readsDigest: entry.readQuery.readsDigest,\n equalsKind: entry.readQuery.equalsKind,\n changed,\n evalMs,\n },\n })\n }\n\n return\n }\n\n const dirtySelectorIds: Set<string> = new Set()\n\n if (dirtySet.dirtyAll) {\n for (const [id, entry] of selectorsById.entries()) {\n if (entry.subscriberCount > 0) dirtySelectorIds.add(id)\n }\n } else {\n if (!registry) {\n for (const [id, entry] of selectorsById.entries()) {\n if (entry.subscriberCount > 0) dirtySelectorIds.add(id)\n }\n } else {\n for (const dirtyRootId of dirtySet.rootIds) {\n const dirtyRoot = getDirtyRootPath(dirtyRootId)\n if (!dirtyRoot) {\n for (const [id, entry] of selectorsById.entries()) {\n if (entry.subscriberCount > 0) dirtySelectorIds.add(id)\n }\n break\n }\n\n const rootKey = getReadRootKeyFromPath(dirtyRoot)\n const candidates = indexByReadRoot.get(rootKey)\n if (!candidates) continue\n for (const selectorId of candidates) {\n if (dirtySelectorIds.has(selectorId)) continue\n const entry = selectorsById.get(selectorId)\n if (!entry || entry.subscriberCount === 0) continue\n if (entry.reads.length === 0) {\n dirtySelectorIds.add(selectorId)\n continue\n }\n for (const read of entry.reads) {\n if (overlaps(dirtyRoot, read)) {\n dirtySelectorIds.add(selectorId)\n break\n }\n }\n }\n }\n }\n }\n\n if (dirtySelectorIds.size === 0) return\n\n for (const selectorId of 
dirtySelectorIds) {\n const entry = selectorsById.get(selectorId)\n if (!entry || entry.subscriberCount === 0) continue\n\n let next: any\n const evalStartedAt = emitEvalEvent ? nowMs() : undefined\n try {\n next = entry.readQuery.select(state)\n } catch (err) {\n if (emitEvalEvent) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n txnSeq: meta.txnSeq,\n txnId: meta.txnId,\n code: 'read_query::eval_error',\n severity: 'error',\n message: 'ReadQuery selector threw during evaluation.',\n hint: 'Selectors must be pure and not throw; check the selector implementation and inputs.',\n kind: 'read_query_eval_error',\n trigger: { kind: 'read_query', name: 'selector:eval', details: { selectorId } },\n })\n }\n continue\n }\n const evalMs = emitEvalEvent && evalStartedAt != null ? Math.max(0, nowMs() - evalStartedAt) : undefined\n\n const hadValue = entry.hasValue\n const prev = entry.cachedValue as any\n const equal = hadValue ? equalsValue(entry.readQuery as any, prev, next) : false\n const changed = !hadValue || !equal\n\n if (changed) {\n entry.cachedValue = next\n entry.hasValue = true\n entry.cachedAtTxnSeq = meta.txnSeq\n\n yield* PubSub.publish(entry.hub as any, {\n value: entry.cachedValue,\n meta,\n } satisfies StateChangeWithMeta<any>)\n }\n\n if (emitEvalEvent) {\n yield* Debug.record({\n type: 'trace:selector:eval',\n moduleId,\n instanceId,\n txnSeq: meta.txnSeq,\n txnId: meta.txnId,\n data: {\n selectorId,\n lane: entry.readQuery.lane,\n producer: entry.readQuery.producer,\n fallbackReason: entry.readQuery.fallbackReason,\n readsDigest: entry.readQuery.readsDigest,\n equalsKind: entry.readQuery.equalsKind,\n changed,\n evalMs,\n },\n })\n }\n }\n })\n\n return { ensureEntry, releaseEntry, onCommit }\n}\n","import { Deferred, Effect, Exit, FiberRef, Option, Queue, Ref, Scope } from 'effect'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport * as Debug from './DebugSink.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport 
type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport { StateTransactionOverridesTag, type StateTransactionOverrides } from './env.js'\nimport type { ResolvedConcurrencyPolicy } from './ModuleRuntime.concurrencyPolicy.js'\n\nexport type TxnLane = 'urgent' | 'nonUrgent'\n\nexport interface EnqueueTransaction {\n <A, E>(eff: Effect.Effect<A, E, never>): Effect.Effect<A, E, never>\n <A, E>(lane: TxnLane, eff: Effect.Effect<A, E, never>): Effect.Effect<A, E, never>\n}\n\ntype BackpressureState = {\n readonly backlogCount: number\n readonly waiters: number\n readonly signal: Deferred.Deferred<void>\n}\n\ntype BacklogAcquireAttempt =\n | { readonly _tag: 'acquired' }\n | {\n readonly _tag: 'wait'\n readonly backlogCount: number\n readonly signal: Deferred.Deferred<void>\n }\n\ntype CapturedDiagnosticContext = {\n readonly linkId: string\n readonly runtimeLabel: string | undefined\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n readonly debugSinks: ReadonlyArray<Debug.Sink>\n readonly overridesOpt: Option.Option<StateTransactionOverrides>\n}\n\nconst captureDiagnosticContext = (args: {\n readonly nextLinkId: () => string\n}): Effect.Effect<CapturedDiagnosticContext> =>\n Effect.gen(function* () {\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n const runtimeLabel = yield* FiberRef.get(Debug.currentRuntimeLabel)\n const debugSinks = yield* FiberRef.get(Debug.currentDebugSinks)\n const existingLinkId = yield* FiberRef.get(EffectOpCore.currentLinkId)\n const linkId = existingLinkId ?? args.nextLinkId()\n\n return {\n linkId,\n runtimeLabel,\n diagnosticsLevel,\n debugSinks,\n overridesOpt,\n }\n })\n\nconst withDiagnosticContext = <A, E>(\n context: CapturedDiagnosticContext,\n eff: Effect.Effect<A, E, never>,\n): Effect.Effect<A, E, never> => {\n const effWithOverrides = Option.isSome(context.overridesOpt)\n ? 
Effect.provideService(eff, StateTransactionOverridesTag, context.overridesOpt.value)\n : eff\n\n return effWithOverrides.pipe(\n Effect.locally(EffectOpCore.currentLinkId, context.linkId),\n Effect.locally(Debug.currentRuntimeLabel, context.runtimeLabel),\n Effect.locally(Debug.currentDiagnosticsLevel, context.diagnosticsLevel),\n Effect.locally(Debug.currentDebugSinks, context.debugSinks),\n )\n}\n\n/**\n * Builds a \"single-instance transaction queue\":\n * - All entry points (dispatch/source-refresh/...) execute serially through the same FIFO queue.\n * - Callers still experience the entry as a single Effect (preserving the existing API shape).\n * - Tasks must \"never fail\", otherwise the queue consumer fiber would deadlock (so we return results via Deferred/Exit).\n *\n * NOTE: transaction execution happens inside a background queue fiber. To support Provider-local overrides (Tag/Layer)\n * and diagnostics tiers (FiberRef) at the call site, we capture minimal context at enqueue-time and re-provide it to the task.\n */\nexport const makeEnqueueTransaction = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n readonly resolveConcurrencyPolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>\n readonly diagnostics: ConcurrencyDiagnostics\n}): Effect.Effect<EnqueueTransaction, never, Scope.Scope> =>\n Effect.gen(function* () {\n const urgentQueue = yield* Queue.unbounded<Effect.Effect<void>>()\n const nonUrgentQueue = yield* Queue.unbounded<Effect.Effect<void>>()\n const wakeQueue = yield* Queue.unbounded<void>()\n const diagnostics = args.diagnostics\n\n let nextLinkSeq = 0\n const nextLinkId = (): string => {\n nextLinkSeq += 1\n return `${args.instanceId}::l${nextLinkSeq}`\n }\n\n const initialUrgentSignal = yield* Deferred.make<void>()\n const urgentStateRef = yield* Ref.make<BackpressureState>({\n backlogCount: 0,\n waiters: 0,\n signal: initialUrgentSignal,\n })\n\n const initialNonUrgentSignal = yield* Deferred.make<void>()\n 
const nonUrgentStateRef = yield* Ref.make<BackpressureState>({\n backlogCount: 0,\n waiters: 0,\n signal: initialNonUrgentSignal,\n })\n\n const release = (stateRef: Ref.Ref<BackpressureState>) =>\n Effect.gen(function* () {\n let prevSignal: Deferred.Deferred<void> | undefined\n const nextSignal = yield* Deferred.make<void>()\n yield* Ref.update(stateRef, (s) => {\n const nextBacklogCount = s.backlogCount > 0 ? s.backlogCount - 1 : 0\n if (s.waiters <= 0) {\n return {\n backlogCount: nextBacklogCount,\n waiters: 0,\n signal: s.signal,\n }\n }\n prevSignal = s.signal\n return {\n backlogCount: nextBacklogCount,\n waiters: s.waiters,\n signal: nextSignal,\n }\n })\n if (prevSignal) {\n yield* Deferred.succeed(prevSignal, undefined)\n }\n })\n\n const acquireBacklogSlot = (lane: TxnLane, capacity: number): Effect.Effect<void> =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'state_transaction::enqueue_in_transaction',\n severity: 'error',\n message:\n 'enqueueTransaction is not allowed inside a synchronous StateTransaction body (it may deadlock or violate backpressure constraints).',\n hint: 'Move dispatch/setState calls outside the transaction window, or use a multi-entry pattern (pending → IO → writeback).',\n kind: 'enqueue_in_transaction',\n })\n yield* Effect.dieMessage('enqueueTransaction is not allowed inside a synchronous StateTransaction body')\n }\n\n const stateRef = lane === 'urgent' ? 
urgentStateRef : nonUrgentStateRef\n\n let waitedFromMs: number | undefined\n while (true) {\n const policy = yield* args.resolveConcurrencyPolicy()\n const attempt = yield* Ref.modify(stateRef, (s): readonly [BacklogAcquireAttempt, BackpressureState] => {\n if (s.backlogCount < capacity) {\n return [\n { _tag: 'acquired' },\n {\n backlogCount: s.backlogCount + 1,\n waiters: s.waiters,\n signal: s.signal,\n },\n ] as const\n }\n\n return [{ _tag: 'wait', backlogCount: s.backlogCount, signal: s.signal }, s] as const\n })\n\n if (attempt._tag === 'acquired') {\n return\n }\n\n const now = Date.now()\n if (waitedFromMs === undefined) {\n waitedFromMs = now\n }\n const saturatedDurationMs = now - waitedFromMs\n\n yield* diagnostics.emitPressureIfNeeded({\n policy,\n trigger: { kind: 'txnQueue', name: `enqueueTransaction.${lane}` },\n backlogCount: attempt.backlogCount,\n saturatedDurationMs,\n })\n\n yield* Effect.acquireUseRelease(\n Ref.update(stateRef, (s) => ({\n backlogCount: s.backlogCount,\n waiters: s.waiters + 1,\n signal: s.signal,\n })).pipe(Effect.as(attempt.signal)),\n (signal) => Deferred.await(signal),\n () =>\n Ref.update(stateRef, (s) => ({\n backlogCount: s.backlogCount,\n waiters: s.waiters > 0 ? 
s.waiters - 1 : 0,\n signal: s.signal,\n })),\n )\n }\n })\n\n // Background consumer fiber: executes queued transaction Effects sequentially (urgent first).\n yield* Effect.forkScoped(\n Effect.forever(\n Effect.gen(function* () {\n yield* Queue.take(wakeQueue)\n\n while (true) {\n const urgent = yield* Queue.poll(urgentQueue)\n if (Option.isSome(urgent)) {\n yield* urgent.value\n continue\n }\n\n const nonUrgent = yield* Queue.poll(nonUrgentQueue)\n if (Option.isSome(nonUrgent)) {\n yield* nonUrgent.value\n continue\n }\n\n break\n }\n }),\n ),\n )\n\n const enqueueTransaction: EnqueueTransaction = <A2, E2>(\n a0: TxnLane | Effect.Effect<A2, E2, never>,\n a1?: Effect.Effect<A2, E2, never>,\n ): Effect.Effect<A2, E2, never> =>\n Effect.gen(function* () {\n const lane: TxnLane = a1 ? (a0 as TxnLane) : 'urgent'\n const eff: Effect.Effect<A2, E2, never> = a1 ? a1 : (a0 as Effect.Effect<A2, E2, never>)\n const stateRef = lane === 'urgent' ? urgentStateRef : nonUrgentStateRef\n\n const policy = yield* args.resolveConcurrencyPolicy()\n const capacity = policy.losslessBackpressureCapacity\n yield* acquireBacklogSlot(lane, capacity)\n\n const done = yield* Deferred.make<Exit.Exit<A2, E2>>()\n\n const capturedContext = yield* captureDiagnosticContext({ nextLinkId })\n const effWithContext = withDiagnosticContext(capturedContext, eff)\n\n const task: Effect.Effect<void> = effWithContext.pipe(\n Effect.exit,\n Effect.flatMap((exit) => Deferred.succeed(done, exit)),\n Effect.asVoid,\n Effect.ensuring(release(stateRef)),\n )\n\n // Important: slot is already acquired; offer must be uninterruptible to avoid leaking backlog counters.\n const targetQueue = lane === 'urgent' ? 
urgentQueue : nonUrgentQueue\n yield* Effect.uninterruptible(Effect.all([Queue.offer(targetQueue, task), Queue.offer(wakeQueue, undefined)]))\n\n const exit = yield* Deferred.await(done)\n return yield* Exit.match(exit, {\n onFailure: (cause) => Effect.failCause(cause),\n onSuccess: (value) => Effect.succeed(value),\n })\n })\n\n return enqueueTransaction\n })\n","import { Cause, Context, Effect, Exit, Fiber, Option } from 'effect'\nimport type { LogicPlan, ModuleRuntime as PublicModuleRuntime } from './module.js'\nimport * as Lifecycle from './Lifecycle.js'\nimport * as ReducerDiagnostics from './ReducerDiagnostics.js'\nimport * as LifecycleDiagnostics from './LifecycleDiagnostics.js'\nimport * as LogicDiagnostics from './LogicDiagnostics.js'\nimport * as LogicUnitMeta from './LogicUnitMeta.js'\nimport * as Platform from './Platform.js'\nimport * as LogicPlanMarker from './LogicPlanMarker.js'\n\ntype PhaseRef = LogicPlanMarker.PhaseRef\n\nconst createPhaseRef = (): PhaseRef => ({ current: 'run' })\n\nexport const runModuleLogics = <S, A, R>(args: {\n readonly tag: Context.Tag<any, PublicModuleRuntime<S, A>>\n readonly logics: ReadonlyArray<Effect.Effect<any, any, R> | LogicPlan<any, R, any>>\n readonly runtime: PublicModuleRuntime<S, A>\n readonly lifecycle: Lifecycle.LifecycleManager\n readonly moduleId: string\n readonly instanceId: string\n}): Effect.Effect<void, unknown, any> => {\n const { tag, logics, runtime, lifecycle, moduleId, instanceId } = args\n const moduleIdForLogs = moduleId\n\n return Effect.gen(function* () {\n const withRuntimeAndLifecycle = <R2, E2, A2>(\n eff: Effect.Effect<A2, E2, R2>,\n phaseRef?: PhaseRef,\n logicUnit?: LogicDiagnostics.LogicUnitService,\n ) => {\n const withServices = Effect.provideService(\n Effect.provideService(eff, Lifecycle.LifecycleContext, lifecycle),\n tag,\n runtime,\n )\n\n // Annotate logs produced inside Logic effects (moduleId, etc.) 
so the Logger layer can correlate them to a Module.\n const annotated = Effect.annotateLogs({\n 'logix.moduleId': moduleIdForLogs,\n })(withServices as Effect.Effect<A2, E2, any>) as Effect.Effect<A2, E2, R2>\n\n const withLogicUnit = logicUnit\n ? Effect.provideService(annotated, LogicDiagnostics.LogicUnitServiceTag, logicUnit)\n : annotated\n\n if (!phaseRef) {\n return withLogicUnit\n }\n\n const phaseService: LogicDiagnostics.LogicPhaseService = {\n get current() {\n return phaseRef.current\n },\n }\n\n return Effect.provideService(withLogicUnit, LogicDiagnostics.LogicPhaseServiceTag, phaseService)\n }\n\n const formatSource = (source?: {\n readonly file: string\n readonly line: number\n readonly column: number\n }): string | undefined => (source ? `${source.file}:${source.line}:${source.column}` : undefined)\n\n const resolveLogicUnitService = (rawLogic: unknown, index: number): LogicDiagnostics.LogicUnitService => {\n const meta = LogicUnitMeta.getLogicUnitMeta(rawLogic)\n\n const logicUnitId = meta?.resolvedId ?? meta?.id ?? `logic#${index + 1}`\n\n const logicUnitIdKind = meta?.resolvedIdKind ?? (meta?.id ? 'explicit' : 'derived')\n\n const labelBase = meta?.resolvedName ?? meta?.name ?? logicUnitId\n\n const kind = meta?.resolvedKind ?? meta?.kind\n const kindPrefix = kind && kind.length > 0 ? `${kind}:` : ''\n\n const source = meta?.resolvedSource ?? 
meta?.source\n\n return {\n logicUnitId,\n logicUnitIdKind,\n logicUnitLabel: `logicUnit:${kindPrefix}${labelBase}`,\n path: formatSource(source),\n }\n }\n\n const handleLogicFailure = (cause: any) => {\n if (Cause.isInterrupted(cause)) {\n return Effect.failCause(cause)\n }\n\n const phaseErrorMarker = [...Cause.failures(cause), ...Cause.defects(cause)].some(\n (err) => (err as any)?._tag === 'LogicPhaseError',\n )\n\n const base = lifecycle\n .notifyError(cause, {\n phase: 'run',\n hook: 'unknown',\n moduleId,\n instanceId,\n origin: 'logic.fork',\n })\n .pipe(\n Effect.tap(() => LifecycleDiagnostics.emitMissingOnErrorDiagnosticIfNeeded(lifecycle, moduleId)),\n Effect.tap(() => LifecycleDiagnostics.emitAssemblyFailureDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => ReducerDiagnostics.emitDiagnosticsFromCause(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitEnvServiceNotFoundDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitInvalidPhaseDiagnosticIfNeeded(cause, moduleId)),\n )\n\n // For LogicPhaseError: emit diagnostics only and avoid failing ModuleRuntime construction,\n // so runSync paths are not interrupted by AsyncFiberException.\n if (phaseErrorMarker) {\n return base\n }\n\n return base.pipe(Effect.flatMap(() => Effect.failCause(cause)))\n }\n\n const handleInitFailure = (cause: Cause.Cause<unknown>) =>\n Cause.isInterrupted(cause)\n ? 
Effect.failCause(cause)\n : Effect.void.pipe(\n Effect.tap(() => LifecycleDiagnostics.emitMissingOnErrorDiagnosticIfNeeded(lifecycle, moduleId)),\n Effect.tap(() => LifecycleDiagnostics.emitAssemblyFailureDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => ReducerDiagnostics.emitDiagnosticsFromCause(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitEnvServiceNotFoundDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitInvalidPhaseDiagnosticIfNeeded(cause, moduleId)),\n Effect.zipRight(Effect.failCause(cause)),\n )\n\n const isLogicPlan = (value: unknown): value is LogicPlan<any, any, any> =>\n Boolean(value && typeof value === 'object' && 'run' in (value as any) && 'setup' in (value as any))\n\n const normalizeToPlan = (value: unknown, defaultPhaseRef?: PhaseRef): LogicPlan<any, any, any> => {\n const phaseRef = LogicPlanMarker.getPhaseRef(value) ?? defaultPhaseRef ?? createPhaseRef()\n\n if (isLogicPlan(value)) {\n const plan = value as LogicPlan<any, any, any>\n if (!LogicPlanMarker.getPhaseRef(plan)) {\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n }\n return plan\n }\n\n const plan: LogicPlan<any, any, any> = {\n setup: Effect.void,\n run: value as Effect.Effect<any, any, any>,\n }\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n return plan\n }\n\n const pendingRunForks: Array<Effect.Effect<void, never, any>> = []\n\n let logicIndex = 0\n for (const rawLogic of logics) {\n const logicUnit = resolveLogicUnitService(rawLogic, logicIndex)\n logicIndex += 1\n\n if (isLogicPlan(rawLogic)) {\n const phaseRef = LogicPlanMarker.getPhaseRef(rawLogic) ?? 
createPhaseRef()\n const setupPhase = withRuntimeAndLifecycle(rawLogic.setup, phaseRef, logicUnit)\n const runPhase = withRuntimeAndLifecycle(rawLogic.run, phaseRef, logicUnit)\n\n phaseRef.current = 'setup'\n yield* setupPhase.pipe(Effect.catchAllCause(handleLogicFailure))\n\n pendingRunForks.push(\n Effect.sync(() => {\n phaseRef.current = 'run'\n }).pipe(\n Effect.zipRight(Effect.forkScoped(runPhase.pipe(Effect.catchAllCause(handleLogicFailure)))),\n Effect.asVoid,\n ),\n )\n continue\n }\n\n if (LogicPlanMarker.isLogicPlanEffect(rawLogic)) {\n // The logic is an Effect that returns a LogicPlan; run it once to resolve the plan.\n const phaseRef = LogicPlanMarker.getPhaseRef(rawLogic) ?? createPhaseRef()\n const makeNoopPlan = (): LogicPlan<any, any, any> =>\n (() => {\n const plan: LogicPlan<any, any, any> = {\n setup: Effect.void,\n run: Effect.void,\n }\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n LogicPlanMarker.markSkipRun(plan as any)\n return plan\n })()\n\n phaseRef.current = 'setup'\n const resolvedPlan = yield* withRuntimeAndLifecycle(\n rawLogic as Effect.Effect<any, any, any>,\n phaseRef,\n logicUnit,\n ).pipe(\n Effect.matchCauseEffect({\n onSuccess: (value) => Effect.succeed(normalizeToPlan(value, phaseRef)),\n onFailure: (cause) => {\n const isLogicPhaseError = [...Cause.failures(cause), ...Cause.defects(cause)].some(\n (err) => (err as any)?._tag === 'LogicPhaseError',\n )\n\n if (isLogicPhaseError) {\n // For LogicPhaseError: record diagnostics and continue with a noop plan,\n // to avoid failing ModuleRuntime.make on runSync paths.\n return LogicDiagnostics.emitInvalidPhaseDiagnosticIfNeeded(cause, moduleId).pipe(\n Effect.zipRight(handleLogicFailure(cause)),\n Effect.as(makeNoopPlan()),\n )\n }\n\n // Other errors: treat as hard errors — emit diagnostics/errors first, then failCause so the caller can observe it.\n return LogicDiagnostics.emitEnvServiceNotFoundDiagnosticIfNeeded(cause, moduleId).pipe(\n 
Effect.zipRight(handleLogicFailure(cause)),\n Effect.zipRight(Effect.failCause(cause)),\n )\n },\n }),\n )\n\n const resolvedPlanHadPhaseRef = Boolean(LogicPlanMarker.getPhaseRef(resolvedPlan))\n const planPhaseRef = LogicPlanMarker.getPhaseRef(resolvedPlan) ?? phaseRef\n if (!resolvedPlanHadPhaseRef) {\n LogicPlanMarker.attachPhaseRef(resolvedPlan as any, planPhaseRef)\n }\n const setupPhase = withRuntimeAndLifecycle(resolvedPlan.setup, planPhaseRef, logicUnit)\n const runPhase = withRuntimeAndLifecycle(resolvedPlan.run, planPhaseRef, logicUnit)\n\n // If this is a placeholder plan for phase diagnostics, only run setup (usually Effect.void),\n // and do not fork run so ModuleRuntime.make remains synchronous under runSync.\n const skipRun = LogicPlanMarker.isSkipRun(resolvedPlan)\n\n planPhaseRef.current = 'setup'\n yield* setupPhase.pipe(Effect.catchAllCause(handleLogicFailure))\n\n if (!skipRun) {\n pendingRunForks.push(\n Effect.sync(() => {\n planPhaseRef.current = 'run'\n }).pipe(\n Effect.zipRight(Effect.forkScoped(runPhase.pipe(Effect.catchAllCause(handleLogicFailure)))),\n Effect.asVoid,\n ),\n )\n }\n continue\n }\n\n // Default: single-phase Logic. Fork immediately; if it later resolves to a LogicPlan, execute setup/run.\n const basePhaseRef = LogicPlanMarker.getPhaseRef(rawLogic)\n const runPhase = withRuntimeAndLifecycle(rawLogic as Effect.Effect<any, any, any>, basePhaseRef, logicUnit).pipe(\n Effect.catchAllCause(handleLogicFailure),\n )\n\n pendingRunForks.push(\n Effect.gen(function* () {\n const runFiber = yield* Effect.forkScoped(runPhase)\n\n yield* Effect.forkScoped(\n Fiber.await(runFiber).pipe(\n Effect.flatMap((exit) =>\n Exit.match(exit, {\n onFailure: () => Effect.void,\n onSuccess: (value) => {\n const executePlan = (plan: LogicPlan<any, any, any>): Effect.Effect<void, unknown, any> => {\n const phaseRef = LogicPlanMarker.getPhaseRef(plan) ?? 
createPhaseRef()\n const setupPhase = withRuntimeAndLifecycle(plan.setup, phaseRef, logicUnit)\n const runPlanPhase = withRuntimeAndLifecycle(plan.run, phaseRef, logicUnit)\n\n phaseRef.current = 'setup'\n return setupPhase.pipe(\n Effect.catchAllCause(handleLogicFailure),\n Effect.zipRight(\n Effect.sync(() => {\n phaseRef.current = 'run'\n }).pipe(\n Effect.zipRight(Effect.forkScoped(runPlanPhase.pipe(Effect.catchAllCause(handleLogicFailure)))),\n Effect.asVoid,\n ),\n ),\n )\n }\n\n if (isLogicPlan(value)) {\n return executePlan(value)\n }\n\n if (LogicPlanMarker.isLogicPlanEffect(value)) {\n return withRuntimeAndLifecycle(\n value as Effect.Effect<any, any, any>,\n basePhaseRef,\n logicUnit,\n ).pipe(\n Effect.map((value) => normalizeToPlan(value, basePhaseRef)),\n Effect.matchCauseEffect({\n onFailure: (cause) => handleLogicFailure(cause),\n onSuccess: (plan) => executePlan(plan),\n }),\n )\n }\n\n return Effect.void\n },\n }),\n ),\n ),\n )\n }),\n )\n continue\n }\n\n // lifecycle initRequired: blocking gate (must complete before forking run fibers).\n yield* lifecycle.runInitRequired.pipe(Effect.catchAllCause(handleInitFailure))\n\n // platform signals: read Platform only after initRequired succeeds (avoid reading Env during setup).\n const platformOpt = yield* Effect.serviceOption(Platform.Tag)\n if (Option.isSome(platformOpt)) {\n const platform = platformOpt.value\n const snapshot = yield* lifecycle.getTaskSnapshot\n\n const platformPhaseRef: PhaseRef = { current: 'run' }\n const phaseService: LogicDiagnostics.LogicPhaseService = {\n get current() {\n return platformPhaseRef.current\n },\n }\n\n const providePlatformEnv = <A2, E2, R2>(eff: Effect.Effect<A2, E2, R2>): Effect.Effect<A2, E2, any> =>\n Effect.provideService(\n Effect.provideService(\n Effect.provideService(\n Effect.provideService(eff as Effect.Effect<A2, E2, any>, Platform.Tag, platform),\n Lifecycle.LifecycleContext,\n lifecycle,\n ),\n tag,\n runtime,\n ),\n 
LogicDiagnostics.LogicPhaseServiceTag,\n phaseService,\n )\n\n const register = (\n label: Lifecycle.Hook,\n subscribe: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>,\n ) =>\n Effect.forkScoped(\n subscribe(\n providePlatformEnv(\n label === 'suspend'\n ? lifecycle.runPlatformSuspend\n : label === 'resume'\n ? lifecycle.runPlatformResume\n : lifecycle.runPlatformReset,\n ).pipe(Effect.asVoid),\n ).pipe(\n Effect.catchAllCause((cause) =>\n lifecycle.notifyError(cause, {\n phase: 'platform',\n hook: label,\n moduleId,\n instanceId,\n origin: 'platform.subscribe',\n }),\n ),\n ),\n ).pipe(Effect.asVoid)\n\n if (snapshot.platformSuspend.length > 0) {\n yield* register('suspend', platform.lifecycle.onSuspend)\n }\n if (snapshot.platformResume.length > 0) {\n yield* register('resume', platform.lifecycle.onResume)\n }\n if (snapshot.platformReset.length > 0 && typeof platform.lifecycle.onReset === 'function') {\n yield* register('reset', platform.lifecycle.onReset)\n }\n }\n\n // Fork run fibers (start after init completes).\n yield* Effect.forEach(pendingRunForks, (eff) => eff, { discard: true })\n\n // lifecycle start: non-blocking (start after ready).\n yield* lifecycle.runStart\n\n // Give forked logics a scheduling chance so upper layers (e.g. 
Root processes) don't dispatch actions before logics are ready.\n yield* Effect.yieldNow()\n })\n}\n","import { Cause, Chunk, Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport type { LifecycleManager } from './Lifecycle.js'\n\nexport type UnhandledErrorKind = 'interrupt' | 'diagnostic' | 'assembly' | 'defect'\n\nexport const classifyUnhandledCause = (cause: Cause.Cause<unknown>): UnhandledErrorKind => {\n if (Cause.isInterrupted(cause)) {\n return 'interrupt'\n }\n\n const all = [\n ...Chunk.toReadonlyArray(Cause.failures(cause)),\n ...Chunk.toReadonlyArray(Cause.defects(cause)),\n ] as ReadonlyArray<any>\n\n if (all.some((err) => err && typeof err === 'object' && err._tag === 'LogicPhaseError')) {\n return 'diagnostic'\n }\n\n if (all.some((err) => err && typeof err === 'object' && err.name === 'MissingModuleRuntimeError')) {\n return 'assembly'\n }\n\n return 'defect'\n}\n\n/**\n * When a Module hits a lifecycle error during Logic execution and no onError handler is registered,\n * emit a warning diagnostic suggesting adding $.lifecycle.onError at the beginning of the module logic.\n */\nexport const emitMissingOnErrorDiagnosticIfNeeded = (\n lifecycle: LifecycleManager,\n moduleId?: string,\n): Effect.Effect<void, never, any> =>\n lifecycle.hasOnErrorHandlers.pipe(\n Effect.flatMap((has) =>\n has || !moduleId\n ? Effect.void\n : Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'lifecycle::missing_on_error',\n severity: 'warning',\n message: `Module \"${moduleId}\" received a lifecycle error but has no $.lifecycle.onError handler registered.`,\n hint: \"Add $.lifecycle.onError((cause, context) => ...) at the beginning of this Module's logic to handle logic errors consistently.\",\n }),\n ),\n )\n\n/**\n * When a lifecycle error originates from \"assembly failure\" (e.g. 
missing Module runtime provider),\n * emit an error diagnostic with actionable fix suggestions.\n *\n * Notes:\n * - This diagnostic explains the error classification and does not change the original error semantics.\n * - If higher layers (e.g. React RuntimeProvider.onError) listen to both lifecycle:error and diagnostic(error),\n * they should de-duplicate or report based on context/phase to avoid duplicate alerts.\n */\nexport const emitAssemblyFailureDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void, never, any> =>\n Effect.sync(() => {\n const defects = Chunk.toReadonlyArray(Cause.defects(cause))\n const missing = defects.find(\n (e) => e && typeof e === 'object' && (e as any).name === 'MissingModuleRuntimeError',\n ) as any\n\n if (!missing) {\n return Effect.void\n }\n\n const tokenId = typeof missing.tokenId === 'string' ? missing.tokenId : '<unknown module id>'\n const fix =\n Array.isArray(missing.fix) && missing.fix.every((l: unknown) => typeof l === 'string')\n ? (missing.fix as ReadonlyArray<string>).join('\\n')\n : undefined\n\n return Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'assembly::missing_module_runtime',\n severity: 'error',\n message: `Missing Module runtime provider for \"${tokenId}\".`,\n hint:\n fix ?? 
'Provide the child implementation in the same scope (imports), or provide a root singleton at app root.',\n kind: 'assembly_failure',\n })\n }).pipe(Effect.flatten)\n","import { Cause, Context, Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\n\nconst phaseDiagnosticsEnabled = (): boolean => isDevEnv()\n\n/**\n * Logic diagnostics:\n * - Currently focuses on initialization noise caused by missing Env services (\"Service not found\").\n *\n * Design intent:\n * - In recommended usage, Runtime / React layers provide Env correctly.\n * - In some startup timing windows, Logic may try to read services before Env is fully provided.\n * - Such errors often occur once, do not change final semantics, but pollute logs.\n *\n * Therefore we emit a warning diagnostic via Debug, explaining likely causes and investigation paths.\n * The real error semantics are still handled by lifecycle.onError / AppRuntime.onError.\n */\n\nconst SERVICE_NOT_FOUND_PREFIX = 'Service not found:'\n\n/**\n * If the Cause contains a `Service not found: ...` error, emit a warning diagnostic:\n * - code: logic::env_service_not_found\n * - message: the original error message\n * - hint: explains this is known startup timing noise and suggests what to check\n */\nexport const emitEnvServiceNotFoundDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n let pretty: string\n try {\n pretty = Cause.pretty(cause, { renderErrorCause: true })\n } catch {\n return\n }\n\n if (!pretty.includes(SERVICE_NOT_FOUND_PREFIX)) {\n return\n }\n\n // 1) Warning diagnostic for the missing Env service itself\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'logic::env_service_not_found',\n severity: 'warning',\n message: pretty,\n hint:\n 'Logic attempted to access an Env service before it was provided. This is a known initialization timing noise in Runtime/React integration. 
' +\n \"If it happens once during early startup and everything works afterward, it's likely harmless; \" +\n 'if it persists or correlates with app issues, verify Runtime.make / RuntimeProvider.layer provides the service.',\n })\n\n // 2) In some cases (e.g. accessing Env too early during Logic setup), we also want to surface\n // logic::invalid_phase to suggest moving Env access to the run section.\n //\n // Because we cannot reliably determine the phase at this point, this is only a supplemental signal.\n // The real phase guard is still handled by LogicPhaseError + emitInvalidPhaseDiagnosticIfNeeded.\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: '$.use is not allowed before Env is fully ready.',\n hint:\n 'Avoid reading services during setup or before Env is ready; ' +\n 'move Env access to the Logic run section, or wrap init via $.lifecycle.onInitRequired.',\n kind: 'env_service_not_ready',\n })\n })\n\nexport interface LogicPhaseError extends Error {\n readonly _tag: 'LogicPhaseError'\n readonly kind: string\n readonly api?: string\n readonly phase: 'setup' | 'run'\n readonly moduleId?: string\n}\n\nexport interface LogicPhaseService {\n readonly current: 'setup' | 'run'\n}\n\nexport const LogicPhaseServiceTag = Context.GenericTag<LogicPhaseService>('@logixjs/LogicPhaseService')\n\n/**\n * LogicUnitService:\n * - Injected while executing each mounted logic unit (scope = the logic unit's setup/run fiber).\n * - Used for trait provenance and other \"bound to the current logic unit\" information (aligned with 022-module logicUnitId).\n *\n * Constraints:\n * - Read-only (must not mutate runtime state); only a provenance/diagnostics anchor.\n */\nexport interface LogicUnitService {\n readonly logicUnitId: string\n readonly logicUnitIdKind: 'explicit' | 'derived'\n readonly logicUnitLabel: string\n readonly path?: string\n}\n\nexport class LogicUnitServiceTag extends 
Context.Tag('@logixjs/LogicUnitService')<\n LogicUnitServiceTag,\n LogicUnitService\n>() {}\n\nexport const makeLogicPhaseError = (\n kind: string,\n api: string,\n phase: 'setup' | 'run',\n moduleId?: string,\n): LogicPhaseError =>\n Object.assign(new Error(`[LogicPhaseError] ${api} is not allowed in ${phase} phase (kind=${kind}).`), {\n _tag: 'LogicPhaseError',\n kind,\n api,\n phase,\n moduleId,\n }) as LogicPhaseError\n\n/**\n * Extracts LogicPhaseError from a Cause and emits it as a diagnostic:\n * - code: logic::invalid_phase\n * - kind: concrete violation kind (e.g. use_in_setup)\n */\nexport const emitInvalidPhaseDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (!phaseDiagnosticsEnabled()) {\n return\n }\n\n const allErrors = [...Cause.failures(cause), ...Cause.defects(cause)]\n\n for (const err of allErrors) {\n const logicErr = err as any\n if (logicErr && logicErr._tag === 'LogicPhaseError') {\n const phaseErr = logicErr as LogicPhaseError\n const hint =\n phaseErr.kind === 'use_in_setup' || phaseErr.kind === 'lifecycle_in_setup'\n ? 'The setup phase must not read Env/services or run long-lived logic; move the relevant calls to the run phase.'\n : phaseErr.kind === 'lifecycle_in_run'\n ? 'Do not register $.lifecycle.* in the run phase (setup-only). Move lifecycle registrations to the synchronous part of Module.logic builder (before return).'\n : phaseErr.kind === 'traits_in_run' || phaseErr.kind === 'traits_declare_in_run'\n ? 'Traits are frozen after setup; move $.traits.declare to LogicPlan.setup or the setup registration phase of Module.logic builder.'\n : 'Move logic to the run phase; keep setup for registrations only.'\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: phaseErr.moduleId ?? moduleId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: `${phaseErr.api ?? 
phaseErr.kind} is not allowed in ${phaseErr.phase} phase.`,\n hint,\n kind: phaseErr.kind,\n })\n\n // Return after the first LogicPhaseError match.\n return\n }\n }\n })\n","import { Context, Effect } from 'effect'\n\nexport interface Service {\n readonly lifecycle: {\n readonly onSuspend: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n readonly onResume: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n readonly onReset?: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n }\n\n /**\n * Platform signal broadcaster (for host integration and tests): triggers registered lifecycle handlers.\n *\n * Notes:\n * - The default implementation should be a safe no-op.\n * - Failure policy is decided by the platform implementation; the runtime should ensure \"do not terminate the instance by default\".\n */\n readonly emitSuspend: () => Effect.Effect<void, never, any>\n readonly emitResume: () => Effect.Effect<void, never, any>\n readonly emitReset: () => Effect.Effect<void, never, any>\n}\n\nexport const Tag = Context.GenericTag<Service>('@logixjs/Platform')\n","import { Effect, Ref } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\n\ntype PressureKey = string\n\ntype PressureCooldownState = {\n readonly lastEmittedAtMs: number\n readonly suppressedCount: number\n}\n\nconst keyOf = (trigger: Debug.TriggerRef): PressureKey =>\n `${trigger.kind}::${typeof trigger.name === 'string' ? 
trigger.name : ''}`\n\nconst nowMs = Effect.clockWith((clock) => clock.currentTimeMillis)\n\nexport interface ConcurrencyDiagnostics {\n readonly emitPressureIfNeeded: (args: {\n readonly policy: RuntimeInternalsResolvedConcurrencyPolicy\n readonly trigger: Debug.TriggerRef\n readonly backlogCount?: number\n readonly inFlight?: number\n readonly saturatedDurationMs?: number\n }) => Effect.Effect<void>\n readonly emitUnboundedPolicyIfNeeded: (args: {\n readonly policy: RuntimeInternalsResolvedConcurrencyPolicy\n readonly trigger: Debug.TriggerRef\n }) => Effect.Effect<void>\n}\n\nexport const make = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n}): Effect.Effect<ConcurrencyDiagnostics> =>\n Effect.gen(function* () {\n const pressureCooldownByKeyRef = yield* Ref.make<Readonly<Record<PressureKey, PressureCooldownState>>>({})\n\n const unboundedEnabledEmittedRef = yield* Ref.make(false)\n const unboundedBlockedEmittedRef = yield* Ref.make(false)\n\n const emitPressureIfNeeded: ConcurrencyDiagnostics['emitPressureIfNeeded'] = (inArgs) =>\n Effect.gen(function* () {\n const policy = inArgs.policy\n\n const backlogCount = inArgs.backlogCount ?? 0\n const saturatedDurationMs = inArgs.saturatedDurationMs ?? 
0\n\n const threshold = policy.pressureWarningThreshold\n const meetsThreshold =\n backlogCount >= threshold.backlogCount || saturatedDurationMs >= threshold.backlogDurationMs\n\n if (!meetsThreshold) {\n return\n }\n\n const cooldownMs = policy.warningCooldownMs\n const now = yield* nowMs\n const key = keyOf(inArgs.trigger)\n\n const decision = yield* Ref.modify(\n pressureCooldownByKeyRef,\n (\n byKey,\n ): readonly [\n { readonly _tag: 'emit'; readonly suppressedCount: number } | { readonly _tag: 'suppress' },\n Readonly<Record<PressureKey, PressureCooldownState>>,\n ] => {\n const prev = byKey[key]\n if (prev && now - prev.lastEmittedAtMs < cooldownMs) {\n return [\n { _tag: 'suppress' },\n {\n ...byKey,\n [key]: {\n lastEmittedAtMs: prev.lastEmittedAtMs,\n suppressedCount: prev.suppressedCount + 1,\n },\n },\n ] as const\n }\n\n const suppressedCount = prev?.suppressedCount ?? 0\n return [\n { _tag: 'emit', suppressedCount },\n {\n ...byKey,\n [key]: {\n lastEmittedAtMs: now,\n suppressedCount: 0,\n },\n },\n ] as const\n },\n )\n\n if (decision._tag === 'suppress') {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.concurrencyLimitScope,\n limit: policy.concurrencyLimit,\n backlogCount,\n saturatedDurationMs,\n threshold: {\n backlogCount: threshold.backlogCount,\n backlogDurationMs: threshold.backlogDurationMs,\n },\n cooldownMs,\n degradeStrategy: decision.suppressedCount > 0 ? 
('cooldown' as const) : ('none' as const),\n suppressedCount: decision.suppressedCount,\n sampleRate: 1,\n droppedCount: 0,\n }\n if (typeof inArgs.inFlight === 'number' && Number.isFinite(inArgs.inFlight)) {\n details.inFlight = inArgs.inFlight\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::pressure',\n severity: 'warning',\n message: 'Concurrency pressure detected (backpressure / saturation).',\n hint: 'Reduce trigger frequency, split work, switch to runLatest or batch processing; or tune concurrency/backpressure limits via concurrencyPolicy.',\n kind: 'concurrency:pressure',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n })\n\n const emitUnboundedPolicyIfNeeded: ConcurrencyDiagnostics['emitUnboundedPolicyIfNeeded'] = (inArgs) =>\n Effect.gen(function* () {\n const policy = inArgs.policy\n\n // 1) effective unbounded: emit only once (SC-004 / FR-004)\n if (policy.concurrencyLimit === 'unbounded' && policy.allowUnbounded === true) {\n const shouldEmit = yield* Ref.modify(unboundedEnabledEmittedRef, (emitted) =>\n emitted ? 
([false, true] as const) : ([true, true] as const),\n )\n if (!shouldEmit) {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.concurrencyLimitScope,\n limit: policy.concurrencyLimit,\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::unbounded_enabled',\n severity: 'error',\n message: 'Unbounded concurrency is enabled (risk: resource exhaustion).',\n hint:\n 'Enable only for short-lived, controlled, cancelable fan-out; prefer bounded concurrency and increase gradually; ' +\n 'avoid piling up long-running or never-ending tasks under unbounded.',\n kind: 'concurrency:unbounded_enabled',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n return\n }\n\n // 2) requested unbounded without explicit allow: fall back to bounded + diagnostic (T023)\n const requestedUnbounded =\n policy.requestedConcurrencyLimit === 'unbounded' && policy.concurrencyLimit !== 'unbounded'\n\n if (!requestedUnbounded) {\n return\n }\n\n const shouldEmit = yield* Ref.modify(unboundedBlockedEmittedRef, (emitted) =>\n emitted ? 
([false, true] as const) : ([true, true] as const),\n )\n if (!shouldEmit) {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.requestedConcurrencyLimitScope,\n limit: policy.requestedConcurrencyLimit,\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::unbounded_requires_opt_in',\n severity: 'error',\n message: 'Unbounded concurrency was requested but is not allowed; falling back to bounded concurrency.',\n hint:\n 'If you really need unbounded, explicitly set concurrencyPolicy.allowUnbounded = true; ' +\n 'otherwise set concurrencyPolicy.concurrencyLimit to a positive integer limit.',\n kind: 'concurrency:unbounded_blocked',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n })\n\n return { emitPressureIfNeeded, emitUnboundedPolicyIfNeeded }\n })\n","import { Context, Effect, FiberRef, Option, Schema, Stream, SubscriptionRef } from 'effect'\nimport { create } from 'mutative'\nimport type * as Logix from './module.js'\nimport * as Logic from './LogicMiddleware.js'\nimport * as Action from '../../action.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport { mutateWithPatchPaths } from './mutativePatches.js'\nimport * as FlowRuntime from './FlowRuntime.js'\nimport * as MatchBuilder from './MatchBuilder.js'\nimport * as Platform from './Platform.js'\nimport * as Lifecycle from './Lifecycle.js'\nimport * as Debug from './DebugSink.js'\nimport * as LogicDiagnostics from './LogicDiagnostics.js'\nimport { isDevEnv } from './env.js'\nimport { RunSessionTag } from '../../observability/runSession.js'\nimport * as Root from '../../root.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport type * as ModuleTraits from './ModuleTraits.js'\nimport { getRuntimeInternals, setBoundInternals } from './runtimeInternalsAccessor.js'\nimport type { AnyModuleShape, ModuleRuntime, StateOf, ActionOf } from 
'./module.js'\n\n// Local IntentBuilder factory; equivalent to the old internal/dsl/LogicBuilder.makeIntentBuilderFactory.\nconst LogicBuilderFactory = <Sh extends AnyModuleShape, R = never>(\n runtime: ModuleRuntime<StateOf<Sh>, ActionOf<Sh>>,\n runtimeInternals: RuntimeInternals,\n) => {\n const flowApi = FlowRuntime.make<Sh, R>(runtime, runtimeInternals)\n\n return <T>(stream: Stream.Stream<T>, triggerName?: string): Logic.IntentBuilder<T, Sh, R> => {\n const runWithStateTransaction: TaskRunner.TaskRunnerRuntime['runWithStateTransaction'] = (origin, body) =>\n runtimeInternals.txn.runWithStateTransaction(origin as any, body)\n\n const taskRunnerRuntime: TaskRunner.TaskRunnerRuntime = {\n moduleId: runtime.moduleId,\n instanceId: runtimeInternals.instanceId,\n runWithStateTransaction,\n resolveConcurrencyPolicy: runtimeInternals.concurrency.resolveConcurrencyPolicy,\n }\n\n const builder = {\n debounce: (ms: number) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.debounce<T>(ms)(stream), triggerName),\n throttle: (ms: number) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.throttle<T>(ms)(stream), triggerName),\n filter: (predicate: (value: T) => boolean) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.filter(predicate)(stream), triggerName),\n map: <U>(f: (value: T) => U) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(stream.pipe(Stream.map(f)), triggerName),\n run<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.run<T, A, E, R2>(eff, options)(stream)\n },\n runLatest<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runLatest<T, A, E, R2>(eff, options)(stream)\n },\n runExhaust<A = void, E = 
never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runExhaust<T, A, E, R2>(eff, options)(stream)\n },\n runParallel<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runParallel<T, A, E, R2>(eff, options)(stream)\n },\n runFork: <A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n ): Logic.Of<Sh, R & R2, void, E> =>\n Effect.forkScoped(flowApi.run<T, A, E, R2>(eff)(stream)).pipe(Effect.asVoid) as Logic.Of<Sh, R & R2, void, E>,\n runParallelFork: <A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n ): Logic.Of<Sh, R & R2, void, E> =>\n Effect.forkScoped(flowApi.runParallel<T, A, E, R2>(eff)(stream)).pipe(Effect.asVoid) as Logic.Of<\n Sh,\n R & R2,\n void,\n E\n >,\n runTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'task', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runParallelTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'parallel', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? 
triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runLatestTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'latest', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runExhaustTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'exhaust', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n toStream: () => stream,\n update: (\n reducer: (prev: StateOf<Sh>, payload: T) => StateOf<Sh> | Effect.Effect<StateOf<Sh>, any, any>,\n ): Logic.Of<Sh, R, void, never> =>\n Stream.runForEach(stream, (payload) =>\n taskRunnerRuntime.runWithStateTransaction(\n {\n kind: 'watcher:update',\n name: triggerName,\n },\n () =>\n Effect.gen(function* () {\n const prev = (yield* runtime.getState) as StateOf<Sh>\n const next = reducer(prev, payload)\n if (Effect.isEffect(next)) {\n const exit = yield* Effect.exit(next as Effect.Effect<StateOf<Sh>, any, any>)\n if (exit._tag === 'Failure') {\n yield* Effect.logError('Flow error', exit.cause)\n return\n }\n yield* runtime.setState(exit.value as StateOf<Sh>)\n return\n }\n yield* runtime.setState(next as StateOf<Sh>)\n }),\n ),\n ).pipe(Effect.catchAllCause((cause) => Effect.logError('Flow error', cause))) as Logic.Of<Sh, R, void, never>,\n mutate: (reducer: (draft: Logic.Draft<StateOf<Sh>>, payload: T) => void): Logic.Of<Sh, R, void, never> =>\n Stream.runForEach(stream, (payload) =>\n taskRunnerRuntime.runWithStateTransaction(\n {\n kind: 'watcher:mutate',\n name: triggerName,\n },\n () =>\n Effect.gen(function* () {\n const prev = (yield* runtime.getState) as StateOf<Sh>\n const 
recordPatch = runtimeInternals.txn.recordStatePatch\n const updateDraft = runtimeInternals.txn.updateDraft\n\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as StateOf<Sh>, (draft) => {\n reducer(draft as Logic.Draft<StateOf<Sh>>, payload)\n })\n\n for (const path of patchPaths) {\n recordPatch(path, 'unknown')\n }\n\n updateDraft(nextState)\n }),\n ),\n ).pipe(Effect.catchAllCause((cause) => Effect.logError('Flow error', cause))) as Logic.Of<Sh, R, void, never>,\n } as Omit<Logic.IntentBuilder<T, Sh, R>, 'pipe'>\n\n const pipe: Logic.IntentBuilder<T, Sh, R>['pipe'] = function (this: unknown) {\n // eslint-disable-next-line prefer-rest-params\n const fns = arguments as unknown as ReadonlyArray<\n (self: Logic.IntentBuilder<T, Sh, R>) => Logic.IntentBuilder<T, Sh, R>\n >\n let acc: Logic.IntentBuilder<T, Sh, R> = builder as Logic.IntentBuilder<T, Sh, R>\n for (let i = 0; i < fns.length; i++) {\n acc = fns[i](acc)\n }\n return acc\n }\n\n return Object.assign(builder, { pipe }) as Logic.IntentBuilder<T, Sh, R>\n }\n}\nimport type { BoundApi } from './module.js'\n\n/**\n * BoundApi implementation: creates a pre-bound `$` for a given Store shape + runtime.\n *\n * Note: public types and entrypoint signatures live in api/BoundApi.ts; this file only hosts the implementation.\n */\nexport function make<Sh extends Logix.AnyModuleShape, R = never>(\n shape: Sh,\n runtime: Logix.ModuleRuntime<Logix.StateOf<Sh>, Logix.ActionOf<Sh>>,\n options?: {\n readonly getPhase?: () => 'setup' | 'run'\n readonly phaseService?: LogicDiagnostics.LogicPhaseService\n readonly moduleId?: string\n readonly logicUnit?: LogicDiagnostics.LogicUnitService\n },\n): BoundApi<Sh, R> {\n const runtimeInternals = getRuntimeInternals(runtime as any)\n\n const getPhase = options?.getPhase ?? (() => 'run')\n const getCurrentPhase = (): 'setup' | 'run' => {\n const phaseService = options?.phaseService\n const phase = phaseService?.current ?? getPhase()\n return phase === 'setup' ? 
'setup' : 'run'\n }\n const guardRunOnly = (kind: string, api: string) => {\n const phaseService = options?.phaseService\n const phase = phaseService?.current ?? getPhase()\n if (phase === 'setup') {\n throw LogicDiagnostics.makeLogicPhaseError(kind, api, 'setup', options?.moduleId)\n }\n }\n const flowApi = FlowRuntime.make<Sh, R>(runtime, runtimeInternals)\n\n const makeIntentBuilder = (runtime_: Logix.ModuleRuntime<any, any>) =>\n LogicBuilderFactory<Sh, R>(runtime_, runtimeInternals)\n const withLifecycle = <A>(\n available: (manager: Lifecycle.LifecycleManager) => Effect.Effect<A, never, any>,\n missing: () => Effect.Effect<A, never, any>,\n ) =>\n Effect.serviceOption(Lifecycle.LifecycleContext).pipe(\n Effect.flatMap((maybe) =>\n Option.match(maybe, {\n onSome: available,\n onNone: missing,\n }),\n ),\n )\n const withPlatform = (invoke: (platform: Platform.Service) => Effect.Effect<void, never, any>) =>\n Effect.serviceOption(Platform.Tag).pipe(\n Effect.flatMap((maybe) =>\n Option.match(maybe, {\n onSome: invoke,\n onNone: () => Effect.void,\n }),\n ),\n )\n\n const emitSetupOnlyViolation = (api: string): Effect.Effect<void> =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: `${api} is setup-only and is not allowed in run phase.`,\n hint:\n 'Move $.lifecycle.* calls to the synchronous part of Module.logic builder (before return) for registration; ' +\n 'for dynamic resource cleanup in the run phase, use Effect.acquireRelease / Scope finalizer instead of registering onDestroy late.',\n kind: 'lifecycle_in_run',\n })\n\n const createIntentBuilder = <T>(stream: Stream.Stream<T>, triggerName?: string) =>\n makeIntentBuilder(runtime)(stream, triggerName)\n\n const onceInRunSession = (key: string): Effect.Effect<boolean, never, any> =>\n Effect.serviceOption(RunSessionTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.local.once(key) : true)),\n )\n\n let cachedDiagnosticsLevel: Debug.DiagnosticsLevel | undefined\n\n const isModuleLike = (\n value: unknown,\n ): value is {\n readonly _kind: 'ModuleDef' | 'Module'\n readonly id: string\n readonly tag: Context.Tag<any, Logix.ModuleRuntime<any, any>>\n readonly schemas?: Record<string, unknown>\n readonly meta?: Record<string, unknown>\n readonly dev?: { readonly source?: { readonly file: string; readonly line: number; readonly column: number } }\n } =>\n Boolean(\n value &&\n typeof value === 'object' &&\n ((value as any)._kind === 'ModuleDef' || (value as any)._kind === 'Module') &&\n 'tag' in (value as object) &&\n Context.isTag((value as any).tag),\n )\n\n const buildModuleHandle = (\n tag: Context.Tag<any, Logix.ModuleRuntime<any, any>>,\n rt: Logix.ModuleRuntime<any, any>,\n ): unknown => {\n const actionsProxy: Logix.ModuleHandle<any>['actions'] = new Proxy(\n {},\n {\n get: (_target, prop) => (payload: unknown) =>\n rt.dispatch({\n _tag: prop as string,\n payload,\n }),\n },\n ) as Logix.ModuleHandle<any>['actions']\n\n const handle: Logix.ModuleHandle<any> = {\n read: (selector) => Effect.map(rt.getState, selector),\n changes: rt.changes,\n dispatch: rt.dispatch,\n actions$: rt.actions$,\n actions: actionsProxy,\n }\n\n const EXTEND_HANDLE = Symbol.for('logix.module.handle.extend')\n const extend = (tag as any)?.[EXTEND_HANDLE] as\n | ((runtime: Logix.ModuleRuntime<any, any>, base: Logix.ModuleHandle<any>) => unknown)\n | undefined\n\n return typeof extend === 'function' ? (extend(rt, handle) ?? 
handle) : handle\n }\n\n const emitModuleDescriptorOnce = (\n module: {\n readonly id: string\n readonly tag: any\n readonly schemas?: Record<string, unknown>\n readonly meta?: Record<string, unknown>\n readonly dev?: { readonly source?: { readonly file: string; readonly line: number; readonly column: number } }\n },\n rt: Logix.ModuleRuntime<any, any>,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n // Hot-path guard: never emit events when diagnostics are off.\n if (cachedDiagnosticsLevel === 'off') return\n\n const key = `module_descriptor:${String(rt.instanceId ?? 'unknown')}`\n const shouldEmit = yield* onceInRunSession(key)\n if (!shouldEmit) return\n\n const actionKeys = Object.keys((module.tag as any)?.shape?.actionMap ?? {})\n\n const internalSymbol = Symbol.for('logix.module.internal')\n const internal = (module as any)[internalSymbol] as { readonly mounted?: ReadonlyArray<any> } | undefined\n\n const logicUnits = (internal?.mounted ?? []).map((u: any) => ({\n kind: String(u?.kind ?? 'user'),\n id: String(u?.id ?? ''),\n derived: u?.derived ? true : undefined,\n name: typeof u?.name === 'string' ? u.name : undefined,\n }))\n\n const schemaKeys = module.schemas && typeof module.schemas === 'object' ? Object.keys(module.schemas) : undefined\n\n const meta = module.meta && typeof module.meta === 'object' ? module.meta : undefined\n\n const source = module.dev?.source\n\n const traitsSnapshot = runtimeInternals.traits.getModuleTraitsSnapshot()\n const traits = traitsSnapshot\n ? 
{\n digest: traitsSnapshot.digest,\n count: traitsSnapshot.traits.length,\n }\n : undefined\n\n const data = {\n id: module.id,\n moduleId: String(rt.moduleId),\n instanceId: String(rt.instanceId),\n actionKeys,\n logicUnits,\n schemaKeys,\n meta,\n source,\n traits,\n }\n\n yield* Debug.record({\n type: 'trace:module:descriptor',\n moduleId: rt.moduleId,\n instanceId: rt.instanceId,\n data,\n } as any)\n })\n\n /**\n * strict: resolve a Module runtime only from the current Effect environment.\n *\n * Notes:\n * - With multiple roots / instances, any process-wide registry cannot express the correct semantics.\n * - A missing provider is a wiring error: fail deterministically and provide actionable hints (more details in dev/test).\n */\n const resolveModuleRuntime = (\n tag: Context.Tag<any, Logix.ModuleRuntime<any, any>>,\n ): Effect.Effect<Logix.ModuleRuntime<any, any>, never, any> =>\n Effect.gen(function* () {\n const requestedModuleId = typeof (tag as any)?.id === 'string' ? ((tag as any).id as string) : undefined\n const fromModuleId = typeof options?.moduleId === 'string' ? 
options.moduleId : runtime.moduleId\n\n // self: always allow resolving the current ModuleRuntime (both Bound.make and runtime injection paths).\n if (requestedModuleId && requestedModuleId === runtime.moduleId) {\n return runtime as unknown as Logix.ModuleRuntime<any, any>\n }\n\n const fromImports = runtimeInternals.imports.get(tag as unknown as Context.Tag<any, any>)\n if (fromImports) {\n return fromImports as unknown as Logix.ModuleRuntime<any, any>\n }\n\n // Bound.make (no moduleId context): allow resolving from the current Effect env (useful for tests/scaffolding).\n if (typeof options?.moduleId !== 'string') {\n const fromEnv = yield* Effect.serviceOption(tag as any)\n if (Option.isSome(fromEnv)) {\n return fromEnv.value as unknown as Logix.ModuleRuntime<any, any>\n }\n }\n\n // 2) Not found: die immediately — this is a wiring error; guide the caller to fix the composition.\n const tokenId = requestedModuleId ?? '<unknown module id>'\n const fix: string[] = isDevEnv()\n ? [\n '- Provide the child implementation in the same scope (imports).',\n ` Example: ${fromModuleId ?? 'ParentModule'}.implement({ imports: [${requestedModuleId ?? 'ChildModule'}.impl], ... })`,\n '- If you intentionally want a root singleton, provide it at app root (Runtime.make(...,{ layer }) / root imports),',\n ' and use Root.resolve(ModuleTag) (instead of $.use) at the callsite.',\n ]\n : []\n\n const err = new Error(\n isDevEnv()\n ? [\n '[MissingModuleRuntimeError] Cannot resolve ModuleRuntime for ModuleTag.',\n '',\n `tokenId: ${tokenId}`,\n 'entrypoint: logic.$.use',\n 'mode: strict',\n `from: ${fromModuleId ?? '<unknown module id>'}`,\n `startScope: moduleId=${fromModuleId ?? '<unknown>'}, instanceId=${String(runtime.instanceId ?? 
'<unknown>')}`,\n '',\n 'fix:',\n ...fix,\n ].join('\\n')\n : '[MissingModuleRuntimeError] module runtime not found',\n )\n\n ;(err as any).tokenId = tokenId\n ;(err as any).entrypoint = 'logic.$.use'\n ;(err as any).mode = 'strict'\n ;(err as any).from = fromModuleId\n ;(err as any).startScope = {\n moduleId: fromModuleId,\n instanceId: String(runtime.instanceId ?? '<unknown>'),\n }\n ;(err as any).fix = fix\n\n err.name = 'MissingModuleRuntimeError'\n return yield* Effect.die(err)\n })\n\n const stateApi: BoundApi<Sh, R>['state'] = {\n read: runtime.getState,\n update: (f) =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n const prev = yield* runtime.getState\n return yield* runtime.setState(f(prev))\n }\n\n const body = () => Effect.flatMap(runtime.getState, (prev) => runtime.setState(f(prev)))\n\n return yield* runtimeInternals\n ? runtimeInternals.txn.runWithStateTransaction({ kind: 'state', name: 'update' } as any, body)\n : body()\n }),\n mutate: (f) =>\n Effect.gen(function* () {\n const recordPatch = runtimeInternals?.txn.recordStatePatch\n const updateDraft = runtimeInternals?.txn.updateDraft\n\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n const prev = yield* runtime.getState\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as Logix.StateOf<Sh>, (draft) => {\n f(draft as Logic.Draft<Logix.StateOf<Sh>>)\n })\n\n for (const path of patchPaths) {\n recordPatch?.(path, 'unknown')\n }\n\n updateDraft?.(nextState)\n return\n }\n\n const body = () =>\n Effect.gen(function* () {\n const prev = yield* runtime.getState\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as Logix.StateOf<Sh>, (draft) => {\n f(draft as Logic.Draft<Logix.StateOf<Sh>>)\n })\n\n for (const path of patchPaths) {\n recordPatch?.(path, 'unknown')\n }\n\n updateDraft?.(nextState)\n })\n\n return yield* runtimeInternals\n ? 
runtimeInternals.txn.runWithStateTransaction({ kind: 'state', name: 'mutate' } as any, body)\n : body()\n }),\n ref: runtime.ref,\n }\n\n const actions = shape.actionMap as BoundApi<Sh, R>['actions']\n\n const dispatcherCache = new Map<string, (...args: any[]) => Effect.Effect<void, any, any>>()\n\n const hasAction = (key: string): boolean => Object.prototype.hasOwnProperty.call(actions as any, key)\n\n const dispatchers: BoundApi<Sh, R>['dispatchers'] = new Proxy({} as any, {\n get: (_target, prop) => {\n if (typeof prop !== 'string') return undefined\n if (!hasAction(prop)) return undefined\n\n const cached = dispatcherCache.get(prop)\n if (cached) return cached\n\n const token = (actions as any)[prop] as Action.AnyActionToken\n const fn = (...args: any[]) => runtime.dispatch((token as any)(...args))\n\n dispatcherCache.set(prop, fn)\n return fn\n },\n has: (_target, prop) => typeof prop === 'string' && hasAction(prop),\n ownKeys: () => Object.keys(actions as any),\n getOwnPropertyDescriptor: (_target, prop) => {\n if (typeof prop !== 'string') return undefined\n if (!hasAction(prop)) return undefined\n return { enumerable: true, configurable: true }\n },\n }) as unknown as BoundApi<Sh, R>['dispatchers']\n\n const dispatch: BoundApi<Sh, R>['dispatch'] = (...args: any[]) => {\n const [first, second] = args\n\n if (typeof first === 'string') {\n return runtime.dispatch({ _tag: first, payload: second } as Logix.ActionOf<Sh>)\n }\n\n if (Action.isActionToken(first)) {\n return runtime.dispatch((first as any)(second))\n }\n\n return runtime.dispatch(first as Logix.ActionOf<Sh>)\n }\n\n const matchApi = <V>(value: V): Logic.FluentMatch<V> => MatchBuilder.makeMatch(value)\n\n const matchTagApi = <V extends { _tag: string }>(value: V): Logic.FluentMatchTag<V> =>\n MatchBuilder.makeMatchTag(value)\n\n // Primary reducer registration: write into the reducer map via the runtime's internal registrar.\n const reducer: BoundApi<Sh, R>['reducer'] = (tag, fn) => {\n return 
Effect.sync(() => {\n runtimeInternals.txn.registerReducer(String(tag), fn as any)\n }) as any\n }\n\n const effect: BoundApi<Sh, R>['effect'] = (token, handler) =>\n Effect.gen(function* () {\n if (!Action.isActionToken(token)) {\n return yield* Effect.dieMessage('[BoundApi.effect] token must be an ActionToken')\n }\n\n const phase = getCurrentPhase()\n const logicUnit = options?.logicUnit\n\n yield* runtimeInternals.effects.registerEffect({\n actionTag: token.tag,\n handler: handler as any,\n phase,\n ...(logicUnit\n ? {\n logicUnit: {\n logicUnitId: logicUnit.logicUnitId,\n logicUnitLabel: logicUnit.logicUnitLabel,\n path: logicUnit.path,\n },\n }\n : {}),\n })\n }) as any\n\n const api: BoundApi<Sh, R> = {\n root: {\n resolve: (tag: any) => {\n guardRunOnly('root_resolve_in_setup', '$.root.resolve')\n return Root.resolve(tag, {\n entrypoint: 'logic.$.root.resolve',\n waitForReady: true,\n }) as any\n },\n },\n state: stateApi,\n actions,\n dispatchers,\n dispatch,\n flow: flowApi,\n match: matchApi,\n matchTag: matchTagApi,\n lifecycle: {\n onInitRequired: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onInitRequired') as any\n }\n runtimeInternals.lifecycle.registerInitRequired(eff as any)\n return Effect.void as any\n },\n onStart: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onStart') as any\n }\n runtimeInternals.lifecycle.registerStart(eff as any)\n return Effect.void as any\n },\n onInit: (eff: Logic.Of<Sh, R, void, never>) => {\n // Legacy alias: same semantics as onInitRequired (to reduce migration friction).\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onInit') as any\n }\n runtimeInternals.lifecycle.registerInitRequired(eff as any)\n return Effect.void as any\n },\n onDestroy: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return 
emitSetupOnlyViolation('$.lifecycle.onDestroy') as any\n }\n runtimeInternals.lifecycle.registerDestroy(eff as any)\n return Effect.void as any\n },\n onError: (\n handler: (\n cause: import('effect').Cause.Cause<unknown>,\n context: Lifecycle.ErrorContext,\n ) => Effect.Effect<void, never, R>,\n ) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onError') as any\n }\n runtimeInternals.lifecycle.registerOnError(handler as any)\n return Effect.void as any\n },\n onSuspend: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onSuspend') as any\n }\n runtimeInternals.lifecycle.registerPlatformSuspend(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n onResume: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onResume') as any\n }\n runtimeInternals.lifecycle.registerPlatformResume(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n onReset: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onReset') as any\n }\n runtimeInternals.lifecycle.registerPlatformReset(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n },\n traits: {\n declare: (traits: ModuleTraits.TraitSpec) => {\n if (getCurrentPhase() === 'run') {\n throw LogicDiagnostics.makeLogicPhaseError(\n 'traits_declare_in_run',\n '$.traits.declare',\n 'run',\n options?.moduleId,\n )\n }\n\n if (!traits || typeof traits !== 'object') {\n throw new Error('[InvalidTraitsDeclaration] $.traits.declare expects an object.')\n }\n\n const logicUnit = options?.logicUnit ?? 
{\n logicUnitId: 'unknown',\n logicUnitIdKind: 'derived' as const,\n logicUnitLabel: 'logicUnit:unknown',\n path: undefined as string | undefined,\n }\n\n runtimeInternals.traits.registerModuleTraitsContribution({\n traits,\n provenance: {\n originType: 'logicUnit',\n originId: logicUnit.logicUnitId,\n originIdKind: logicUnit.logicUnitIdKind,\n originLabel: logicUnit.logicUnitLabel,\n path: logicUnit.path,\n },\n })\n },\n source: {\n refresh: (fieldPath: string, options?: { readonly force?: boolean }) =>\n Effect.gen(function* () {\n const handler = runtimeInternals.traits.getSourceRefreshHandler(fieldPath) as\n | ((state: Logix.StateOf<Sh>) => Effect.Effect<void, never, any>)\n | undefined\n if (!handler) {\n // If no refresh handler is registered, treat it as a no-op to avoid throwing when StateTraitProgram is not installed.\n return yield* Effect.void\n }\n\n const force = options?.force === true\n const runHandler = (state: Logix.StateOf<Sh>) =>\n force ? Effect.locally(TaskRunner.forceSourceRefresh, true)(handler(state)) : handler(state)\n\n // Never call enqueueTransaction inside the transaction window (it can deadlock):\n // - Run the handler inside the current transaction so it writes to the draft via bound.state.mutate.\n // - The outer transaction window is responsible for commit + debug aggregation.\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n const state = (yield* runtime.getState) as Logix.StateOf<Sh>\n return yield* runHandler(state)\n }\n\n // Treat one source-refresh as a dedicated transaction entry.\n return yield* runtimeInternals.txn.runWithStateTransaction(\n {\n kind: 'source-refresh',\n name: fieldPath,\n } as any,\n () =>\n Effect.gen(function* () {\n const state = (yield* runtime.getState) as Logix.StateOf<Sh>\n return yield* runHandler(state)\n }),\n )\n }),\n },\n },\n reducer,\n effect,\n use: new Proxy(() => {}, {\n apply: (_target, _thisArg, [arg]) => {\n guardRunOnly('use_in_setup', 
'$.use')\n if (isModuleLike(arg)) {\n const domain = arg\n const tag = domain.tag as unknown as Context.Tag<any, Logix.ModuleRuntime<any, any>>\n\n const resolveAndBuild = resolveModuleRuntime(tag).pipe(Effect.map((rt) => buildModuleHandle(tag, rt)))\n\n const resolveWithDescriptor = resolveModuleRuntime(tag).pipe(\n Effect.tap((rt) => emitModuleDescriptorOnce(domain, rt)),\n Effect.map((rt) => buildModuleHandle(tag, rt)),\n )\n\n const detectAndSelect = FiberRef.get(Debug.currentDiagnosticsLevel).pipe(\n Effect.tap((level) => {\n cachedDiagnosticsLevel = level\n }),\n Effect.flatMap((level) => (level === 'off' ? resolveAndBuild : resolveWithDescriptor)),\n )\n\n // 022 perf gate: when diagnostics are off, $.use(module) and $.use(module.tag) must be equivalent with zero extra overhead.\n // Constraint: Effect is a value (reusable), so we must one-time cache at execution time instead of branching at construction time.\n return Effect.suspend(() => {\n if (cachedDiagnosticsLevel === 'off') {\n return resolveAndBuild\n }\n\n if (cachedDiagnosticsLevel !== undefined) {\n return resolveWithDescriptor\n }\n\n return detectAndSelect\n }) as unknown as Logic.Of<Sh, R, any, never>\n }\n if (Context.isTag(arg)) {\n const candidate = arg as { _kind?: unknown }\n\n // Module: return a read-only ModuleHandle view.\n if (candidate._kind === 'ModuleTag') {\n return resolveModuleRuntime(arg as any).pipe(\n Effect.map((rt: Logix.ModuleRuntime<any, any>) => buildModuleHandle(arg as any, rt)),\n ) as unknown as Logic.Of<Sh, R, any, never>\n }\n\n // Regular service tag: read the service from Env.\n return arg as unknown as Logic.Of<Sh, R, any, never>\n }\n return Effect.die('BoundApi.use: unsupported argument') as unknown as Logic.Of<Sh, R, any, never>\n },\n }) as unknown as BoundApi<Sh, R>['use'],\n onAction: new Proxy(() => {}, {\n apply: (_target, _thisArg, args) => {\n guardRunOnly('use_in_setup', '$.onAction')\n const arg = args[0]\n if (Action.isActionToken(arg)) {\n const tag 
= arg.tag\n return createIntentBuilder(\n runtime.actions$.pipe(\n Stream.filter((a: any) => a._tag === tag || a.type === tag),\n Stream.map((a: any) => a.payload),\n ),\n tag,\n )\n }\n if (typeof arg === 'function') {\n return createIntentBuilder(runtime.actions$.pipe(Stream.filter(arg)))\n }\n if (typeof arg === 'string') {\n return createIntentBuilder(\n runtime.actions$.pipe(Stream.filter((a: any) => a._tag === arg || a.type === arg)),\n arg,\n )\n }\n if (typeof arg === 'object' && arg !== null) {\n if ('_tag' in arg) {\n return createIntentBuilder(\n runtime.actions$.pipe(Stream.filter((a: any) => a._tag === (arg as any)._tag)),\n String((arg as any)._tag),\n )\n }\n if (Schema.isSchema(arg)) {\n return createIntentBuilder(\n runtime.actions$.pipe(\n Stream.filter((a: any) => {\n const result = Schema.decodeUnknownSync(arg as Schema.Schema<any, any, never>)(a)\n return !!result\n }),\n ),\n )\n }\n }\n return createIntentBuilder(runtime.actions$)\n },\n get: (_target, prop) => {\n guardRunOnly('use_in_setup', '$.onAction')\n if (typeof prop === 'string') {\n return createIntentBuilder(\n runtime.actions$.pipe(Stream.filter((a: any) => a._tag === prop || a.type === prop)),\n prop,\n )\n }\n return undefined\n },\n }) as unknown as BoundApi<Sh, R>['onAction'],\n onState: (selector: (s: Logix.StateOf<Sh>) => any) => {\n guardRunOnly('use_in_setup', '$.onState')\n return createIntentBuilder(runtime.changes(selector))\n },\n on: (stream: Stream.Stream<any>) => {\n guardRunOnly('use_in_setup', '$.on')\n return createIntentBuilder(stream)\n },\n } as any\n\n setBoundInternals(api as any, runtimeInternals)\n\n return api\n}\n","import { Schema } from 'effect'\n\ntype ActionArgs<P> = [P] extends [void] ? [] | [P] : [P]\ntype ActionFn<P, Out> = (...args: ActionArgs<P>) => Out\n\ntype DevSource = {\n readonly file: string\n readonly line: number\n readonly column: number\n}\n\nexport type ActionValue<Tag extends string, Payload> = Payload extends void\n ? 
{\n readonly _tag: Tag\n readonly payload?: Payload\n }\n : {\n readonly _tag: Tag\n readonly payload: Payload\n }\n\nexport type ActionCreator<Tag extends string, Payload> = ActionFn<Payload, ActionValue<Tag, Payload>>\n\nexport type ActionToken<\n Tag extends string,\n Payload,\n PayloadSchema extends Schema.Schema<any, any, any> = Schema.Schema<any, any, any>,\n> = ActionCreator<Tag, Payload> & {\n readonly _kind: 'ActionToken'\n readonly tag: Tag\n readonly schema: PayloadSchema\n readonly source?: DevSource\n}\n\nexport type AnyActionToken = ActionToken<string, any, Schema.Schema<any, any, any>>\n\nexport const isActionToken = (value: unknown): value is AnyActionToken =>\n typeof value === 'function' &&\n (value as any)._kind === 'ActionToken' &&\n typeof (value as any).tag === 'string' &&\n Schema.isSchema((value as any).schema)\n\nexport const make = <Tag extends string, PayloadSchema extends Schema.Schema<any, any, any>>(\n tag: Tag,\n schema: PayloadSchema,\n options?: { readonly source?: DevSource },\n): ActionToken<Tag, Schema.Schema.Type<PayloadSchema>, PayloadSchema> => {\n const fn = ((...args: readonly [unknown?]) => ({\n _tag: tag,\n payload: args[0],\n })) as unknown as ActionToken<Tag, Schema.Schema.Type<PayloadSchema>, PayloadSchema>\n\n ;(fn as any)._kind = 'ActionToken'\n ;(fn as any).tag = tag\n ;(fn as any).schema = schema\n if (options?.source) {\n ;(fn as any).source = options.source\n }\n\n return fn\n}\n\nexport const makeActions = <M extends Record<string, Schema.Schema<any, any, any>>>(\n schemas: M,\n options?: {\n readonly source?: DevSource\n readonly sources?: Partial<Record<Extract<keyof M, string>, DevSource>>\n },\n): {\n readonly [K in keyof M]: ActionToken<Extract<K, string>, Schema.Schema.Type<M[K]>, M[K]>\n} => {\n const out: Record<string, AnyActionToken> = {}\n const sources = options?.sources as Record<string, DevSource | undefined> | undefined\n const defaultSource = options?.source\n for (const [key, schema] of 
Object.entries(schemas)) {\n const source = sources?.[key] ?? defaultSource\n out[key] = make(key, schema, source ? { source } : undefined)\n }\n return out as any\n}\n\nexport type ActionDef = Schema.Schema<any, any, any> | AnyActionToken\nexport type ActionDefs = Record<string, ActionDef>\n\nexport type NormalizedActionTokens<M extends ActionDefs> = {\n readonly [K in keyof M]: M[K] extends Schema.Schema<any, any, any>\n ? ActionToken<Extract<K, string>, Schema.Schema.Type<M[K]>, M[K]>\n : M[K] extends ActionToken<any, infer P, infer S>\n ? ActionToken<Extract<K, string>, P, S>\n : never\n}\n\nexport const normalizeActions = <M extends ActionDefs>(defs: M): NormalizedActionTokens<M> => {\n const out: Record<string, AnyActionToken> = {}\n\n for (const [key, def] of Object.entries(defs)) {\n if (Schema.isSchema(def)) {\n out[key] = make(key, def)\n continue\n }\n\n if (isActionToken(def)) {\n if (def.tag !== key) {\n throw new Error(`[Logix.Action] actionTag MUST equal key: key=\"${key}\", token.tag=\"${def.tag}\"`)\n }\n out[key] = def\n continue\n }\n\n throw new Error(`[Logix.Action] invalid action def for key \"${key}\"`)\n }\n\n return out as any\n}\n","import { create, type Patches } from 'mutative'\nimport type { FieldPath } from '../../field-path.js'\nimport { isFieldPathSegment } from '../../field-path.js'\n\nexport type PatchPath = FieldPath\n\nexport const mutateWithoutPatches = <S>(base: S, mutator: (draft: S) => void): S => {\n return create(base, mutator as any) as unknown as S\n}\n\nconst toPatchFieldPath = (path: unknown): PatchPath | '*' | undefined => {\n if (typeof path === 'string') {\n const trimmed = path.trim()\n return trimmed.length > 0 ? 
'*' : undefined\n }\n\n if (!Array.isArray(path)) return undefined\n\n const parts: Array<string> = []\n for (const seg of path) {\n if (typeof seg === 'string') {\n if (isFieldPathSegment(seg)) parts.push(seg)\n continue\n }\n }\n\n if (parts.length === 0) return '*'\n return parts\n}\n\nexport const mutateWithPatchPaths = <S>(\n base: S,\n mutator: (draft: S) => void,\n): { readonly nextState: S; readonly patchPaths: ReadonlyArray<PatchPath | '*'> } => {\n const out = create(base, mutator as any, {\n enablePatches: {\n pathAsArray: true,\n arrayLengthAssignment: false,\n },\n }) as unknown\n\n if (!Array.isArray(out)) {\n return { nextState: out as S, patchPaths: [] }\n }\n\n const nextState = out[0] as S\n const patches = (out[1] ?? []) as Patches<{ pathAsArray: true; arrayLengthAssignment: false }>\n\n const dedup = new Map<string, PatchPath | '*'>()\n for (const patch of patches) {\n const p = toPatchFieldPath((patch as any)?.path)\n if (!p) continue\n const key = p === '*' ? '*' : JSON.stringify(p)\n if (!dedup.has(key)) dedup.set(key, p)\n }\n\n return {\n nextState,\n patchPaths: Array.from(dedup.values()),\n }\n}\n","import { Effect, Stream, Ref, Option } from 'effect'\nimport type { AnyModuleShape, ModuleRuntime, StateOf, ActionOf, ModuleShape } from './module.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport * as EffectOp from '../../effect-op.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport { RunSessionTag } from '../../observability/runSession.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport * as Debug from './DebugSink.js'\nimport * as ReadQuery from './ReadQuery.js'\n\nconst getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack, never, any> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.stack : [])),\n )\n\nconst getRuntimeScope = (runtime: unknown): { readonly moduleId?: string; readonly instanceId?: string } => {\n if (!runtime) return {}\n if (typeof runtime !== 'object' && typeof runtime !== 'function') return {}\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n return {\n moduleId: typeof scope.moduleId === 'string' ? scope.moduleId : undefined,\n instanceId: typeof scope.instanceId === 'string' ? scope.instanceId : undefined,\n }\n}\n\nexport interface Api<Sh extends ModuleShape<any, any>, R = never> {\n readonly fromAction: <T extends ActionOf<Sh>>(predicate: (a: ActionOf<Sh>) => a is T) => Stream.Stream<T>\n\n readonly fromState: {\n <V>(selector: (s: StateOf<Sh>) => V): Stream.Stream<V>\n <V>(query: ReadQuery.ReadQuery<StateOf<Sh>, V>): Stream.Stream<V>\n }\n\n readonly debounce: <V>(ms: number) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly throttle: <V>(ms: number) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly filter: <V>(predicate: (value: V) => boolean) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly run: <V, A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((payload: V) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => Effect.Effect<void, E, Logic.Env<Sh, R & R2>>\n\n readonly runParallel: <V, A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((payload: V) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => Effect.Effect<void, E, Logic.Env<Sh, R & R2>>\n\n readonly runLatest: <V, A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((payload: V) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => Effect.Effect<void, E, Logic.Env<Sh, R & R2>>\n\n readonly runExhaust: <V, A = void, E = never, R2 = unknown>(\n eff: 
Logic.Of<Sh, R & R2, A, E> | ((payload: V) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => Effect.Effect<void, E, Logic.Env<Sh, R & R2>>\n}\n\nconst resolveEffect = <T, Sh extends AnyModuleShape, R, A, E>(\n eff: Logic.Of<Sh, R, A, E> | ((payload: T) => Logic.Of<Sh, R, A, E>),\n payload: T,\n): Logic.Of<Sh, R, A, E> => (typeof eff === 'function' ? (eff as (p: T) => Logic.Of<Sh, R, A, E>)(payload) : eff)\n\nexport const make = <Sh extends AnyModuleShape, R = never>(\n runtime: ModuleRuntime<StateOf<Sh>, ActionOf<Sh>>,\n runtimeInternals?: RuntimeInternals,\n): Api<Sh, R> => {\n const scope = getRuntimeScope(runtime)\n const resolveConcurrencyLimit = (): Effect.Effect<number | 'unbounded', never, any> =>\n runtimeInternals\n ? runtimeInternals.concurrency.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\n const runAsFlowOp = <A, E, R2, V>(\n name: string,\n payload: V,\n eff: Effect.Effect<A, E, Logic.Env<Sh, R & R2>>,\n options?: Logic.OperationOptions,\n ): Effect.Effect<A, E, Logic.Env<Sh, R & R2>> =>\n Effect.gen(function* () {\n const stack = yield* getMiddlewareStack()\n const meta: any = {\n ...(options?.meta ?? {}),\n policy: options?.policy,\n tags: options?.tags,\n trace: options?.trace,\n moduleId: scope.moduleId,\n instanceId: scope.instanceId,\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = meta.instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n }\n }\n\n const op = EffectOp.make<A, E, any>({\n kind: 'flow',\n name,\n payload,\n effect: eff as any,\n meta,\n })\n return yield* EffectOp.run(op, stack)\n }) as any\n\n const runEffect =\n <T, A, E, R2>(eff: Logic.Of<Sh, R & R2, A, E> | ((payload: T) => Logic.Of<Sh, R & R2, A, E>)) =>\n (payload: T) =>\n resolveEffect<T, Sh, R & R2, A, E>(eff, payload)\n\n const runStreamSequential =\n <T, A, E, R2>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((payload: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) =>\n (stream: Stream.Stream<T>): Effect.Effect<void, E, Logic.Env<Sh, R & R2>> =>\n Stream.runForEach(stream, (payload) =>\n runAsFlowOp<A, E, R2, T>('flow.run', payload, runEffect<T, A, E, R2>(eff)(payload), options),\n )\n\n const runStreamParallel =\n <T, A, E, R2>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((payload: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) =>\n (stream: Stream.Stream<T>): Effect.Effect<void, E, Logic.Env<Sh, R & R2>> =>\n Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit()\n\n return yield* Stream.runDrain(\n stream.pipe(\n Stream.mapEffect(\n (payload) =>\n runAsFlowOp<A, E, R2, T>('flow.runParallel', payload, runEffect<T, A, E, R2>(eff)(payload), options),\n { concurrency },\n ),\n ),\n ).pipe(\n Effect.catchAllCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: scope.moduleId,\n instanceId: scope.instanceId,\n code: 'flow::unhandled_failure',\n severity: 'error',\n message: 'Flow watcher (runParallel) failed with an unhandled error.',\n hint: 'Handle errors explicitly inside the watcher (catch/catchAll) or write back via TaskRunner failure; avoid silent failures.',\n kind: 'flow_unhandled_failure',\n trigger: {\n kind: 'flow',\n name: 'runParallel',\n },\n }).pipe(Effect.zipRight(Effect.failCause(cause))),\n ),\n )\n }) as any\n\n return {\n fromAction: <T extends 
ActionOf<Sh>>(predicate: (a: ActionOf<Sh>) => a is T) =>\n runtime.actions$.pipe(Stream.filter(predicate)),\n\n fromState: <V>(selectorOrQuery: ((s: StateOf<Sh>) => V) | ReadQuery.ReadQuery<StateOf<Sh>, V>) =>\n runtime.changes(ReadQuery.isReadQuery(selectorOrQuery) ? selectorOrQuery.select : selectorOrQuery),\n\n debounce: (ms: number) => (stream) => Stream.debounce(stream, ms),\n\n throttle: (ms: number) => (stream) =>\n Stream.throttle(stream, {\n cost: () => 1,\n units: 1,\n duration: ms,\n strategy: 'enforce',\n }),\n\n filter: (predicate: (value: any) => boolean) => (stream) => Stream.filter(stream, predicate),\n\n run: (eff, options) => (stream) => runStreamSequential<any, any, any, any>(eff, options)(stream),\n\n runParallel: (eff, options) => (stream) => runStreamParallel<any, any, any, any>(eff, options)(stream),\n\n runLatest: (eff, options) => (stream) =>\n Stream.runDrain(\n Stream.map(stream, (payload) =>\n runAsFlowOp<any, any, any, any>(\n 'flow.runLatest',\n payload,\n runEffect<any, any, any, any>(eff)(payload),\n options,\n ),\n ).pipe(\n Stream.flatMap((effect) => Stream.fromEffect(effect), {\n switch: true,\n }),\n ),\n ),\n\n runExhaust: (eff, options) => (stream) =>\n Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit()\n const busyRef = yield* Ref.make(false)\n const mapper = (payload: any) =>\n Effect.gen(function* () {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n return\n }\n try {\n yield* runAsFlowOp<any, any, any, any>(\n 'flow.runExhaust',\n payload,\n runEffect<any, any, any, any>(eff)(payload),\n options,\n )\n } finally {\n yield* Ref.set(busyRef, false)\n }\n })\n\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(mapper, { concurrency })))\n }),\n }\n}\n","import { Effect } from 'effect'\n\nexport const makeMatch = <V>(value: V) => {\n let result: Effect.Effect<any, any, any> | undefined\n\n const chain = {\n with: <A>(predicate: (value: V) => boolean, handler: (value: V) => A) => {\n if (result) return chain\n if (predicate(value)) {\n result = handler(value) as any\n }\n return chain\n },\n otherwise: <A>(handler: (value: V) => A): A => {\n if (result) return result as A\n return handler(value)\n },\n exhaustive: () => {\n if (result) {\n return result\n }\n return Effect.dieMessage('[FluentMatch] Non-exhaustive match: no pattern matched value')\n },\n }\n\n return chain\n}\n\nexport const makeMatchTag = <V extends { _tag: string }>(value: V) => {\n let result: Effect.Effect<any, any, any> | undefined\n\n const chain = {\n with: <K extends V['_tag'], A>(t: K, handler: (value: Extract<V, { _tag: K }>) => A) => {\n if (result) return chain\n if (value._tag === t) {\n result = handler(value as Extract<V, { _tag: K }>) as any\n }\n return chain\n },\n otherwise: <A>(handler: (value: V) => A): A => {\n if (result) return result as A\n return handler(value)\n },\n exhaustive: () => {\n if (result) {\n return result\n }\n return Effect.dieMessage('[FluentMatchTag] Non-exhaustive match: no tag handler matched value')\n },\n }\n\n return chain\n}\n","import { Context, Deferred, Effect, Layer } from 'effect'\nimport { isDevEnv } from './runtime/core/env.js'\nimport { RootContextTag, type RootContext } from './runtime/core/RootContext.js'\n\nexport type RootResolveEntrypoint = 'logic.root.resolve' | 'logic.$.root.resolve'\n\nexport 
interface RootResolveOptions {\n readonly entrypoint?: RootResolveEntrypoint\n /**\n * Whether to wait when RootContext is not ready yet:\n * - Default false: avoid misuse during layer/setup which can deadlock.\n * - `$.root.resolve` passes true in the run phase (run-only), allowing Env assembly to complete.\n */\n readonly waitForReady?: boolean\n}\n\nconst tagIdOf = (tag: Context.Tag<any, any>): string =>\n typeof (tag as any)?.id === 'string'\n ? String((tag as any).id)\n : typeof (tag as any)?.key === 'string'\n ? String((tag as any).key)\n : '<unknown tag>'\n\nconst makeMissingRootProviderError = (\n tag: Context.Tag<any, any>,\n entrypoint: RootResolveEntrypoint,\n extra?: string,\n): Error => {\n const dev = isDevEnv()\n const tokenId = tagIdOf(tag)\n const fix: string[] = dev\n ? [\n '- Provide it when creating the runtime tree (Logix.Runtime.make(...,{ layer }) / ManagedRuntime.make(Layer.mergeAll(...))).',\n \"- If you're in React and want the current runtime environment singleton, use useModule(ModuleTag).\",\n '- Do not rely on nested RuntimeProvider.layer to mock Root.resolve.',\n ]\n : []\n\n const message = dev\n ? [\n '[MissingRootProviderError] Cannot resolve Tag from root provider.',\n extra ? 
`\\n${extra}` : '',\n `tokenId: ${tokenId}`,\n `entrypoint: ${entrypoint}`,\n 'mode: global',\n 'startScope: root',\n '',\n 'fix:',\n ...fix,\n ]\n .filter((s) => s.length > 0)\n .join('\\n')\n : '[MissingRootProviderError] tag not found in root provider'\n\n const err = new Error(message)\n err.name = 'MissingRootProviderError'\n ;(err as any).tokenId = tokenId\n ;(err as any).entrypoint = entrypoint\n ;(err as any).mode = 'global'\n ;(err as any).startScope = { kind: 'root' }\n ;(err as any).fix = fix\n return err\n}\n\n/**\n * resolve\n *\n * Resolve a Tag explicitly from the root provider of the current Runtime tree (ServiceTag / ModuleTag).\n *\n * Semantics:\n * - Always reads rootContext; unaffected by nearer-scope Layer/Context overrides.\n * - For ModuleTag: expresses root singleton semantics only (not used for multi-instance selection).\n */\nexport const resolve = <Id, Svc>(\n tag: Context.Tag<Id, Svc>,\n options?: RootResolveOptions,\n): Effect.Effect<Svc, never, any> =>\n Effect.gen(function* () {\n const entrypoint: RootResolveEntrypoint = options?.entrypoint ?? 'logic.root.resolve'\n\n const root = yield* RootContextTag\n\n const rootContext = root.context ?? (options?.waitForReady ? 
yield* root.ready : undefined)\n\n if (!rootContext) {\n return yield* Effect.die(\n makeMissingRootProviderError(tag as Context.Tag<any, any>, entrypoint, 'reason: rootContextNotReady'),\n )\n }\n\n try {\n return Context.get(rootContext, tag as Context.Tag<any, any>) as Svc\n } catch {\n return yield* Effect.die(makeMissingRootProviderError(tag as Context.Tag<any, any>, entrypoint))\n }\n })\n\n/**\n * layerFromContext(tests/perf only)\n *\n * Provide a \"ready immediately\" RootContext for Root.resolve.\n * - `ready` is fulfilled immediately to avoid extra waits when waitForReady=true.\n */\nexport const layerFromContext = (context: Context.Context<any>): Layer.Layer<any, never, any> =>\n Layer.scoped(\n RootContextTag,\n Effect.gen(function* () {\n const ready = yield* Deferred.make<Context.Context<any>>()\n yield* Deferred.succeed(ready, context)\n const root: RootContext = { context, ready }\n return root\n }),\n )\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA,oBAAAA;AAAA,EAAA,qBAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA;AAAA,cAAAC;AAAA;AAAA;;;ACAA,IAAAC,kBAAuD;;;ACAvD,IAAAC,kBAaO;;;ACbP,IAAAC,iBAA4C;;;ACA5C,oBAAsB;;;ACAtB,IAAAC,iBAAuD;;;ACwEvD,IAAM,iBAAoD;AAAA,EACxD,UAAU;AAAA,EACV,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,cAAc,IAAI;AAAA,EAClB,uBAAuB;AACzB;;;AC3EA,IAAAC,iBAA0C;AAOnC,IAAM,gBAAgB,wBAAS,WAA+B,MAAS;AAoHvE,IAAM,wBAAN,cAAoC,uBAAQ,IAAI,0BAA0B,EAG/E,EAAE;AAAC;;;AF6HE,IAAM,oBAAoB,wBAAS,WAAgC,CAAC,CAAC;AACrE,IAAM,sBAAsB,wBAAS,WAA+B,MAAS;AAC7E,IAAM,eAAe,wBAAS,WAA+B,MAAS;AACtE,IAAM,eAAe,wBAAS,WAA+B,MAAS;AAEtE,IAAM,0BAA0B,wBAAS,WAA6B,KAAK;AAiB3E,IAAM,0CAA0C,wBAAS,WAAmD;AAAA,EACjH,cAAc;AAAA,EACd,MAAM;AACR,CAAC;AA0ID,IAAM,uBAAuB,oBAAI,IAAY;AAC7C,IAAM,wBAAwB,oBAAI,IAAY;AA4C9C,IAAM,oBAAoB,CAAC,UAAgE;AACzF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,eAAe,MAAM;AACzB,QAAI;AACF,aAAO,qBAAM,OAAO,MAAM,OAA+B;AAAA,QACvD,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH,QAAQ;AACN,UAAI;AACF,eAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,MAC5C,QAAQ;AACN,eAAO,OAAO,MAAM,KAAK;AAAA,MAC3B;AAAA,IACF;AA
AA,EACF,GAAG;AAEH,QAAM,UAAU,kBAAkB,QAAQ;AAAA,EAAsB,WAAW;AAE3E,SAAO,sBAAO,SAAS,OAAO,EAAE;AAAA,IAC9B,sBAAO,aAAa;AAAA,MAClB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,CAAC,UAA2D;AAChF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,SAAS,kBAAkB,QAAQ,gBAAgB,MAAM,QAAQ;AACvE,QAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAC5C,QAAM,MAAM,GAAG,MAAM;AAAA,EAAK,MAAM;AAEhC,QAAM,OACJ,MAAM,aAAa,YACf,sBAAO,WAAW,GAAG,IACrB,MAAM,aAAa,SACjB,sBAAO,QAAQ,GAAG,IAClB,sBAAO,SAAS,GAAG;AAE3B,QAAM,cAAuC;AAAA,IAC3C,kBAAkB;AAAA,IAClB,eAAe,cAAc,MAAM,QAAQ;AAAA,IAC3C,yBAAyB,MAAM;AAAA,IAC/B,4BAA4B,MAAM;AAAA,EACpC;AACA,MAAI,MAAM,MAAM;AACd,gBAAY,uBAAuB,IAAI,MAAM;AAAA,EAC/C;AACA,MAAI,MAAM,WAAW;AACnB,gBAAY,4BAA4B,IAAI,MAAM;AAAA,EACpD;AAEA,SAAO,KAAK,KAAK,sBAAO,aAAa,WAAW,CAAC;AACnD;AAOO,IAAM,YAAY,qBAAM,cAAc,mBAAmB,CAAC,CAAC;AAQlE,IAAM,gBAAsB;AAAA,EAC1B,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AACjB;AAEO,IAAM,iBAAiB,qBAAM,cAAc,mBAAmB,CAAC,aAAa,CAAC;AASpF,IAAM,cAAoB;AAAA,EACxB,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAC/C;AAEO,IAAM,eAAe,qBAAM,cAAc,mBAAmB,CAAC,WAAW,CAAC;AAEhF,IAAM,YAAY,OAAO,WAAW,eAAe,OAAO,aAAa;AAGvE,IAAM,4BAA4B,CAAC,UAAsC;AAEvE,MAAI,OAAQ,MAAc,SAAS,YAAa,MAAc,KAAK,WAAW,QAAQ,GAAG;AACvF,UAAM,WAAY,MAAc,YAAY;AAC5C,UAAM,OAAQ,MAAc;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,yBAAyB,WAAW,QAAQ,OAAO,IAAI;AAAA,QACvD;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,IAAI,KAAK;AAEjB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,mBAAmB;AACpC,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,eAAe,MAAM;AACzB,UAAI;AACF,eAAO,qBAAM,OAAO,MAAM,OAA+B,EAAE,kBAAkB,KAAK,CAAC;AAAA,MACrF,QAAQ;AACN,YAAI;AACF,iBAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,QAC5C,QAAQ;AACN,iBAAO,OAAO,MAAM,KAAK;AAAA,QAC3B;AAAA,MACF;AAAA,IACF,GAAG;AAEH,UAAM,MAAM,GAAG,QAAQ,IAAI,WAAW;AACtC,QAAI,qBAAq
B,IAAI,GAAG,GAAG;AACjC,aAAO,sBAAO;AAAA,IAChB;AACA,yBAAqB,IAAI,GAAG;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,mCAAmC;AAAA,QACnC;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,MAAM,WAAW;AAEzB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,cAAc;AAC/B,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAE5C,UAAM,QACJ,MAAM,aAAa,YAAY,kBAAkB,MAAM,aAAa,SAAS,kBAAkB;AAEjG,UAAM,QACJ,MAAM,aAAa,YACf,wBACA,MAAM,aAAa,SACjB,qBACA;AAER,UAAM,MAAM,GAAG,QAAQ,IAAI,MAAM,IAAI,IAAI,MAAM,OAAO;AACtD,QAAI,sBAAsB,IAAI,GAAG,GAAG;AAClC,aAAO,sBAAO;AAAA,IAChB;AACA,0BAAsB,IAAI,GAAG;AAE7B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,iBAAiB,QAAQ,eAAe;AAAA,QACxC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,UAAI,MAAM,aAAa,WAAW;AAEhC,gBAAQ,KAAK,MAAM;AAAA,MACrB,WAAW,MAAM,aAAa,QAAQ;AAEpC,gBAAQ,KAAK,MAAM;AAAA,MACrB,OAAO;AAEL,gBAAQ,MAAM,MAAM;AAAA,MACtB;AAEA,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAIA,SAAO,sBAAO;AAChB;AAOA,IAAM,qBAA2B;AAAA,EAC/B,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AAEd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAAA,IAC7C;AAEA,WAAO,0BAA0B,KAAK;AAAA,EACxC;AACF;AAEO,IAAM,sBAAsB,qBAAM,cAAc,mBAAmB,CAAC,kBAAkB,CAAC;AAQ9F,IAAM,+BAAqC;AAAA,EACzC,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AACd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AAAA,IACf;AAEA,WAAO,MAAM,SAAS,qBAAsB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAC1F,0BAA0B,KAAK,IAC/B,sBAAO;AAAA,EACb;AACF;AAEO,IAAM,gCAAgC,qBAAM,cAAc,mBAAmB,CAAC,4BAA4B,CAAC;AAO3G,IAAM,2BAA2B,sBAAO;AAAA,EAC7C,sBAAO;AAAA,EACP,sBAAO,aAAa,EAAE,MAAM,WAAW,QAAQ,KAAK,CAAC;AACvD;;;AF3kBO,IAAM,mBAAmB,uBAAQ,WAA6B,2BAA2B;;;AK5IhG,IAAAC,iBAAgC;AAgBzB,IAAM,oCAAoC,wBAAS,WAAqD,CAAC,CAAC;;;AChBjH,IAAAC,iBAAwC;;;ACAxC,IAAAC,iBAAwC;;;ACAxC,IAAAC,iBAA+B;AA+E/B,IAAM,gCAAN,cAA4C,uBAAQ,IAAI,6CAA6C,EAGnG,EAAE;AAAC;AAML,IAAM,mCAAN,cAA+C,uBAAQ,IAAI,gDAAgD,EAGzG,EAAE;AAAC;AAUL,IAAM,0BAAN,cAAsC,u
BAAQ,IAAI,gCAAgC,EAGhF,EAAE;AAAC;AAkFL,IAAM,mCAAN,cAA+C,uBAAQ,IAAI,yCAAyC,EAGlG,EAAE;AAAC;AA6BL,IAAM,2BAAN,cAAuC,uBAAQ,IAAI,iCAAiC,EAGlF,EAAE;AAAC;AAaL,IAAM,oCAAN,cAAgD,uBAAQ,IAAI,0CAA0C,EAGpG,EAAE;AAAC;;;ADpNL,IAAM,sCAAN,cAAkD,uBAAQ,IAAI,4CAA4C,EAGxG,EAAE;AAAC;AAWL,IAAM,0CAAN,cAAsD,uBAAQ,IAAI,gDAAgD,EAGhH,EAAE;AAAC;AAIL,IAAM,0CAAN,cAAsD,uBAAQ,IAAI,gDAAgD,EAGhH,EAAE;AAAC;AAcL,IAAM,6BAAN,cAAyC,uBAAQ,IAAI,mCAAmC,EAGtF,EAAE;AAAC;AA0GL,IAAM,iCAAN,cAA6C,uBAAQ,IAAI,uCAAuC,EAG9F,EAAE;AAAC;;;AE/KL,IAAAC,iBAAwC;AAsFxC,IAAM,iCAAN,cAA6C,uBAAQ,IAAI,uCAAuC,EAG9F,EAAE;AAAC;;;ACzFL,IAAAC,kBAAgC;AAYzB,IAAM,4BAAN,cAAwC,wBAAQ,IAAI,sCAAsC,EAG/F,EAAE;AAAC;;;ACfL,IAAAC,kBAAiD;;;ACAjD,IAAAC,kBAA+B;AA8B/B,IAAM,oBAAN,cAAgC,wBAAQ,IAAI,0BAA0B,EAAiC,EAAE;AAAC;;;ADP1G,IAAM,2BAAN,cAAuC,wBAAQ,IAAI,iCAAiC,EAGlF,EAAE;AAAC;;;AE1BL,IAAAC,kBAAyC;;;ACMzC,IAAAC,kBAA+B;;;ACN/B,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAAqC;;;ACArC,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA6F;;;ACA7F,IAAAC,kBAA4D;AAcrD,IAAM,yBAAyB,yBAAS,WAAW,KAAK;AAUxD,IAAM,qBAAqB,yBAAS,WAAW,KAAK;;;ACxB3D,IAAAC,kBAAiC;;;ACAjC,IAAAC,kBAAyC;AACzC,gBAA2B;;;ACD3B,IAAAC,kBAAwC;AAQjC,IAAM,oBAAoB,yBAAS,WAAoB,KAAK;;;ACRnE,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAAiC;AACjC,sBAAuB;AAuOvB,IAAM,SAAS,MAAM;AACnB,QAAM,OAAO,WAAW;AACxB,MAAI,QAAQ,OAAO,KAAK,QAAQ,YAAY;AAC1C,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB;AACA,SAAO,MAAM,KAAK,IAAI;AACxB,GAAG;;;AC9OH,IAAAC,kBAAgD;AAChD,IAAAC,mBAAuB;;;ACDvB,IAAAC,kBAA+B;AA0GxB,IAAM,sBAAN,cAAkC,wBAAQ,IAAI,gCAAgC,EAGnF,EAAE;AAAC;;;AC7GL,IAAAC,kBAAuC;AA+ChC,IAAM,YAAN,cAAwB,wBAAQ,IAAI,yBAAyB,EAA+B,EAAE;AAAC;AAwD/F,IAAM,WAA2E,uBAAO,IAAI,aAAa;AAC9G,QAAM,MAAM,OAAO;AACnB,SAAO,OAAO,IAAI;AACpB,CAAC;AAEM,IAAM,cAAqD,uBAAO,IAAI,aAAa;AACxF,QAAM,MAAM,OAAO;AACnB,SAAO,IAAI;AACb,CAAC;;;AC/GD,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA+B;;;ACC/B,IAAAC,aAA2B;;;ACD3B,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAAkC;AASlC,IAAM,qBAAN,cAAiC,wBAAQ,IAAI,2BAA2B,EAAmC,EAAE;AAAC;;;ACT9G,IAAAC,kBAeO;;;ACfP,IAAAC,kBAA0D;;;ACEnD,IAAM,0BAA0B,IAAI;;;ACYpC,IAAM,eAAe,uBAAO,IAAI,2BAA2B;AAMlE,IAAM,eAAe,CA
AC,QAAgB,KAAa,UAAyB;AAC1E,SAAO,eAAe,QAAQ,KAAK;AAAA,IACjC;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,IAAM,UAAU,CAAC,WACrB,OAAyB,YAAY;AAEjC,IAAM,gBAAgB,CAAC,WAC5B,QAAQ,MAAM,GAAG;AAEZ,IAAM,aAAa,CAAO,QAAmC,SAA2C;AAC7G,eAAa,QAAe,cAAc,IAAI;AAC9C,SAAO;AACT;;;ACtCA,IAAAC,aAA2B;;;AJoGpB,IAAM,oBAAN,cAAgC,wBAAQ,IAAI,8BAA8B,EAAqC,EAAE;AAAC;AAEzH,IAAM,wBAAwB,yBAAS,WAAuC,MAAS;AACvF,IAAM,4BAA4B,yBAAS;AAAA,EACzC;AACF;;;AKzGA,IAAAC,kBAAsC;;;ACAtC,IAAAC,kBAA4E;;;ACA5E,IAAAC,kBAA4D;;;ACA5D,IAAAC,kBAAqC;;;ACArC,IAAAC,kBAAuC;AAsFhC,IAAM,uBAAuB,wBAAQ,WAA8B,4BAA4B;AAiB/F,IAAM,sBAAN,cAAkC,wBAAQ,IAAI,2BAA2B,EAG9E,EAAE;AAAC;;;AC1GL,IAAAC,kBAAgC;AAqBzB,IAAM,MAAM,wBAAQ,WAAoB,mBAAmB;;;ACrBlE,IAAAC,kBAA4B;AAc5B,IAAMC,SAAQ,uBAAO,UAAU,CAAC,UAAU,MAAM,iBAAiB;;;ACdjE,IAAAC,kBAAmF;;;ACAnF,IAAAC,kBAAuB;;;ACAvB,IAAAC,mBAAqC;;;ACArC,IAAAC,kBAA4C;;;ACA5C,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAAiD;;;ArD8B1C,SAAS,KACd,SACA,OAC2B;AAC3B,SAAO,uBAAO,IAAI,aAAa;AAC7B,UAAM,UAAwD,CAAC;AAE/D,eAAW,CAAC,KAAKC,OAAM,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,YAAM,UAAU,OAAOA;AAEvB,cAAQ,GAAG,IAAI;AAAA,QACb,MAAM,CAAC,aAAkB,uBAAO,IAAI,QAAQ,UAAU,QAAQ;AAAA,QAC9D,SAAS,QAAQ;AAAA,QACjB,UAAU,QAAQ;AAAA,QAClB,UAAU,QAAQ;AAAA,QAClB,SAAS,IAAI;AAAA,UACX,CAAC;AAAA,UACD;AAAA,YACE,KAAK,CAAC,SAAS,SAAS,CAAC,YAAiB,QAAQ,SAAS,EAAE,MAAM,MAAgB,QAAQ,CAAC;AAAA,UAC9F;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,OAAO;AAAA,MACZ;AAAA,IAGF;AAAA,EACF,CAAC;AACH;;;ADvBA,IAAM,mBAAsD,CAAC,EAAE,MAAM,iBAAiB,eAAe,eAAe,CAAC;AAErH,IAAM,sBAAsB,CAAC,UAAoD;AAC/E,QAAM,OAAO,OAAO,UAAU,WAAY,EAAE,WAAW,MAAM,IAA8C;AAE3G,QAAM,YAAY,KAAK;AACvB,MAAI,OAAO,cAAc,YAAY,UAAU,WAAW,GAAG;AAC3D,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAEA,QAAM,WAAW,MAAM,QAAQ,KAAK,QAAQ,KAAK,KAAK,SAAS,SAAS,IAAI,KAAK,WAAW;AAE5F,QAAM,cAAc,KAAK,eAAgB,EAAE,MAAM,SAAS;AAC1D,QAAM,cAAc,KAAK,eAAgB,EAAE,MAAM,WAAW;AAC5D,QAAM,mBAAmB,KAAK,oBAAoB;AAElD,SAAO;AAAA,IACL;AAAA,IACA,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,UAAU,KAAK;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAMC,QAAO,CAAO,YAAmC,WACvD,WAAW,QAAQ;
AAAA,EACtB,YAAY,oBAAoB,UAAU;AAAA,EAC1C,MAAM;AACR,CAAC;AAEI,IAAMC,iBAAgB,CAAC,WACvB,cAAc,MAAM;AAEpB,IAAMC,WAAU,CAAC,WAA6E,QAAQ,MAAM;AAE5G,IAAMC,cAAa,CAAO,QAAmC,SAC7D,WAAW,QAAQ,IAAI;AAc9B,IAAM,eAAe,CAAC,UACpB,QAAQ,KAAK,KACb,OAAO,UAAU,aACf,MAAuC,UAAU,eAChD,MAAuC,UAAU,aACpD,SAAU;AAEZ,IAAM,kBAAkB,CACtBC,YACsB;AACtB,MAAI,aAAaA,OAAM,GAAG;AACxB,WAAOA,QAAO;AAAA,EAChB;AACA,SAAOA;AACT;AAeO,SAAS,KACd,QACA,OACqB;AACrB,QAAM,SACJ,OAAO,MACP,CAAC,GAAG,OAAO,OAAO,EACf,IAAI,CAAC,MAAM,EAAE,EAAE,EACf,KAAK,EACL,KAAK,GAAG;AAEb,QAAM,gBAAgB,uBAAO,OAAO,IAAI;AACxC,QAAM,WAAW,oBAAI,IAAY;AAEjC,aAAWA,WAAU,OAAO,SAAS;AACnC,UAAM,MAAM,gBAAgBA,OAAM;AAClC,aAAS,IAAI,OAAO,IAAI,EAAE,CAAC;AAC1B,IAAC,cAAoE,IAAI,EAAE,IAAI;AAAA,EAIlF;AAEA,QAAM,SAAuB;AAAA,IAC3B;AAAA,IACA;AAAA,EACF;AAEA,QAAM,aAAgC;AAAA,IACpC,WAAW;AAAA,IACX,UAAU,MAAM,KAAK,QAAQ;AAAA,IAC7B,UAAU,CAAC,GAAG,kBAAkB,EAAE,MAAM,iBAAiB,eAAe,QAAQ,MAAM,GAAG,CAAC;AAAA,IAC1F,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,aAAa,EAAE,MAAM,WAAW;AAAA,IAChC,kBAAkB;AAAA,EACpB;AAEA,SAAY,WAAW,QAAQ;AAAA,IAC7B;AAAA,IACA,MAAM;AAAA,EACR,CAAC;AACH;","names":["attachMeta","getDefinition","getMeta","make","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_mutative","import_effect","import_effect","import_effect","import_effect","import_effect","SchemaAST","import_effect","import_effect","import_effect","import_effect","SchemaAST","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","nowMs","import_effect","import_effect","import_mutative","import_effect","import_effect","import_effect","module","make","getDefinition","getMeta","attachMeta","module"]}
|
|
1
|
+
{"version":3,"sources":["../src/Process.ts","../src/internal/runtime/ModuleFactory.ts","../src/internal/runtime/core/ModuleRuntime.impl.ts","../src/internal/runtime/core/Lifecycle.ts","../src/internal/runtime/core/errorSummary.ts","../src/internal/runtime/core/DebugSink.record.ts","../src/internal/observability/jsonValue.ts","../src/internal/runtime/core/EffectOpCore.ts","../src/internal/runtime/core/ConvergeStaticIrCollector.ts","../src/internal/runtime/core/StateTransaction.ts","../src/internal/runtime/core/RuntimeKernel.ts","../src/internal/runtime/core/env.ts","../src/internal/runtime/core/HostScheduler.ts","../src/internal/runtime/core/RuntimeStore.ts","../src/internal/runtime/core/TickScheduler.ts","../src/internal/runtime/core/TaskRunner.ts","../src/internal/runtime/core/ModeRunner.ts","../src/internal/runtime/core/LatestFiberSlot.ts","../src/internal/runtime/core/DeclarativeLinkRuntime.ts","../src/internal/runtime/core/KernelRef.ts","../src/internal/runtime/core/RuntimeServiceBuiltins.ts","../src/internal/observability/evidenceCollector.ts","../src/internal/observability/runSession.ts","../src/internal/runtime/core/ModuleRuntime.operation.ts","../src/internal/effect-op.ts","../src/internal/runtime/core/ModuleRuntime.dispatch.ts","../src/internal/runtime/core/ReducerDiagnostics.ts","../src/internal/runtime/core/mutativePatches.ts","../src/internal/runtime/core/ModuleRuntime.effects.ts","../src/internal/runtime/core/ModuleRuntime.transaction.ts","../src/internal/state-trait/converge-in-transaction.impl.ts","../src/internal/digest.ts","../src/internal/state-trait/converge-diagnostics.ts","../src/internal/state-trait/exec-vm-mode.ts","../src/internal/state-trait/converge-step.ts","../src/internal/state-trait/validate.impl.ts","../src/internal/state-trait/source.impl.ts","../src/internal/resource.ts","../src/internal/runtime/core/ReplayLog.ts","../src/internal/runtime/core/ModuleRuntime.txnQueue.ts","../src/internal/runtime/core/runner/SyncEffectRunner.ts","../sr
c/internal/runtime/core/ModuleRuntime.concurrencyPolicy.ts","../src/internal/runtime/core/ModuleRuntime.txnLanePolicy.ts","../src/internal/runtime/core/ModuleRuntime.traitConvergeConfig.ts","../src/internal/state-trait/build.ts","../src/internal/runtime/core/ModuleRuntime.internalHooks.ts","../src/internal/runtime/core/RootContext.ts","../src/internal/runtime/core/process/ProcessRuntime.make.ts","../src/internal/runtime/core/process/concurrency.ts","../src/internal/runtime/core/process/events.ts","../src/internal/runtime/core/process/meta.ts","../src/internal/runtime/core/process/selectorSchema.ts","../src/internal/runtime/core/process/triggerStreams.ts","../src/internal/runtime/core/ReadQuery.ts","../src/internal/serviceId.ts","../src/internal/runtime/core/SelectorGraph.ts","../src/internal/runtime/core/ModuleRuntime.logics.ts","../src/internal/runtime/core/LifecycleDiagnostics.ts","../src/internal/runtime/core/LogicDiagnostics.ts","../src/internal/runtime/core/Platform.ts","../src/internal/runtime/core/ConcurrencyDiagnostics.ts","../src/internal/runtime/core/BoundApiRuntime.ts","../src/internal/action.ts","../src/internal/runtime/core/FlowRuntime.ts","../src/internal/runtime/core/MatchBuilder.ts","../src/internal/root.ts","../src/internal/runtime/core/DeclarativeLinkIR.ts","../src/ReadQuery.ts"],"sourcesContent":["import { Effect } from 'effect'\nimport * as ModuleFactory from './internal/runtime/ModuleFactory.js'\nimport * as Debug from './internal/runtime/core/DebugSink.js'\nimport { currentConvergeStaticIrCollectors } from './internal/runtime/core/ConvergeStaticIrCollector.js'\nimport { exportDeclarativeLinkIr, type DeclarativeLinkIR } from './internal/runtime/core/DeclarativeLinkIR.js'\nimport { DeclarativeLinkRuntimeTag } from './internal/runtime/core/env.js'\nimport type * as Protocol from './internal/runtime/core/process/protocol.js'\nimport * as Meta from './internal/runtime/core/process/meta.js'\nimport type { AnyModuleShape, ModuleHandle, ModuleLike, 
ModuleTag } from './internal/module.js'\nimport * as ReadQuery from './ReadQuery.js'\n\nexport type DiagnosticsLevel = Protocol.DiagnosticsLevel\nexport type ProcessScope = Protocol.ProcessScope\nexport type ProcessIdentity = Protocol.ProcessIdentity\nexport type ProcessInstanceIdentity = Protocol.ProcessInstanceIdentity\nexport type ProcessTriggerSpec = Protocol.ProcessTriggerSpec\nexport type ProcessTrigger = Protocol.ProcessTrigger\nexport type ProcessConcurrencyPolicy = Protocol.ProcessConcurrencyPolicy\nexport type ProcessErrorPolicy = Protocol.ProcessErrorPolicy\nexport type ProcessDefinition = Protocol.ProcessDefinition\nexport type ProcessInstallation = Protocol.ProcessInstallation\nexport type SerializableErrorSummary = Protocol.SerializableErrorSummary\nexport type ProcessInstanceStatus = Protocol.ProcessInstanceStatus\nexport type ProcessEvent = Protocol.ProcessEvent\nexport type ProcessControlRequest = Protocol.ProcessControlRequest\nexport type ProcessPlatformEvent = Protocol.ProcessPlatformEvent\n\nexport type ProcessEffect<E = never, R = never> = Meta.ProcessEffect<E, R>\n\nexport type ProcessMakeDefinition =\n | string\n | {\n readonly processId: string\n readonly name?: string\n readonly description?: string\n readonly requires?: ReadonlyArray<string>\n readonly triggers?: ReadonlyArray<ProcessTriggerSpec>\n readonly concurrency?: ProcessConcurrencyPolicy\n readonly errorPolicy?: ProcessErrorPolicy\n readonly diagnosticsLevel?: DiagnosticsLevel\n }\n\nconst DEFAULT_TRIGGERS: ReadonlyArray<ProcessTriggerSpec> = [{ kind: 'platformEvent', platformEvent: 'runtime:boot' }]\n\nconst normalizeDefinition = (input: ProcessMakeDefinition): ProcessDefinition => {\n const base = typeof input === 'string' ? 
({ processId: input } satisfies { readonly processId: string }) : input\n\n const processId = base.processId\n if (typeof processId !== 'string' || processId.length === 0) {\n throw new Error('[Process.make] processId must be a non-empty string')\n }\n\n const triggers = Array.isArray(base.triggers) && base.triggers.length > 0 ? base.triggers : DEFAULT_TRIGGERS\n\n const concurrency = base.concurrency ?? ({ mode: 'latest' } satisfies ProcessConcurrencyPolicy)\n const errorPolicy = base.errorPolicy ?? ({ mode: 'failStop' } satisfies ProcessErrorPolicy)\n const diagnosticsLevel = base.diagnosticsLevel ?? 'off'\n\n return {\n processId,\n name: base.name,\n description: base.description,\n requires: base.requires,\n triggers,\n concurrency,\n errorPolicy,\n diagnosticsLevel,\n }\n}\n\nexport const make = <E, R>(definition: ProcessMakeDefinition, effect: Effect.Effect<void, E, R>): ProcessEffect<E, R> =>\n Meta.attachMeta(effect, {\n definition: normalizeDefinition(definition),\n kind: 'process',\n })\n\nexport const getDefinition = (effect: Effect.Effect<void, any, any>): ProcessDefinition | undefined =>\n Meta.getDefinition(effect)\n\nexport const getMeta = (effect: Effect.Effect<void, any, any>): Meta.ProcessMeta | undefined => Meta.getMeta(effect)\n\nexport const attachMeta = <E, R>(effect: Effect.Effect<void, E, R>, meta: Meta.ProcessMeta): ProcessEffect<E, R> =>\n Meta.attachMeta(effect, meta)\n\ntype LinkModuleToken<Id extends string, Sh extends AnyModuleShape> = ModuleTag<Id, Sh> | ModuleLike<Id, Sh, object>\n\ntype LinkModuleIdOf<M> = M extends { readonly id: infer Id } ? Id : never\ntype LinkModuleShapeOf<M> =\n M extends ModuleLike<string, infer Sh, object> ? Sh : M extends ModuleTag<string, infer Sh> ? Sh : never\ntype LinkModuleTagOf<M> =\n M extends ModuleLike<string, infer Sh, object>\n ? ModuleTag<string, Sh>\n : M extends ModuleTag<string, AnyModuleShape>\n ? 
M\n : never\n\nconst isModuleLike = (value: unknown): value is ModuleLike<string, AnyModuleShape, object> =>\n Boolean(value) &&\n typeof value === 'object' &&\n ((value as { readonly _kind?: unknown })._kind === 'ModuleDef' ||\n (value as { readonly _kind?: unknown })._kind === 'Module') &&\n 'tag' in (value as object)\n\nconst unwrapModuleTag = <Id extends string, Sh extends AnyModuleShape>(\n module: LinkModuleToken<Id, Sh>,\n): ModuleTag<Id, Sh> => {\n if (isModuleLike(module)) {\n return module.tag\n }\n return module as ModuleTag<Id, Sh>\n}\n\nexport interface LinkConfig<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> {\n readonly id?: string\n readonly modules: Ms\n}\n\nexport type LinkHandles<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> = {\n [M in Ms[number] as LinkModuleIdOf<M>]: ModuleHandle<LinkModuleShapeOf<M>>\n}\n\ntype ModulesRecord<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> = {\n [M in Ms[number] as LinkModuleIdOf<M>]: LinkModuleTagOf<M>\n}\n\nexport function link<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[], E = never, R = never>(\n config: LinkConfig<Ms>,\n logic: ($: LinkHandles<Ms>) => Effect.Effect<void, E, R>,\n): ProcessEffect<E, R> {\n const linkId =\n config.id ??\n [...config.modules]\n .map((m) => m.id)\n .sort()\n .join('~')\n\n const modulesRecord = Object.create(null) as ModulesRecord<Ms>\n const requires = new Set<string>()\n\n for (const module of config.modules) {\n const tag = unwrapModuleTag(module)\n requires.add(String(tag.id))\n ;(modulesRecord as Record<string, ModuleTag<string, AnyModuleShape>>)[tag.id] = tag as unknown as ModuleTag<\n string,\n AnyModuleShape\n >\n }\n\n const effect = ModuleFactory.Link(\n modulesRecord as unknown as Record<string, ModuleTag<string, AnyModuleShape>>,\n logic as unknown as ($: Record<string, ModuleHandle<AnyModuleShape>>) => Effect.Effect<void, E, R>,\n )\n\n const wrapped = Effect.gen(function* () {\n const level = 
yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n if (level !== 'off') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: `link:${linkId}`,\n code: 'process_link::blackbox_best_effort',\n severity: 'info',\n message:\n '[Process.link] Blackbox link is best-effort and is NOT stabilized within the same tick. Strong consistency applies only to declarative IR.',\n hint: 'If you need same-tick strong consistency, migrate to Process.linkDeclarative(...) (073).',\n kind: 'blackbox_link_boundary',\n })\n }\n return yield* effect\n }) as unknown as Effect.Effect<void, E, R>\n\n const definition: ProcessDefinition = {\n processId: linkId,\n requires: Array.from(requires),\n triggers: [...DEFAULT_TRIGGERS, { kind: 'platformEvent', platformEvent: `link:${linkId}` }],\n concurrency: { mode: 'latest' },\n errorPolicy: { mode: 'failStop' },\n diagnosticsLevel: 'off',\n }\n\n return Meta.attachMeta(wrapped, {\n definition,\n kind: 'link',\n }) as ProcessEffect<E, R>\n}\n\ntype DeclarativeReadNode = {\n readonly _tag: 'readNode'\n readonly moduleId: string\n readonly tag: ModuleTag<string, AnyModuleShape>\n readonly readQuery: ReadQuery.ReadQueryCompiled<any, any>\n}\n\ntype DeclarativeDispatchNode = {\n readonly _tag: 'dispatchNode'\n readonly moduleId: string\n readonly tag: ModuleTag<string, AnyModuleShape>\n readonly actionTag: string\n}\n\ntype DeclarativeLinkEdgeSpec = {\n readonly from: DeclarativeReadNode\n readonly to: DeclarativeDispatchNode\n}\n\ntype DeclarativeLinkHandles<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]> = {\n [M in Ms[number] as LinkModuleIdOf<M>]: {\n readonly read: <S, V>(selector: ReadQuery.ReadQueryInput<S, V>) => DeclarativeReadNode\n readonly dispatch: (actionTag: string) => DeclarativeDispatchNode\n }\n}\n\nexport function linkDeclarative<Ms extends readonly LinkModuleToken<string, AnyModuleShape>[]>(\n config: LinkConfig<Ms>,\n build: ($: DeclarativeLinkHandles<Ms>) => 
ReadonlyArray<DeclarativeLinkEdgeSpec>,\n): ProcessEffect<never, never> {\n const linkId =\n config.id ??\n [...config.modules]\n .map((m) => m.id)\n .sort()\n .join('~')\n\n const modulesRecord = Object.create(null) as ModulesRecord<Ms>\n const requires = new Set<string>()\n\n for (const module of config.modules) {\n const tag = unwrapModuleTag(module)\n requires.add(String(tag.id))\n ;(modulesRecord as Record<string, ModuleTag<string, AnyModuleShape>>)[tag.id] = tag as unknown as ModuleTag<\n string,\n AnyModuleShape\n >\n }\n\n const handles = Object.create(null) as DeclarativeLinkHandles<Ms>\n for (const id of Object.keys(modulesRecord) as Array<keyof ModulesRecord<Ms> & string>) {\n const tag = (modulesRecord as any)[id] as ModuleTag<string, AnyModuleShape>\n ;(handles as any)[id] = {\n read: (selector: ReadQuery.ReadQueryInput<any, any>) => {\n const compiled = ReadQuery.compile(selector)\n const staticIr = compiled.staticIr\n const ok = staticIr.lane === 'static' && staticIr.readsDigest != null && staticIr.fallbackReason == null\n if (!ok) {\n throw new Error(\n `[Process.linkDeclarative] ReadQuery must be static with readsDigest (moduleId=${id}, selectorId=${staticIr.selectorId}, lane=${staticIr.lane}, fallbackReason=${staticIr.fallbackReason}). ` +\n 'Fix: use ReadQuery.make(...) or annotate selector.fieldPaths.',\n )\n }\n return { _tag: 'readNode', moduleId: id, tag, readQuery: compiled } as DeclarativeReadNode\n },\n dispatch: (actionTag: string) => {\n if (typeof actionTag !== 'string' || actionTag.length === 0) {\n throw new Error('[Process.linkDeclarative] actionTag must be a non-empty string')\n }\n return { _tag: 'dispatchNode', moduleId: id, tag, actionTag } as DeclarativeDispatchNode\n },\n }\n }\n\n const edges = build(handles)\n if (!Array.isArray(edges)) {\n throw new Error('[Process.linkDeclarative] build(...) 
must return an array of edges')\n }\n\n const normalized = edges\n .filter((e): e is DeclarativeLinkEdgeSpec => Boolean(e && e.from && e.to))\n .map((e) => {\n if (e.from._tag !== 'readNode' || e.to._tag !== 'dispatchNode') {\n throw new Error('[Process.linkDeclarative] Invalid edge spec returned from build(...)')\n }\n return e\n })\n .slice()\n .sort((a, b) => {\n const ak = `${a.from.moduleId}:${a.from.readQuery.staticIr.selectorId}->${a.to.moduleId}:${a.to.actionTag}`\n const bk = `${b.from.moduleId}:${b.from.readQuery.staticIr.selectorId}->${b.to.moduleId}:${b.to.actionTag}`\n return ak < bk ? -1 : ak > bk ? 1 : 0\n })\n\n const program: Effect.Effect<void, never, any> = Effect.gen(function* () {\n const runtime = yield* Effect.service(DeclarativeLinkRuntimeTag).pipe(Effect.orDie)\n const collectors = yield* Effect.service(currentConvergeStaticIrCollectors).pipe(Effect.orDie)\n\n const runtimeByTag = new Map<ModuleTag<string, AnyModuleShape>, any>()\n const resolveRuntime = (tag: ModuleTag<string, AnyModuleShape>): Effect.Effect<any, never, any> =>\n Effect.suspend(() => {\n const cached = runtimeByTag.get(tag)\n if (cached) return Effect.succeed(cached)\n return Effect.service(tag as any).pipe(\n Effect.tap((rt: any) =>\n Effect.sync(() => {\n runtimeByTag.set(tag, rt)\n }),\n ),\n )\n })\n\n const nodes: Array<any> = []\n const irEdges: Array<any> = []\n const readNodes: Array<any> = []\n const dispatchNodes: Array<any> = []\n\n for (let i = 0; i < normalized.length; i += 1) {\n const edge = normalized[i]\n const fromRuntime = yield* resolveRuntime(edge.from.tag)\n const toRuntime = yield* resolveRuntime(edge.to.tag)\n\n const fromKey = `${fromRuntime.moduleId}::${fromRuntime.instanceId}`\n const readNodeId = `r${i}`\n const dispatchNodeId = `d${i}`\n\n nodes.push({\n id: readNodeId,\n kind: 'readQuery',\n moduleId: fromRuntime.moduleId,\n instanceKey: fromRuntime.instanceId,\n readQuery: edge.from.readQuery.staticIr,\n })\n nodes.push({\n id: 
dispatchNodeId,\n kind: 'dispatch',\n moduleId: toRuntime.moduleId,\n instanceKey: toRuntime.instanceId,\n actionTag: edge.to.actionTag,\n })\n\n irEdges.push({ from: readNodeId, to: dispatchNodeId })\n\n readNodes.push({ nodeId: readNodeId, moduleInstanceKey: fromKey, readQuery: edge.from.readQuery })\n dispatchNodes.push({\n nodeId: dispatchNodeId,\n dispatch: (payload: unknown) =>\n (toRuntime.dispatch({ _tag: edge.to.actionTag, payload } as any) as any).pipe(Effect.asVoid),\n })\n }\n\n const ir: DeclarativeLinkIR = {\n version: 1,\n nodes,\n edges: irEdges,\n }\n\n const registration = {\n linkId,\n ir,\n readNodes,\n dispatchNodes,\n } as const\n\n const unregister = runtime.registerDeclarativeLink(registration as any)\n\n if (collectors.length > 0) {\n const exported = exportDeclarativeLinkIr({ linkId, ir })\n for (const collector of collectors) {\n collector.register(exported as any)\n }\n }\n\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n unregister()\n }),\n )\n\n yield* Effect.never\n })\n\n const definition: ProcessDefinition = {\n processId: `dlink:${linkId}`,\n requires: Array.from(requires),\n triggers: DEFAULT_TRIGGERS,\n concurrency: { mode: 'latest' },\n errorPolicy: { mode: 'failStop' },\n diagnosticsLevel: 'off',\n }\n\n return Meta.attachMeta(program, {\n definition,\n kind: 'declarativeLink',\n }) as ProcessEffect<never, never>\n}\n","import { Effect, Layer, Option, Schema, ServiceMap } from 'effect'\nimport * as ModuleRuntimeImpl from './ModuleRuntime.js'\nimport * as BoundApiRuntime from './BoundApiRuntime.js'\nimport * as LogicDiagnostics from './core/LogicDiagnostics.js'\nimport * as LogicPlanMarker from './core/LogicPlanMarker.js'\nimport type * as Action from '../action.js'\nimport type { FieldPath } from '../field-path.js'\nimport type {\n AnyModuleShape,\n AnySchema,\n ActionsFromMap,\n ModuleTag as LogixModuleTag,\n ModuleShape,\n ReducersFromMap,\n StateOf,\n ActionOf,\n ModuleHandle,\n ModuleLogic,\n ModuleImpl,\n 
ModuleImplementStateTransactionOptions,\n} from './core/module.js'\n\n/**\n * v3: Link (formerly Orchestrator)\n * A glue layer for cross-module collaboration.\n *\n * - Does not own its own State.\n * - Can access multiple Modules' readonly handles.\n * - Can define Logic only; cannot define State/Action.\n */\nexport function Link<Modules extends Record<string, LogixModuleTag<any, AnyModuleShape>>, E = never, R = never>(\n modules: Modules,\n logic: ($: { [K in keyof Modules]: ModuleHandle<Modules[K]['shape']> }) => Effect.Effect<void, E, R>,\n): Effect.Effect<void, E, R> {\n return Effect.gen(function* () {\n const handles: Record<string, ModuleHandle<AnyModuleShape>> = {}\n\n for (const [key, module] of Object.entries(modules)) {\n const runtime = yield* Effect.service(module).pipe(Effect.orDie)\n\n handles[key] = {\n read: (selector: any) => Effect.map(runtime.getState, selector),\n changes: runtime.changes,\n dispatch: runtime.dispatch,\n actions$: runtime.actions$,\n actions: new Proxy(\n {},\n {\n get: (_target, prop) => (payload: any) => runtime.dispatch({ _tag: prop as string, payload }),\n },\n ),\n }\n }\n\n return yield* logic(\n handles as {\n [K in keyof Modules]: ModuleHandle<Modules[K]['shape']>\n },\n )\n })\n}\n\n/**\n * Module factory implementation: construct a ModuleTag from an id and Schema definitions.\n */\nexport function Module<Id extends string, SSchema extends AnySchema, AMap extends Record<string, Action.AnyActionToken>>(\n id: Id,\n def: {\n readonly state: SSchema\n readonly actions: AMap\n readonly reducers?: ReducersFromMap<SSchema, AMap>\n },\n): LogixModuleTag<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>> {\n const actionMembers = Object.entries(def.actions).map(([tag, token]) => {\n const payload = (token as Action.AnyActionToken).schema as AnySchema\n return Schema.Struct(\n payload === Schema.Void\n ? 
{\n _tag: Schema.Literal(tag),\n payload: Schema.optional(payload),\n }\n : {\n _tag: Schema.Literal(tag),\n payload,\n },\n )\n }) as [AnySchema, ...AnySchema[]]\n\n const shape: ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap> = {\n stateSchema: def.state as any,\n actionSchema: Schema.Union(actionMembers) as unknown as Schema.Schema<ActionsFromMap<AMap>>,\n actionMap: def.actions as any,\n }\n\n type ShapeState = StateOf<typeof shape>\n type ShapeAction = ActionOf<typeof shape>\n\n // Normalize tag-keyed reducers into `_tag -> (state, action, sink?) => state` for the runtime.\n const reducers =\n def.reducers &&\n (Object.fromEntries(\n Object.entries(def.reducers).map(([tag, reducer]) => [\n tag,\n (state: ShapeState, action: ShapeAction, sink?: (path: string | FieldPath) => void) =>\n // Relies on the runtime `_tag` convention: only actions matching the current tag are routed to this reducer.\n (reducer as any)(\n state,\n action as {\n readonly _tag: string\n readonly payload: unknown\n },\n sink,\n ) as ShapeState,\n ]),\n ) as Record<\n string,\n (state: ShapeState, action: ShapeAction, sink?: (path: string | FieldPath) => void) => ShapeState\n >)\n\n class ModuleTag extends ServiceMap.Service<\n ModuleTag,\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>\n >()(`@logixjs/Module/${id}`) {}\n\n const tag = ModuleTag\n\n const moduleTag = Object.assign(tag, {\n _kind: 'ModuleTag' as const,\n id,\n shape,\n stateSchema: shape.stateSchema,\n actionSchema: shape.actionSchema,\n actions: shape.actionMap,\n reducers: def.reducers,\n /**\n * Build a Logic program for the current Module:\n * - Read its ModuleRuntime from Context at runtime.\n * - Build a BoundApi from the runtime.\n * - Pass the BoundApi to the caller to build business logic.\n */\n logic: <R = unknown, E = never>(\n build: (api: import('./core/module.js').BoundApi<typeof shape, R>) => ModuleLogic<typeof shape, R, E>,\n ): ModuleLogic<typeof 
shape, R, E> => {\n const logicEffect = Effect.gen(function* () {\n const runtime = yield* Effect.service(tag).pipe(Effect.orDie)\n const logicUnit = yield* Effect.serviceOption(LogicDiagnostics.LogicUnitServiceTag).pipe(\n Effect.map(Option.getOrUndefined),\n )\n const phaseService = yield* Effect.serviceOption(LogicDiagnostics.LogicPhaseServiceTag).pipe(\n Effect.map(Option.getOrUndefined),\n )\n const api = BoundApiRuntime.make<typeof shape, R>(shape, runtime, {\n getPhase: () => phaseService?.current ?? 'run',\n phaseService,\n moduleId: id,\n logicUnit,\n })\n\n let built: unknown\n try {\n built = build(api)\n } catch (err) {\n // Convert synchronously thrown LogicPhaseError into Effect.fail so runSync won't treat it as an \"async pending fiber\".\n if ((err as any)?._tag === 'LogicPhaseError') {\n return yield* Effect.fail(err as any)\n }\n throw err\n }\n\n if (LogicPlanMarker.isLogicPlanEffect(built)) {\n return yield* built as Effect.Effect<any, any, any>\n }\n\n const isLogicPlan = (value: unknown): value is import('./core/module.js').LogicPlan<typeof shape, R, E> =>\n Boolean(value && typeof value === 'object' && 'setup' in (value as any) && 'run' in (value as any))\n\n const plan = isLogicPlan(built)\n ? 
built\n : ({\n setup: Effect.void,\n run: built as Effect.Effect<any, any, any>,\n } satisfies import('./core/module.js').LogicPlan<typeof shape, R, E>)\n\n return plan\n })\n\n LogicPlanMarker.markAsLogicPlanEffect(logicEffect)\n return logicEffect\n },\n\n /**\n * live: given an initial state and a set of logics, construct a scoped ModuleRuntime Layer.\n *\n * Env conventions:\n * - R represents extra environment required by the logics (services / platform, etc.).\n * - ModuleRuntime itself only depends on Scope.Scope and is managed by Layer.scoped.\n */\n live: <R = never, E = never>(\n initial: StateOf<typeof shape>,\n ...logics: Array<ModuleLogic<typeof shape, R, E>>\n ): Layer.Layer<import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>, E, R> =>\n Layer.effect(\n tag,\n ModuleRuntimeImpl.make<StateOf<typeof shape>, ActionOf<typeof shape>, R>(initial, {\n tag,\n logics: logics as ReadonlyArray<Effect.Effect<any, any, any>>,\n moduleId: id,\n reducers,\n }),\n ) as unknown as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n E,\n R\n >,\n\n /**\n * implement: build a ModuleImpl blueprint from Module definition + initial state + a set of logics.\n *\n * - R represents the Env required by the logics.\n * - The returned ModuleImpl.layer carries R as its input environment.\n * - withLayer/withLayers can progressively narrow R to a more concrete Env (even never).\n */\n implement: <R = never>(config: {\n initial: StateOf<typeof shape>\n logics?: Array<ModuleLogic<typeof shape, R, never>>\n imports?: ReadonlyArray<Layer.Layer<any, any, any> | ModuleImpl<any, AnyModuleShape, any>>\n /**\n * processes: a set of long-lived flows bound to this Module implementation (including Link).\n *\n * - These Effects will be forked by the runtime container (e.g. 
Runtime.make).\n * - Types use relaxed E/R to enable composing cross-module orchestration logic.\n * - Business code typically builds these flows via Link.make.\n */\n processes?: ReadonlyArray<Effect.Effect<void, any, any>>\n /**\n * stateTransaction: module-level StateTransaction config.\n *\n * - If instrumentation is not provided, fall back to Runtime-level config (if any) or NODE_ENV defaults.\n * - If instrumentation is provided, it takes precedence over Runtime-level config and defaults.\n */\n stateTransaction?: ModuleImplementStateTransactionOptions\n }): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, R> => {\n const importedModules = (config.imports ?? []).flatMap((item) => {\n if ((item as ModuleImpl<any, AnyModuleShape, any>)._tag === 'ModuleImpl') {\n return [\n (item as ModuleImpl<any, AnyModuleShape, any>).module as unknown as ServiceMap.Key<\n any,\n import('./core/module.js').ModuleRuntime<any, any>\n >,\n ]\n }\n return []\n })\n\n const baseLayer = Layer.effect(\n tag,\n ModuleRuntimeImpl.make<StateOf<typeof shape>, ActionOf<typeof shape>, R>(config.initial, {\n tag,\n logics: (config.logics || []) as ReadonlyArray<Effect.Effect<any, any, any>>,\n processes: (config.processes || []) as ReadonlyArray<Effect.Effect<void, any, any>>,\n moduleId: id,\n imports: importedModules,\n reducers,\n stateTransaction: config.stateTransaction,\n }),\n ) as unknown as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n\n const processes = config.processes ?? 
[]\n\n const makeImplWithLayer = (\n layer: Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> => ({\n _tag: 'ModuleImpl',\n module: moduleTag as unknown as LogixModuleTag<\n Id,\n ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>\n >,\n layer,\n processes,\n stateTransaction: config.stateTransaction,\n withLayer: (\n extra: Layer.Layer<any, never, any>,\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> => {\n const provided = (\n layer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n ).pipe(Layer.provide(extra as Layer.Layer<any, never, any>))\n\n const merged = Layer.mergeAll(provided, extra as Layer.Layer<any, never, any>) as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >\n\n return makeImplWithLayer(merged)\n },\n withLayers: (\n ...extras: ReadonlyArray<Layer.Layer<any, never, any>>\n ): ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any> =>\n extras.reduce<ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any>>(\n (implAcc, extra) => implAcc.withLayer(extra),\n makeImplWithLayer(\n layer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n ),\n ),\n })\n\n // Start from baseLayer and layer-in imports (Layer or other ModuleImpl.layer) sequentially.\n const initialImpl = makeImplWithLayer(\n baseLayer as Layer.Layer<\n import('./core/module.js').ModuleRuntime<StateOf<typeof shape>, ActionOf<typeof shape>>,\n never,\n any\n >,\n )\n\n const imports = config.imports ?? 
[]\n\n const finalImpl = imports.reduce<\n ModuleImpl<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>, any>\n >((implAcc, item) => {\n const layer =\n (item as ModuleImpl<any, AnyModuleShape, any>)._tag === 'ModuleImpl'\n ? (item as ModuleImpl<any, AnyModuleShape, any>).layer\n : (item as Layer.Layer<any, any, any>)\n\n return implAcc.withLayer(layer as Layer.Layer<any, never, any>)\n }, initialImpl)\n\n return finalImpl\n },\n })\n\n return moduleTag as LogixModuleTag<Id, ModuleShape<SSchema, Schema.Schema<ActionsFromMap<AMap>>, AMap>>\n}\n","import {\n Exit,\n Effect,\n Fiber,\n ManagedRuntime,\n Layer,\n Stream,\n SubscriptionRef,\n PubSub,\n Scope,\n Option,\n Queue,\n Duration,\n ServiceMap,\n} from 'effect'\nimport type {\n LogicPlan,\n ModuleRuntime as PublicModuleRuntime,\n ReadonlySubscriptionRef,\n StateChangeWithMeta,\n} from './module.js'\nimport * as Lifecycle from './Lifecycle.js'\nimport * as Debug from './DebugSink.js'\nimport { currentConvergeStaticIrCollectors } from './ConvergeStaticIrCollector.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport type * as ModuleTraits from './ModuleTraits.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport * as RuntimeKernel from './RuntimeKernel.js'\nimport * as FullCutoverGate from './FullCutoverGate.js'\nimport * as KernelRef from './KernelRef.js'\nimport * as RuntimeServiceBuiltins from './RuntimeServiceBuiltins.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport {\n getDefaultStateTxnInstrumentation,\n isDevEnv,\n ReadQueryStrictGateConfigTag,\n RuntimeStoreTag,\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n TickSchedulerTag,\n} from './env.js'\nimport type {\n StateTransactionInstrumentation,\n StateTransactionOverrides,\n TraitConvergeTimeSlicingPatch,\n TickSchedulerService,\n TxnLanesPatch,\n} from './env.js'\nimport { normalizeNonEmptyString } from './normalize.js'\nimport { EvidenceCollectorTag } from 
'../../observability/evidenceCollector.js'\nimport * as EffectOp from '../../effect-op.js'\nimport { makeRunOperation } from './ModuleRuntime.operation.js'\nimport { makeDispatchOps } from './ModuleRuntime.dispatch.js'\nimport { makeEffectsRegistry } from './ModuleRuntime.effects.js'\nimport { makeTransactionOps } from './ModuleRuntime.transaction.js'\nimport { makeResolveConcurrencyPolicy } from './ModuleRuntime.concurrencyPolicy.js'\nimport { makeResolveTxnLanePolicy } from './ModuleRuntime.txnLanePolicy.js'\nimport {\n makeResolveTraitConvergeConfig,\n type ResolvedTraitConvergeConfig,\n} from './ModuleRuntime.traitConvergeConfig.js'\nimport { compareFieldPath, isPrefixOf, normalizeFieldPath, toKey, type DirtyAllReason, type FieldPath } from '../../field-path.js'\nimport * as RowId from '../../state-trait/rowid.js'\nimport * as StateTraitBuild from '../../state-trait/build.js'\nimport { exportConvergeStaticIr, getConvergeStaticIrDigest } from '../../state-trait/converge-ir.js'\nimport { makeConvergeExecIr } from '../../state-trait/converge-exec-ir.js'\nimport * as StateTraitConverge from '../../state-trait/converge.js'\nimport * as StateTraitValidate from '../../state-trait/validate.js'\nimport { installInternalHooks, type TraitState } from './ModuleRuntime.internalHooks.js'\nimport { RootContextTag, type RootContext } from './RootContext.js'\nimport * as ProcessRuntime from './process/ProcessRuntime.js'\nimport * as ReadQuery from './ReadQuery.js'\nimport * as SelectorGraph from './SelectorGraph.js'\nimport { makeModuleInstanceKey, type RuntimeStoreModuleCommit } from './RuntimeStore.js'\nimport {\n getRegisteredRuntime,\n getRuntimeByModuleAndInstance,\n registerRuntime,\n registerRuntimeByInstanceKey,\n unregisterRuntime,\n unregisterRuntimeByInstanceKey,\n} from './ModuleRuntime.registry.js'\nimport {\n makeEnqueueTransaction,\n type CapturedTxnRuntimeScope,\n type EnqueueTransaction,\n} from './ModuleRuntime.txnQueue.js'\nimport { runModuleLogics } from 
'./ModuleRuntime.logics.js'\nimport * as ConcurrencyDiagnostics from './ConcurrencyDiagnostics.js'\n\nexport { registerRuntime, unregisterRuntime, getRegisteredRuntime, getRuntimeByModuleAndInstance }\n\nexport interface ModuleRuntimeOptions<S, A, R = never> {\n readonly tag?: ServiceMap.Key<any, PublicModuleRuntime<S, A>>\n /**\n * List of \"child modules\" resolvable within the current instance scope (imports-scope):\n * - Used only to build a minimal imports injector (ModuleToken -> ModuleRuntime).\n * - Do not capture the whole Context into ModuleRuntime (avoid accidentally retaining root/base services).\n */\n readonly imports?: ReadonlyArray<ServiceMap.Key<any, PublicModuleRuntime<any, any>>>\n readonly logics?: ReadonlyArray<Effect.Effect<any, any, R> | LogicPlan<any, R, any>>\n readonly processes?: ReadonlyArray<Effect.Effect<void, any, any>>\n readonly moduleId?: string\n /** Stable instance anchor (single source of truth); defaults to a monotonic sequence. Never default to randomness/time. 
*/\n readonly instanceId?: string\n readonly createState?: Effect.Effect<SubscriptionRef.SubscriptionRef<S>, never, Scope.Scope>\n readonly createActionHub?: Effect.Effect<PubSub.PubSub<A>, never, Scope.Scope>\n /**\n * Primary reducer map: `_tag -> (state, action) => nextState`.\n *\n * - If provided, dispatch will synchronously apply the reducer before publishing the Action.\n * - If a `_tag` has no reducer, behavior matches the current watcher-only mode.\n */\n readonly reducers?: Readonly<\n Record<string, (state: S, action: A, sink?: (path: StateTransaction.StatePatchPath) => void) => S>\n >\n /**\n * Module-level StateTransaction config:\n * - If instrumentation is provided, it takes precedence over the Runtime-level config and NODE_ENV defaults.\n * - Otherwise, fall back to the Runtime-level config (if any) or getDefaultStateTxnInstrumentation().\n */\n readonly stateTransaction?: {\n readonly instrumentation?: StateTransactionInstrumentation\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeMode?: 'auto' | 'full' | 'dirty'\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly txnLanes?: TxnLanesPatch\n }\n}\n\nlet nextInstanceSeq = 0\n\nconst makeDefaultInstanceId = (): string => {\n nextInstanceSeq += 1\n return `i${nextInstanceSeq}`\n}\n\nexport const make = <S, A, R = never>(\n initialState: S,\n options: ModuleRuntimeOptions<S, A, R> = {},\n): Effect.Effect<PublicModuleRuntime<S, A>, never, Scope.Scope | R> => {\n const program = Effect.gen(function* () {\n const stateRef = options.createState ? 
yield* options.createState : yield* SubscriptionRef.make(initialState)\n\n const commitHub = yield* PubSub.unbounded<StateChangeWithMeta<S>>()\n const actionCommitHub = yield* PubSub.unbounded<StateChangeWithMeta<A>>()\n let commitHubSubscriberCount = 0\n\n const fromCommitHub = Stream.unwrap(\n Effect.gen(function* () {\n commitHubSubscriberCount += 1\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n commitHubSubscriberCount = Math.max(0, commitHubSubscriberCount - 1)\n }),\n )\n return Stream.fromPubSub(commitHub)\n }),\n )\n\n const moduleId = options.moduleId ?? 'unknown'\n const instanceId = normalizeNonEmptyString(options.instanceId) ?? makeDefaultInstanceId()\n const moduleInstanceKey = makeModuleInstanceKey(moduleId, instanceId)\n const runtimeLabel = yield* Effect.service(Debug.currentRuntimeLabel).pipe(Effect.orDie)\n const lifecycle = yield* Lifecycle.makeLifecycleManager({\n moduleId,\n instanceId,\n runtimeLabel,\n })\n const concurrencyDiagnostics = yield* ConcurrencyDiagnostics.make({\n moduleId: options.moduleId,\n instanceId,\n })\n\n // Resolve StateTransaction instrumentation:\n // - Prefer ModuleRuntimeOptions.stateTransaction.instrumentation.\n // - Otherwise read the default from the Runtime-level StateTransactionConfig service.\n // - Finally fall back to NODE_ENV-based defaults.\n const runtimeConfigOpt = yield* Effect.serviceOption(\n StateTransactionConfigTag as unknown as ServiceMap.Key<any, { instrumentation?: StateTransactionInstrumentation }>,\n )\n const runtimeInstrumentation: StateTransactionInstrumentation | undefined = Option.isSome(runtimeConfigOpt)\n ? runtimeConfigOpt.value.instrumentation\n : undefined\n\n const instrumentation: StateTransactionInstrumentation =\n options.stateTransaction?.instrumentation ?? runtimeInstrumentation ?? 
getDefaultStateTxnInstrumentation()\n\n const resolveTraitConvergeConfig = makeResolveTraitConvergeConfig({\n moduleId: options.moduleId,\n stateTransaction: options.stateTransaction,\n })\n\n const resolveConcurrencyPolicy = makeResolveConcurrencyPolicy({\n moduleId: options.moduleId,\n diagnostics: concurrencyDiagnostics,\n })\n\n const resolveConcurrencyPolicyFast = makeResolveConcurrencyPolicy({\n moduleId: options.moduleId,\n })\n\n const resolveTxnLanePolicy = makeResolveTxnLanePolicy({\n moduleId: options.moduleId,\n stateTransaction: options.stateTransaction,\n })\n\n const actionHub = options.createActionHub\n ? yield* options.createActionHub\n : yield* Effect.gen(function* () {\n const policy = yield* resolveConcurrencyPolicy()\n return yield* PubSub.bounded<A>(policy.losslessBackpressureCapacity)\n })\n\n\t\t const convergePlanCacheCapacity = 128\n\t\t const traitState: TraitState = {\n\t\t program: undefined,\n\t\t convergeStaticIrDigest: undefined,\n\t\t convergePlanCache: undefined,\n\t convergeGeneration: {\n\t generation: 0,\n\t generationBumpCount: 0,\n\t },\n\t pendingCacheMissReason: undefined,\n\t pendingCacheMissReasonCount: 0,\n\t\t lastConvergeIrKeys: undefined,\n\t\t listConfigs: [],\n\t\t }\n\n\t // Cached list-path set (derived from listConfigs) for txn index evidence recording.\n\t // - undefined => no list traits; keep recordPatch overhead at ~0 for non-list modules.\n\t let listPathSet: ReadonlySet<string> | undefined = undefined\n\n\t let externalOwnedFieldPaths: ReadonlyArray<FieldPath> = []\n\t let externalOwnedFieldPathKeys: ReadonlySet<string> = new Set()\n\n const rowIdStore = new RowId.RowIdStore(instanceId)\n const selectorGraph = SelectorGraph.make<S>({\n moduleId,\n instanceId,\n getFieldPathIdRegistry: () => {\n const convergeIr: any = (traitState.program as any)?.convergeIr\n if (!convergeIr || convergeIr.configError) return undefined\n return convergeIr.fieldPathIdRegistry\n },\n })\n\n // StateTransaction context:\n // - 
Maintain a single active transaction per ModuleRuntime;\n // - Aggregate state writes from all entrypoints on this instance (dispatch / Traits / source-refresh, etc.);\n // - New entrypoints (e.g. service writebacks / devtools operations) must also go through the same context + queue.\n\t const txnContext = StateTransaction.makeContext<S>({\n\t moduleId,\n\t instanceId,\n\t instrumentation,\n\t getFieldPathIdRegistry: () => {\n\t const convergeIr: any = (traitState.program as any)?.convergeIr\n\t if (!convergeIr || convergeIr.configError) return undefined\n\t return convergeIr.fieldPathIdRegistry\n\t },\n\t getListPathSet: () => listPathSet,\n\t })\n\n const recordStatePatch: RuntimeInternals['txn']['recordStatePatch'] = (\n path,\n reason,\n from,\n to,\n traitNodeId,\n stepId,\n ): void => {\n if (externalOwnedFieldPaths.length > 0) {\n const registry = txnContext.current?.fieldPathIdRegistry\n\n const toFieldPathOrStar = (input: StateTransaction.StatePatchPath | undefined): FieldPath | '*' | undefined => {\n if (input === undefined) return undefined\n if (input === '*') return '*'\n\n if (typeof input === 'number') {\n if (!registry) return '*'\n if (!Number.isFinite(input)) return '*'\n const idx = Math.floor(input)\n if (idx < 0) return '*'\n const resolved = registry.fieldPaths[idx]\n return resolved && Array.isArray(resolved) ? resolved : '*'\n }\n\n if (typeof input === 'string') {\n if (!registry) return '*'\n const id = registry.pathStringToId?.get(input)\n if (id == null) return '*'\n const resolved = registry.fieldPaths[id]\n return resolved && Array.isArray(resolved) ? resolved : '*'\n }\n\n const normalized = normalizeFieldPath(input)\n return normalized ?? '*'\n }\n\n const resolved = toFieldPathOrStar(path)\n\n const throwViolation = (details: { readonly resolvedPath?: FieldPath | '*'; readonly owned?: FieldPath }): never => {\n const owned = details.owned ?? externalOwnedFieldPaths[0]\n const ownedPath = owned ? 
owned.join('.') : '<unknown>'\n const resolvedPath =\n details.resolvedPath === undefined\n ? '<unknown>'\n : details.resolvedPath === '*'\n ? '*'\n : details.resolvedPath.join('.')\n\n const err: any = new Error(\n '[ExternalOwnedWriteError] State write overlaps an external-owned field.\\n' +\n `moduleId=${options.moduleId ?? 'unknown'}\\n` +\n `instanceId=${instanceId}\\n` +\n `owned=${ownedPath}\\n` +\n `path=${resolvedPath}\\n` +\n `reason=${String(reason)}\\n` +\n 'Fix: do not write external-owned fields via reducers/$.state.*; use StateTrait.externalStore to own the field, and avoid setState/state.update (root writes) on modules with external-owned fields.',\n )\n err.name = 'ExternalOwnedWriteError'\n err._tag = 'ExternalOwnedWriteError'\n err.moduleId = options.moduleId\n err.instanceId = instanceId\n err.reason = reason\n err.path = path\n throw err\n }\n\n const ensureFieldPath = (input: FieldPath | '*' | undefined): FieldPath => {\n if (input === undefined || input === '*') {\n return throwViolation({ resolvedPath: input })\n }\n return input\n }\n\n if (reason === 'trait-external-store') {\n const resolvedFieldPath = ensureFieldPath(resolved)\n const key = toKey(resolvedFieldPath)\n if (!externalOwnedFieldPathKeys.has(key)) {\n throwViolation({ resolvedPath: resolvedFieldPath })\n }\n } else {\n const resolvedFieldPath = ensureFieldPath(resolved)\n for (const owned of externalOwnedFieldPaths) {\n if (isPrefixOf(owned, resolvedFieldPath) || isPrefixOf(resolvedFieldPath, owned)) {\n throwViolation({ resolvedPath: resolvedFieldPath, owned })\n }\n }\n }\n }\n\n StateTransaction.recordPatch(txnContext, path, reason, from, to, traitNodeId, stepId)\n }\n\n const updateDraft: RuntimeInternals['txn']['updateDraft'] = (nextState): void => {\n if (!txnContext.current) return\n StateTransaction.updateDraft(txnContext, nextState as S)\n }\n\n const traitConvergeTimeSlicingSignal = yield* Queue.unbounded<void>()\n const traitConvergeTimeSlicingState: {\n readonly 
signal: Queue.Queue<void>\n readonly backlogDirtyPaths: Set<StateTransaction.StatePatchPath>\n readonly ensureWorkerStarted: () => Effect.Effect<void, never, never>\n workerFiber: Fiber.Fiber<void, never> | undefined\n backlogDirtyAllReason?: DirtyAllReason\n firstPendingAtMs: number | undefined\n lastTouchedAtMs: number | undefined\n latestConvergeConfig: ResolvedTraitConvergeConfig | undefined\n capturedContext: CapturedTxnRuntimeScope | undefined\n } = {\n signal: traitConvergeTimeSlicingSignal,\n backlogDirtyPaths: new Set(),\n ensureWorkerStarted: () => ensureTraitConvergeTimeSlicingWorkerStarted(),\n workerFiber: undefined,\n backlogDirtyAllReason: undefined,\n firstPendingAtMs: undefined,\n lastTouchedAtMs: undefined,\n latestConvergeConfig: undefined,\n capturedContext: undefined,\n }\n\n const moduleTraitsState: {\n frozen: boolean\n contributions: Array<ModuleTraits.TraitContribution>\n snapshot: ModuleTraits.ModuleTraitsSnapshot | undefined\n } = {\n frozen: false,\n contributions: [],\n snapshot: undefined,\n }\n\n /**\n * Transaction history:\n * - Keeps the latest N StateTransaction records per ModuleRuntime.\n * - Used only for dev/test devtools features (e.g. 
time-travel, txn summary views).\n * - Capacity is bounded to avoid unbounded memory growth in long-running apps.\n */\n const maxTxnHistory = 500\n const txnHistory = {\n buffer: new Array<StateTransaction.StateTransaction<S> | undefined>(maxTxnHistory),\n start: 0,\n size: 0,\n capacity: maxTxnHistory,\n }\n const txnById = new Map<string, StateTransaction.StateTransaction<S>>()\n\n /**\n * Transaction queue:\n * - Executes each logic entrypoint (dispatch / source-refresh / future extensions) serially in FIFO order.\n * - Guarantees at most one transaction at a time per instance; different instances can still run in parallel.\n */\n const kernelImplementationRef = yield* KernelRef.resolveKernelImplementationRef()\n const cutoverGateModeOpt = yield* Effect.serviceOption(\n RuntimeKernel.FullCutoverGateModeTag as unknown as ServiceMap.Key<any, RuntimeKernel.FullCutoverGateMode>,\n )\n const cutoverGateMode = Option.isSome(cutoverGateModeOpt) ? cutoverGateModeOpt.value : 'fullCutover'\n const runtimeServicesOverrides = yield* RuntimeKernel.resolveRuntimeServicesOverrides({\n moduleId: options.moduleId,\n })\n\n const runtimeServicesRegistryOpt = yield* Effect.serviceOption(\n RuntimeKernel.RuntimeServicesRegistryTag as unknown as ServiceMap.Key<any, RuntimeKernel.RuntimeServicesRegistry>,\n )\n const runtimeServicesRegistry = Option.isSome(runtimeServicesRegistryOpt)\n ? 
runtimeServicesRegistryOpt.value\n : undefined\n\n const resolveRuntimeServiceImpls = <Service>(\n serviceId: string,\n builtin: ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>>,\n ): ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>> => {\n const extraRaw = runtimeServicesRegistry?.implsByServiceId[serviceId]\n if (!extraRaw || extraRaw.length === 0) return builtin\n\n const extra = extraRaw as ReadonlyArray<RuntimeKernel.RuntimeServiceImpl<Service>>\n const seen = new Set<string>()\n const out: Array<RuntimeKernel.RuntimeServiceImpl<Service>> = []\n\n for (const impl of builtin) {\n seen.add(impl.implId)\n out.push(impl)\n }\n\n for (const impl of extra) {\n if (!impl || typeof impl.implId !== 'string' || impl.implId.length === 0) continue\n if (seen.has(impl.implId)) continue\n seen.add(impl.implId)\n out.push(impl)\n }\n\n return out\n }\n\n const withRuntimeServiceBuiltins = <A, E, R>(\n serviceId: string,\n builtinMake: Effect.Effect<unknown, never, any>,\n effect: Effect.Effect<A, E, R>,\n ) =>\n effect.pipe(\n Effect.provideService(RuntimeServiceBuiltins.RuntimeServiceBuiltinsTag, {\n getBuiltinMake: (candidateServiceId) =>\n candidateServiceId === serviceId\n ? (builtinMake as Effect.Effect<unknown, never, any>)\n : Effect.die(new Error(`[Logix] builtin make not available: ${candidateServiceId}`)),\n } satisfies RuntimeServiceBuiltins.RuntimeServiceBuiltins),\n )\n\n const readCurrentOpSeq = (): Effect.Effect<number | undefined> =>\n Effect.service(Debug.currentOpSeq).pipe(Effect.orDie).pipe(\n Effect.map((opSeqRaw) =>\n typeof opSeqRaw === 'number' && Number.isFinite(opSeqRaw) && opSeqRaw >= 0 ? 
Math.floor(opSeqRaw) : undefined,\n ),\n )\n\n const makeTxnQueueBuiltin = makeEnqueueTransaction({\n moduleId: options.moduleId,\n instanceId,\n resolveConcurrencyPolicy,\n diagnostics: concurrencyDiagnostics,\n })\n\n const enqueueTxnSel = RuntimeKernel.selectRuntimeService(\n 'txnQueue',\n resolveRuntimeServiceImpls('txnQueue', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeTxnQueueBuiltin,\n },\n {\n implId: 'trace',\n implVersion: 'v1',\n make: makeTxnQueueBuiltin,\n notes: 'no-op wrapper (used for override isolation tests)',\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const enqueueTransactionBase = yield* withRuntimeServiceBuiltins('txnQueue', makeTxnQueueBuiltin, enqueueTxnSel.impl.make)\n\n const makeOperationRunnerBuiltin = Effect.succeed(\n makeRunOperation({\n optionsModuleId: options.moduleId,\n instanceId,\n runtimeLabel,\n txnContext,\n }),\n )\n\n const runOperationSel = RuntimeKernel.selectRuntimeService(\n 'operationRunner',\n resolveRuntimeServiceImpls('operationRunner', [\n {\n implId: 'builtin',\n implVersion: 'v1',\n make: makeOperationRunnerBuiltin,\n },\n ]),\n runtimeServicesOverrides,\n )\n\n const runOperation = yield* withRuntimeServiceBuiltins(\n 'operationRunner',\n makeOperationRunnerBuiltin,\n runOperationSel.impl.make,\n )\n\n yield* runOperation(\n 'lifecycle',\n 'module:init',\n { meta: { moduleId, instanceId } },\n Debug.record({\n type: 'module:init',\n moduleId,\n instanceId,\n }),\n )\n\n // Initial state snapshot:\n // - Emit a state:update event to write the initial state into the Debug stream.\n // - Helps Devtools show \"Current State\" even before any business interaction.\n // - Provides frame 0 for the timeline so later events can build time-travel views on top of it.\n const initialSnapshot = yield* SubscriptionRef.get(stateRef)\n yield* runOperation(\n 'state',\n 'state:init',\n { meta: { moduleId, instanceId } },\n Debug.record({\n type: 'state:update',\n moduleId,\n state: initialSnapshot,\n 
instanceId,
        txnSeq: 0,
      }),
    )

    // Best-effort RuntimeStore registration: only runs when a RuntimeStore service is
    // present in the fiber Env (e.g. devtools / host integration); absent -> no-op.
    const runtimeStoreOpt = yield* Effect.serviceOption(
      RuntimeStoreTag as unknown as ServiceMap.Key<any, { registerModuleInstance: (args: unknown) => void; unregisterModuleInstance: (key: string) => void }>,
    )
    if (Option.isSome(runtimeStoreOpt)) {
      runtimeStoreOpt.value.registerModuleInstance({
        moduleId,
        instanceId,
        moduleInstanceKey,
        initialState: initialSnapshot,
      })
    }

    // RootContext is optional; when present it is used later as a fallback lookup
    // source for the TickScheduler service.
    const rootContextSvcOpt = yield* Effect.serviceOption(RootContextTag as unknown as ServiceMap.Key<any, RootContext>)
    const rootContext = Option.isSome(rootContextSvcOpt) ? (rootContextSvcOpt.value as RootContext) : undefined

    // TickScheduler may or may not be visible at init time; cache whatever we find now
    // and lazily refresh later (see refreshTickSchedulerFromEnv / enqueueTransaction).
    const tickSchedulerOpt = (yield* Effect.serviceOption(
      TickSchedulerTag as unknown as ServiceMap.Key<any, TickSchedulerService>,
    )) as Option.Option<TickSchedulerService>
    let tickSchedulerCached: TickSchedulerService | undefined = Option.isSome(tickSchedulerOpt) ? tickSchedulerOpt.value : undefined

    // Fallback lookup: read TickScheduler out of a RootContext's own ServiceMap
    // (used when the service is not in the current fiber Env).
    const readTickSchedulerFromRootContext = (root: RootContext | undefined): TickSchedulerService | undefined => {
      if (!root?.context) {
        return undefined
      }

      const fromRoot = ServiceMap.getOption(root.context, TickSchedulerTag as any) as Option.Option<TickSchedulerService>
      return Option.isSome(fromRoot) ? fromRoot.value : undefined
    }

    // Re-query the current fiber Env for TickScheduler; on success also update the
    // module-level cache so later onCommit calls can use it without another lookup.
    const refreshTickSchedulerFromEnv = (): Effect.Effect<TickSchedulerService | undefined> =>
      Effect.gen(function* () {
        const refreshed = (yield* Effect.serviceOption(
          TickSchedulerTag as unknown as ServiceMap.Key<any, TickSchedulerService>,
        )) as Option.Option<TickSchedulerService>
        if (Option.isSome(refreshed)) {
          tickSchedulerCached = refreshed.value
          return refreshed.value
        }
        return undefined
      })

    // Wrapper around enqueueTransactionBase that opportunistically caches the
    // TickScheduler from the caller's Env before delegating. Supports both call
    // shapes of the base: (eff) and (lane, eff).
    const enqueueTransaction: EnqueueTransaction = ((a0: any, a1?: any) =>
      Effect.gen(function* () {
        // Cache TickScheduler from the current fiber Env whenever possible:
        // - ManagedRuntime scenarios (e.g. React RuntimeProvider injecting tick services) may not have TickSchedulerTag
        //   visible during ModuleRuntime initialization.
        // - But it is often available at enqueue-time (callsite), and caching it ensures onCommit can publish into RuntimeStore.
        if (!tickSchedulerCached) {
          const refreshed = yield* refreshTickSchedulerFromEnv()
          if (!refreshed) {
            const fromRoot = readTickSchedulerFromRootContext(rootContext)
            if (fromRoot) {
              tickSchedulerCached = fromRoot
            }
          }
        }

        // Preserve the original call signature: (eff) or (lane, eff).
        return yield* (a1 !== undefined ? (enqueueTransactionBase as any)(a0, a1) : (enqueueTransactionBase as any)(a0))
      })) as any

    // Builtin implementation of the 'transaction' runtime service. The onCommit hook:
    //   1. resolves a TickScheduler (cache -> Env -> RootContext fallback),
    //   2. emits a dev diagnostic if none is found,
    //   3. notifies the selector graph of the commit,
    //   4. publishes the commit (plus resolved scheduling policy when diagnostics are on)
    //      to the scheduler.
    const makeTransactionBuiltin = Effect.sync(() =>
      makeTransactionOps<S>({
        moduleId,
        optionsModuleId: options.moduleId,
        instanceId,
        stateRef,
        commitHub,
        shouldPublishCommitHub: () => commitHubSubscriberCount > 0,
        recordStatePatch,
        onCommit: ({ state, meta, transaction, diagnosticsLevel }) =>
          Effect.gen(function* () {
            let scheduler = tickSchedulerCached
            if (!scheduler) {
              scheduler = yield* refreshTickSchedulerFromEnv()
            }

            let root = rootContext
            if (!root) {
              const rootOpt = yield* Effect.serviceOption(RootContextTag as unknown as ServiceMap.Key<any, RootContext>)
              if (Option.isSome(rootOpt)) {
                root = rootOpt.value as RootContext
              }
            }

            if (!scheduler) {
              const fromRoot = readTickSchedulerFromRootContext(root)
              if (fromRoot) {
                scheduler = fromRoot
                tickSchedulerCached = fromRoot
              }
            }

            // Dev-only diagnostic: commits without a scheduler still apply, but tick
            // propagation to subscribers will not happen.
            if (!scheduler && diagnosticsLevel !== 'off' && isDevEnv()) {
              yield* Debug.record({
                type: 'diagnostic',
                moduleId,
                instanceId,
                txnSeq: meta.txnSeq,
                txnId: meta.txnId,
                trigger: {
                  kind: meta.originKind ?? 'unknown',
                  name: meta.originName ?? meta.originKind ?? 'unknown',
                },
                code: 'tick_scheduler::missing_service',
                severity: 'error',
                message:
                  'TickScheduler service is not visible in ModuleRuntime.onCommit; tickSeq will not advance and RuntimeStore subscribers will not flush.',
                hint:
                  'Ensure TickSchedulerTag is available in the fiber Env for logic/task/txnQueue execution (AppRuntime baseLayer + RootContext wiring).',
                kind: 'missing_tick_scheduler',
              })
            }

            // Avoid selector graph work when there are no selectors at all.
            // (SelectorGraph will no-op; transaction.dirty is already snapshotted at commit time.)
            if (selectorGraph.hasAnyEntries()) {
              yield* selectorGraph.onCommit(
                state,
                meta,
                transaction.dirty,
                diagnosticsLevel,
                scheduler
                  ? (selectorId) => {
                      scheduler.onSelectorChanged({
                        moduleInstanceKey,
                        selectorId,
                        priority: meta.priority,
                      })
                    }
                  : undefined,
              )
            }

            if (scheduler) {
              const opSeq = yield* readCurrentOpSeq()
              // Scheduling policy is only resolved (extra work) when diagnostics are on.
              let resolvedSchedulingPolicy: RuntimeStoreModuleCommit['schedulingPolicy'] | undefined
              if (diagnosticsLevel !== 'off') {
                const resolved = yield* resolveConcurrencyPolicyFast()
                resolvedSchedulingPolicy = {
                  configScope: resolved.configScope,
                  concurrencyLimit: resolved.concurrencyLimit,
                  allowUnbounded: resolved.allowUnbounded,
                  losslessBackpressureCapacity: resolved.losslessBackpressureCapacity,
                  pressureWarningThreshold: resolved.pressureWarningThreshold,
                  warningCooldownMs: resolved.warningCooldownMs,
                  resolvedAtTxnSeq: meta.txnSeq,
                }
              }

              yield* scheduler.onModuleCommit({
                moduleId,
                instanceId,
                moduleInstanceKey,
                state,
                meta,
                opSeq,
                schedulingPolicy: resolvedSchedulingPolicy,
              })
            }
          }),
        enqueueTransaction,
        runOperation,
        txnContext,
        traitConvergeTimeSlicing: traitConvergeTimeSlicingState,
        // Accessor facade over the mutable traitState; transaction ops read the
        // converge program/caches through this rather than holding direct references.
        traitRuntime: {
          getProgram: () => traitState.program,
          getConvergeStaticIrDigest: () => traitState.convergeStaticIrDigest,
          getConvergePlanCache: () => traitState.convergePlanCache,
          getConvergeGeneration: () => traitState.convergeGeneration,
          getPendingCacheMissReason: () => traitState.pendingCacheMissReason,
          getPendingCacheMissReasonCount: () => traitState.pendingCacheMissReasonCount,
          setPendingCacheMissReason: (next) => {
            traitState.pendingCacheMissReason = next
            if (next == null) {
              traitState.pendingCacheMissReasonCount = 0
            }
          },
          rowIdStore,
          getListConfigs: () => traitState.listConfigs,
        },
        resolveTraitConvergeConfig,
        isDevEnv,
        txnHistory,
        txnById,
      }),
    )

    // Select the 'transaction' service implementation (builtin vs. override from
    // runtimeServicesOverrides); the binding is recorded for cutover-gate evidence.
    const transactionSel = RuntimeKernel.selectRuntimeService(
      'transaction',
      resolveRuntimeServiceImpls('transaction', [
        {
          implId: 'builtin',
          implVersion: 'v1',
          make: makeTransactionBuiltin,
        },
      ]),
      runtimeServicesOverrides,
    )

    const { readState, setStateInternal, runWithStateTransaction } = yield* withRuntimeServiceBuiltins(
      'transaction',
      makeTransactionBuiltin,
      transactionSel.impl.make,
    )

    // Counters surfaced in txn-lane evidence (see runTraitConvergeTimeSlicingWorker).
    let deferredFlushCoalescedCount = 0
    let deferredFlushCanceledCount = 0

    // Enqueue one deferred converge flush transaction on the given lane, re-marking
    // the snapshotted dirty paths inside the new transaction. Returns the txn anchor
    // (txnSeq/txnId and, optionally, opSeq) captured inside the transaction body.
    const runDeferredConvergeFlush = (args: {
      readonly dirtyPathsSnapshot: ReadonlyArray<StateTransaction.StatePatchPath>
      readonly dirtyAllReason?: DirtyAllReason
      readonly lane: 'urgent' | 'nonUrgent'
      readonly slice?: { readonly start: number; readonly end: number; readonly total: number }
      readonly captureOpSeq?: boolean
      readonly emitLaneEvidence?: (anchor: {
        readonly txnSeq: number
        readonly txnId?: string
        readonly opSeq?: number
      }) => Effect.Effect<void, never, never>
    }): Effect.Effect<{ readonly txnSeq: number; readonly txnId?: string; readonly opSeq?: number }> => {
      // Captured from inside the transaction body; read after the txn completes.
      let capturedTxnSeq = 0
      let capturedTxnId: string | undefined = undefined
      let capturedOpSeq: number | undefined = undefined

      const details: any = { dirtyPathCount: args.dirtyPathsSnapshot.length }
      if (args.dirtyAllReason) {
        details.dirtyAllReason = args.dirtyAllReason
      }
      if (args.slice) {
        details.sliceStart = args.slice.start
        details.sliceEnd = args.slice.end
        details.sliceTotal = args.slice.total
      }

      return enqueueTransaction(
        args.lane,
        runOperation(
          'lifecycle',
          'trait:deferredConvergeFlush',
          {
            payload: { dirtyPathCount: args.dirtyPathsSnapshot.length },
            meta: { moduleId, instanceId },
          },
          runWithStateTransaction(
            {
              kind: 'trait:deferred_flush',
              name: 'trait:deferredConvergeFlush',
              details,
            },
            () =>
              Effect.gen(function* () {
                // Capture the anchor of the transaction we are now inside.
                const current: any = txnContext.current
                if (current) {
                  capturedTxnSeq = current.txnSeq
                  capturedTxnId = current.txnId
                }

                if (args.captureOpSeq) {
                  capturedOpSeq = yield* readCurrentOpSeq()
                }

                // Evidence is emitted from inside the txn so it carries the anchor.
                if (args.emitLaneEvidence) {
                  yield* args.emitLaneEvidence({
                    txnSeq: capturedTxnSeq,
                    txnId: capturedTxnId,
                    opSeq: capturedOpSeq,
                  })
                }

                if (!current) return
                if (args.dirtyAllReason) {
                  current.dirtyAllReason = args.dirtyAllReason
                }
                // Replay the backlog's dirty paths into this transaction so the
                // commit path runs converge over them.
                for (const p of args.dirtyPathsSnapshot) {
                  StateTransaction.markDirtyPath(txnContext, p, 'unknown')
                }
              }),
          ),
        ),
      ).pipe(
        Effect.as({
          txnSeq: capturedTxnSeq,
          txnId: capturedTxnId,
          opSeq: capturedOpSeq,
        } as const),
      )
    }

    // 043: time-slicing scheduler for deferred converge (debounce + maxLag); triggered by in-txn signals and enqueued outside the txn.
    //
    // Structure:
    //   outer while  — one iteration per "flush run" over the accumulated backlog;
    //   inner while  — debounce/maxLag wait loop: sleeps until either the backlog has
    //                  been quiet for debounceMs or its total age reached maxLagMs;
    //   slicing loop — when the lane policy is enabled, the deferred converge work is
    //                  executed in adaptively-sized slices on the nonUrgent lane,
    //                  yielding between slices (budget / input-pending / frame cap).
    // The `finally` clears workerFiber so a new worker can be started later.
    const runTraitConvergeTimeSlicingWorker = Effect.gen(function* () {
      try {
        while (true) {
          while (true) {
            const config = traitConvergeTimeSlicingState.latestConvergeConfig?.traitConvergeTimeSlicing
            if (!config?.enabled) {
              // Feature turned off: drop any backlog and exit the worker.
              traitConvergeTimeSlicingState.backlogDirtyPaths.clear()
              traitConvergeTimeSlicingState.backlogDirtyAllReason = undefined
              traitConvergeTimeSlicingState.firstPendingAtMs = undefined
              traitConvergeTimeSlicingState.lastTouchedAtMs = undefined
              return
            }

            const hasBacklog =
              traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||
              traitConvergeTimeSlicingState.backlogDirtyAllReason != null
            if (!hasBacklog) {
              return
            }

            const now = Date.now()
            const firstPendingAtMs = traitConvergeTimeSlicingState.firstPendingAtMs ?? now
            traitConvergeTimeSlicingState.firstPendingAtMs = firstPendingAtMs

            // Lane policy may be resolved under the overrides captured with the
            // triggering transaction (so the worker honors the caller's config).
            const captured = traitConvergeTimeSlicingState.capturedContext
            const txnLanePolicy = yield* (captured?.overrides
              ? Effect.provideService(
                  resolveTxnLanePolicy(),
                  StateTransactionOverridesTag as unknown as ServiceMap.Key<any, StateTransactionOverrides>,
                  captured.overrides,
                )
              : resolveTxnLanePolicy())

            // Lane policy timings win over the static converge config when enabled.
            const debounceMs = txnLanePolicy.enabled ? txnLanePolicy.debounceMs : config.debounceMs
            const maxLagMs = txnLanePolicy.enabled ? txnLanePolicy.maxLagMs : config.maxLagMs
            const lastTouchedAtMs = traitConvergeTimeSlicingState.lastTouchedAtMs ?? firstPendingAtMs
            const quietMs = Math.max(0, now - lastTouchedAtMs)
            const lagMs = Math.max(0, now - firstPendingAtMs)

            // Flush when the backlog went quiet long enough OR has aged past maxLag.
            if (quietMs >= debounceMs || lagMs >= maxLagMs) {
              break
            }

            // Sleep only as long as needed to hit the nearer of the two deadlines.
            const untilQuietMs = Math.max(0, debounceMs - quietMs)
            const untilLagMs = Math.max(0, maxLagMs - lagMs)
            const sleepMs = Math.max(0, Math.min(untilQuietMs, untilLagMs))
            if (sleepMs > 0) {
              yield* Effect.sleep(Duration.millis(sleepMs))
            } else {
              yield* Effect.yieldNow
            }
          }

          // Atomically take the backlog for this run; new dirt accumulates for the
          // next outer-loop iteration.
          const dirtyPathsSnapshot = Array.from(traitConvergeTimeSlicingState.backlogDirtyPaths)
          traitConvergeTimeSlicingState.backlogDirtyPaths.clear()
          const dirtyAllReasonSnapshot = traitConvergeTimeSlicingState.backlogDirtyAllReason
          traitConvergeTimeSlicingState.backlogDirtyAllReason = undefined
          const firstPendingAtMsForRun = traitConvergeTimeSlicingState.firstPendingAtMs
          traitConvergeTimeSlicingState.firstPendingAtMs = undefined
          traitConvergeTimeSlicingState.lastTouchedAtMs = undefined

          if (dirtyPathsSnapshot.length === 0 && !dirtyAllReasonSnapshot) {
            return
          }

          // Nothing to slice if there is no exec IR / no deferred steps.
          const program = traitState.program
          if (!program?.convergeExecIr || program.convergeExecIr.topoOrderDeferredInt32.length === 0) {
            return
          }

          const captured = traitConvergeTimeSlicingState.capturedContext
          const txnLanePolicy = yield* captured?.overrides
            ? Effect.provideService(resolveTxnLanePolicy(), StateTransactionOverridesTag, captured.overrides)
            : resolveTxnLanePolicy()

          const shouldEmitLaneEvidence = captured != null && captured.diagnosticsLevel !== 'off'
          const shouldEmitLaneEvidenceForPolicy =
            shouldEmitLaneEvidence && (txnLanePolicy.enabled || txnLanePolicy.overrideMode != null)

          // Re-apply the debug/override context captured at trigger time so flushes
          // run with the same diagnostics level, label, and sinks as the origin txn.
          const withCapturedContext = <A2, E2, R2>(eff: Effect.Effect<A2, E2, R2>): Effect.Effect<A2, E2, R2> => {
            let next = eff
            if (captured?.overrides) {
              next = Effect.provideService(next, StateTransactionOverridesTag, captured.overrides)
            }
            if (captured) {
              next = Effect.provideService(next, Debug.currentRuntimeLabel, captured.runtimeLabel)
              next = Effect.provideService(next, Debug.currentDiagnosticsLevel, captured.diagnosticsLevel)
              next = Effect.provideService(next, Debug.currentDebugSinks, captured.debugSinks)
            }
            return next
          }

          const firstPendingAtMs = firstPendingAtMsForRun ?? Date.now()

          // Lane policy disabled (or forced off/sync): flush the whole backlog in a
          // single urgent transaction, optionally emitting lane evidence.
          if (!txnLanePolicy.enabled) {
            if (txnLanePolicy.overrideMode === 'forced_off') {
              deferredFlushCoalescedCount += 1
              deferredFlushCanceledCount += 1
            }

            const reasons: ReadonlyArray<Debug.TxnLaneEvidenceReason> =
              txnLanePolicy.overrideMode === 'forced_off'
                ? ['forced_off', 'canceled']
                : txnLanePolicy.overrideMode === 'forced_sync'
                  ? ['forced_sync']
                  : ['disabled']

            yield* withCapturedContext(
              runDeferredConvergeFlush({
                dirtyPathsSnapshot,
                dirtyAllReason: dirtyAllReasonSnapshot,
                lane: 'urgent',
                captureOpSeq: shouldEmitLaneEvidenceForPolicy,
                emitLaneEvidence: shouldEmitLaneEvidenceForPolicy
                  ? (anchor) =>
                      Debug.record({
                        type: 'trace:txn-lane',
                        moduleId,
                        instanceId,
                        txnSeq: anchor.txnSeq,
                        txnId: anchor.txnId,
                        data: {
                          evidence: {
                            anchor: {
                              moduleId,
                              instanceId,
                              txnSeq: anchor.txnSeq,
                              ...(typeof anchor.opSeq === 'number' ? { opSeq: anchor.opSeq } : {}),
                            },
                            lane: 'urgent',
                            kind: 'trait:deferred_flush',
                            policy: txnLanePolicy,
                            backlog: {
                              pendingCount: 0,
                              ageMs: Math.max(0, Date.now() - firstPendingAtMs),
                              coalescedCount: deferredFlushCoalescedCount,
                              canceledCount: deferredFlushCanceledCount,
                            },
                            starvation: { triggered: false },
                            reasons,
                          } satisfies Debug.TxnLaneEvidence,
                        },
                      })
                  : undefined,
              }),
            )

            // More dirt arrived while flushing -> loop again; else the worker is done.
            const hasPending =
              traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||
              traitConvergeTimeSlicingState.backlogDirtyAllReason != null
            if (!hasPending) {
              return
            }
            continue
          }

          // --- sliced (nonUrgent lane) execution path ---
          const totalSteps = program.convergeExecIr.topoOrderDeferredInt32.length

          let cursor = 0
          // Chunk size adapts to the budget below; tiny budgets start at 1 step.
          const initialChunkSize = txnLanePolicy.budgetMs <= 1 ? 1 : 32
          let chunkSize = Math.min(initialChunkSize, totalSteps)
          let yieldCount = 0
          let lastYieldAtMs = Date.now()

          // Browser-only cooperative hint (navigator.scheduling.isInputPending);
          // defensively guarded so non-browser hosts just report false.
          const readIsInputPending = (): boolean => {
            const nav = (globalThis as any).navigator
            const scheduling = nav?.scheduling
            const isInputPending = scheduling?.isInputPending
            if (typeof isInputPending !== 'function') return false
            try {
              return Boolean(isInputPending.call(scheduling))
            } catch {
              return false
            }
          }

          while (cursor < totalSteps) {
            const lagMs = Math.max(0, Date.now() - firstPendingAtMs)
            const lagExceeded = lagMs >= txnLanePolicy.maxLagMs
            // Starvation protection: once maxLag is exceeded, quadruple the budget
            // so the flush finishes quickly instead of continuing to trickle.
            const budgetMs = lagExceeded
              ? Math.max(txnLanePolicy.budgetMs, txnLanePolicy.budgetMs * 4)
              : txnLanePolicy.budgetMs

            const sliceStart = cursor
            const sliceEnd = Math.min(totalSteps, cursor + chunkSize)

            const { sliceDurationMs, anchor } = yield* withCapturedContext(
              Effect.gen(function* () {
                const sliceStartedAtMs = Date.now()
                const anchor = yield* runDeferredConvergeFlush({
                  dirtyPathsSnapshot,
                  dirtyAllReason: dirtyAllReasonSnapshot,
                  lane: 'nonUrgent',
                  slice: { start: sliceStart, end: sliceEnd, total: totalSteps },
                  captureOpSeq: shouldEmitLaneEvidence,
                })
                const sliceDurationMs = Math.max(0, Date.now() - sliceStartedAtMs)
                return { sliceDurationMs, anchor } as const
              }),
            )

            cursor = sliceEnd

            // If new dirt arrived and we are not lag-starved, prefer coalescing:
            // abandon this run and let the next outer iteration pick everything up.
            const hasPending =
              traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||
              traitConvergeTimeSlicingState.backlogDirtyAllReason != null
            const willCoalesce = txnLanePolicy.allowCoalesce && !lagExceeded && hasPending

            const elapsedSinceLastYieldMs = Math.max(0, Date.now() - lastYieldAtMs)
            const budgetExceeded = budgetMs > 0 && Number.isFinite(budgetMs) && elapsedSinceLastYieldMs >= budgetMs
            // 16ms ~ one frame: never hold the thread longer than a frame without yielding.
            const forcedFrameYield = elapsedSinceLastYieldMs >= 16
            const inputPending =
              !willCoalesce && txnLanePolicy.yieldStrategy === 'inputPending' ? readIsInputPending() : false

            const shouldYield =
              cursor < totalSteps && !willCoalesce && (inputPending || budgetExceeded || forcedFrameYield)

            const yieldReason: Debug.TxnLaneNonUrgentYieldReason = !shouldYield
              ? 'none'
              : inputPending
                ? 'input_pending'
                : budgetExceeded
                  ? 'budget_exceeded'
                  : 'forced_frame_yield'

            if (shouldEmitLaneEvidence) {
              yield* withCapturedContext(
                Effect.gen(function* () {
                  const reasons: Array<Debug.TxnLaneEvidenceReason> = ['queued_non_urgent']
                  if (lagExceeded) reasons.push('max_lag_forced', 'starvation_protection')
                  if (yieldReason === 'budget_exceeded') reasons.push('budget_yield')

                  const evidence: Debug.TxnLaneEvidence = {
                    anchor: {
                      moduleId,
                      instanceId,
                      txnSeq: anchor.txnSeq,
                      ...(typeof anchor.opSeq === 'number' ? { opSeq: anchor.opSeq } : {}),
                    },
                    lane: 'nonUrgent',
                    kind: 'trait:deferred_flush',
                    policy: txnLanePolicy,
                    backlog: {
                      pendingCount: Math.max(0, totalSteps - sliceEnd),
                      ageMs: lagMs,
                      coalescedCount: deferredFlushCoalescedCount,
                      canceledCount: deferredFlushCanceledCount,
                    },
                    budget: {
                      budgetMs,
                      sliceDurationMs,
                      yieldCount,
                      yielded: shouldYield,
                      yieldReason,
                    },
                    starvation: lagExceeded ? { triggered: true, reason: 'max_lag_exceeded' } : { triggered: false },
                    reasons,
                  }

                  yield* Debug.record({
                    type: 'trace:txn-lane',
                    moduleId,
                    instanceId,
                    txnSeq: anchor.txnSeq,
                    txnId: anchor.txnId,
                    data: { evidence },
                  })
                }),
              )
            }

            if (willCoalesce) {
              deferredFlushCoalescedCount += 1
              deferredFlushCanceledCount += 1
              if (shouldEmitLaneEvidence) {
                yield* withCapturedContext(
                  Debug.record({
                    type: 'trace:txn-lane',
                    moduleId,
                    instanceId,
                    txnSeq: anchor.txnSeq,
                    txnId: anchor.txnId,
                    data: {
                      evidence: {
                        anchor: {
                          moduleId,
                          instanceId,
                          txnSeq: anchor.txnSeq,
                          ...(typeof anchor.opSeq === 'number' ? { opSeq: anchor.opSeq } : {}),
                        },
                        lane: 'nonUrgent',
                        kind: 'trait:deferred_flush',
                        policy: txnLanePolicy,
                        backlog: {
                          pendingCount: Math.max(0, totalSteps - cursor),
                          ageMs: lagMs,
                          coalescedCount: deferredFlushCoalescedCount,
                          canceledCount: deferredFlushCanceledCount,
                        },
                        budget: {
                          budgetMs,
                          sliceDurationMs,
                          yieldCount,
                          yielded: false,
                          yieldReason: 'none',
                        },
                        starvation: { triggered: false },
                        reasons: ['coalesced', 'canceled'],
                      } satisfies Debug.TxnLaneEvidence,
                    },
                  }),
                )
              }
              break
            }

            // Adaptive chunk sizing: halve when a slice blew the budget, double
            // when it used less than half of it.
            if (budgetMs > 0 && Number.isFinite(budgetMs)) {
              if (sliceDurationMs > budgetMs && chunkSize > 1) {
                chunkSize = Math.max(1, Math.floor(chunkSize / 2))
              } else if (sliceDurationMs < budgetMs / 2) {
                chunkSize = Math.min(totalSteps, chunkSize * 2)
              }
            }

            if (shouldYield) {
              yieldCount += 1
              lastYieldAtMs = Date.now()
              yield* Effect.yieldNow
            }
          }

          const hasPending =
            traitConvergeTimeSlicingState.backlogDirtyPaths.size > 0 ||
            traitConvergeTimeSlicingState.backlogDirtyAllReason != null
          if (!hasPending) {
            return
          }
        }
      } finally {
        // Allow a fresh worker to be forked after this one exits for any reason.
        traitConvergeTimeSlicingState.workerFiber = undefined
      }
    })

    // Idempotently fork the time-slicing worker as a detached fiber; the
    // inSyncTransactionFiber=false marker keeps it outside the sync txn fiber.
    const ensureTraitConvergeTimeSlicingWorkerStarted = () =>
      Effect.gen(function* () {
        if (traitConvergeTimeSlicingState.workerFiber) {
          return
        }
        const fiber = yield* runTraitConvergeTimeSlicingWorker.pipe(
          Effect.provideService(TaskRunner.inSyncTransactionFiber, false),
          Effect.forkDetach({ startImmediately: true }),
        )
        traitConvergeTimeSlicingState.workerFiber = fiber
      })

    // Interrupt the worker fiber (if any) when the module runtime is destroyed.
    lifecycle.registerDestroy(
      Effect.suspend(() => {
        const fiber = traitConvergeTimeSlicingState.workerFiber
        return fiber ?
Fiber.interrupt(fiber).pipe(Effect.asVoid) : Effect.void
      }),
      { name: 'traitConvergeTimeSlicing' },
    )

    // Action tags declared on the module tag's shape (actionMap keys), or undefined
    // when the tag declares no action map.
    const declaredActionTags = (() => {
      const actionMap = (options.tag as any)?.shape?.actionMap
      if (!actionMap || typeof actionMap !== 'object') {
        return undefined
      }
      return new Set(Object.keys(actionMap))
    })()

    // One bounded PubSub per declared action tag so per-tag subscribers do not pay
    // for filtering the full action stream; capacity follows the concurrency policy.
    const actionTopicHubCapacity = yield* resolveConcurrencyPolicy().pipe(
      Effect.map((policy) => policy.losslessBackpressureCapacity),
    )
    const actionTagHubsByTag = new Map<string, PubSub.PubSub<A>>()
    if (declaredActionTags && declaredActionTags.size > 0) {
      const topicHubEntries = yield* Effect.forEach(
        declaredActionTags,
        (tag) =>
          PubSub.bounded<A>(actionTopicHubCapacity).pipe(
            Effect.map((hub) => [tag, hub] as const),
          ),
      )
      for (const [tag, hub] of topicHubEntries) {
        actionTagHubsByTag.set(tag, hub)
      }
    }

    // Normalize an action's tag: prefer a non-empty string `_tag`, then `type`,
    // then stringify whichever is non-null; undefined when neither exists.
    const actionTagOfUnknown = (action: unknown): string | undefined => {
      const tag = (action as any)?._tag
      if (typeof tag === 'string' && tag.length > 0) return tag
      const type = (action as any)?.type
      if (typeof type === 'string' && type.length > 0) return type
      if (tag != null) return String(tag)
      if (type != null) return String(type)
      return undefined
    }

    // True when the action's `_tag` or `type` equals the topic tag; falls back to
    // the normalized-tag comparison for non-string tag shapes.
    const actionMatchesTopicTag = (action: unknown, topicTag: string): boolean => {
      const tag = (action as any)?._tag
      if (typeof tag === 'string' && tag.length > 0) {
        if (tag === topicTag) return true
        const type = (action as any)?.type
        return typeof type === 'string' && type.length > 0 && type === topicTag
      }

      const type = (action as any)?.type
      if (typeof type === 'string' && type.length > 0) {
        return type === topicTag
      }

      const normalized = actionTagOfUnknown(action)
      return normalized != null && normalized.length > 0 && normalized === topicTag
    }

    const actionsStream: Stream.Stream<A> = Stream.fromPubSub(actionHub)
    // Per-tag action stream: use the dedicated topic hub when the tag was declared,
    // otherwise filter the full action stream.
    const actionsByTagStream = (tag: string): Stream.Stream<A> => {
      const topicHub = actionTagHubsByTag.get(tag)
      if (topicHub) {
        return Stream.fromPubSub(topicHub)
      }
      return actionsStream.pipe(Stream.filter((action: A) => actionMatchesTopicTag(action, tag)))
    }

    // Builtin implementation of the 'dispatch' runtime service (reducers + hubs +
    // transaction plumbing are injected from the surrounding closure).
    const makeDispatchBuiltin = Effect.sync(() =>
      makeDispatchOps<S, A>({
        optionsModuleId: options.moduleId,
        instanceId,
        declaredActionTags,
        initialReducers: options.reducers as any,
        txnContext,
        readState,
        setStateInternal,
        recordStatePatch,
        actionHub,
        actionTagHubsByTag,
        actionCommitHub,
        diagnostics: concurrencyDiagnostics,
        enqueueTransaction,
        resolveConcurrencyPolicy,
        runOperation,
        runWithStateTransaction,
        isDevEnv,
      }),
    )

    // Select the 'dispatch' service implementation (builtin vs. override).
    const dispatchSel = RuntimeKernel.selectRuntimeService(
      'dispatch',
      resolveRuntimeServiceImpls('dispatch', [
        {
          implId: 'builtin',
          implVersion: 'v1',
          make: makeDispatchBuiltin,
        },
      ]),
      runtimeServicesOverrides,
    )

    const dispatchOps = yield* withRuntimeServiceBuiltins('dispatch', makeDispatchBuiltin, dispatchSel.impl.make)

    // Aggregate the four service-selection bindings into evidence consumed by the
    // full-cutover gate below (and optionally by an EvidenceCollector later).
    const runtimeServicesEvidence = RuntimeKernel.makeRuntimeServicesEvidence({
      moduleId: options.moduleId,
      instanceId,
      bindings: [enqueueTxnSel.binding, runOperationSel.binding, transactionSel.binding, dispatchSel.binding],
      overridesApplied: [
        ...enqueueTxnSel.overridesApplied,
        ...runOperationSel.overridesApplied,
        ...transactionSel.overridesApplied,
        ...dispatchSel.overridesApplied,
      ],
    })

    // Full-cutover gate: verifies that all required runtime services were bound to
    // the requested kernel without implicit fallbacks.
    const gate = FullCutoverGate.evaluateFullCutoverGate({
      mode: cutoverGateMode,
      requestedKernelId: kernelImplementationRef.kernelId,
      runtimeServicesEvidence,
      diagnosticsLevel: isDevEnv() ? 'light' : 'off',
    })

    if (gate.verdict === 'FAIL') {
      // Dev builds get a detailed multi-line message; prod a terse one.
      const msg = isDevEnv()
        ?
[
            '[FullCutoverGateFailed] Runtime assembly detected implicit fallback / missing bindings under fullCutover mode.',
            `requestedKernelId: ${kernelImplementationRef.kernelId}`,
            `reason: ${gate.reason}`,
            `missingServiceIds: ${gate.missingServiceIds.join(',')}`,
            `fallbackServiceIds: ${gate.fallbackServiceIds.join(',')}`,
            `requiredServiceCount: ${gate.evidence.requiredServiceCount}`,
            `anchor: moduleId=${gate.anchor.moduleId}, instanceId=${gate.anchor.instanceId}, txnSeq=${gate.anchor.txnSeq}`,
          ].join('\n')
        : 'Full cutover gate failed'

      // Attach the gate details to the thrown error so callers/devtools can inspect
      // them programmatically (not just via the message).
      const err: any = new Error(msg)
      err.name = 'FullCutoverGateFailed'
      err.gate = gate
      err.reason = gate.reason
      err.evidence = gate.evidence
      err.instanceId = instanceId
      err.moduleId = options.moduleId
      throw err
    }

    // Deterministic failure for any write attempt through the read-only ref facade.
    const writeDenied = () =>
      Effect.die(new Error('[ModuleRuntime.ref] state ref is read-only. Use runtime.setState / $.state.update / $.state.mutate instead.'))

    const denyPublish = (_value: unknown): Effect.Effect<boolean> => writeDenied() as Effect.Effect<boolean>

    const denyWithPermits =
      (_permits: number) =>
      <A0, E0, R0>(_self: Effect.Effect<A0, E0, R0>): Effect.Effect<A0, E0, R0> =>
        writeDenied() as Effect.Effect<A0, E0, R0>

    const rootDenyWriteRef = {
      get: SubscriptionRef.get(stateRef),
      modify: writeDenied,
    }

    // Keep root ref identity stable for a runtime instance (important for storeId anchoring in ExternalStore.fromSubscriptionRef).
    const rootReadonlyRef = {
      get: SubscriptionRef.get(stateRef),
      changes: SubscriptionRef.changes(stateRef),
      // Runtime guard for unsafe casts (`as any as SubscriptionRef`) to keep failure deterministic.
      modify: writeDenied,
      ref: rootDenyWriteRef,
      pubsub: {
        publish: denyPublish,
      },
      semaphore: {
        withPermits: denyWithPermits,
      },
    } as const

    // The public runtime surface handed to callers (React bindings, logic, devtools).
    const runtime: PublicModuleRuntime<S, A> = {
      // Expose moduleId on the runtime so React / Devtools can correlate module information at the view layer.
      moduleId,
      instanceId,
      lifecycleStatus: lifecycle.getStatus,
      getState: readState,
      setState: (next) => setStateInternal(next, '*', 'unknown', undefined, next),
      dispatch: (action) =>
        // Enqueue the txn request to guarantee FIFO serialization within a single instance.
        dispatchOps.dispatch(action) as Effect.Effect<void, never, never>,
      dispatchBatch: (actions) => dispatchOps.dispatchBatch(actions) as Effect.Effect<void, never, never>,
      dispatchLowPriority: (action) => dispatchOps.dispatchLowPriority(action) as Effect.Effect<void, never, never>,
      actions$: actionsStream,
      actionsByTag$: actionsByTagStream,
      actionsWithMeta$: Stream.fromPubSub(actionCommitHub),
      // Selector-projected state stream, de-duplicated with Stream.changes.
      changes: <V>(selector: (s: S) => V) => Stream.map(SubscriptionRef.changes(stateRef), selector).pipe(Stream.changes),
      changesWithMeta: <V>(selector: (s: S) => V) =>
        Stream.map(fromCommitHub, ({ value, meta }) => ({
          value: selector(value),
          meta,
        })),
      // ReadQuery-driven change stream. Static-lane queries go through the selector
      // graph (cached, commit-driven); all other lanes evaluate per commit with a
      // one-time strict-gate check on the first element.
      changesReadQueryWithMeta: <V>(input: ReadQuery.ReadQueryInput<S, V>) => {
        const compiled: ReadQuery.ReadQueryCompiled<S, V> = ReadQuery.isReadQueryCompiled<S, V>(input)
          ? input
          : ReadQuery.compile(input)

        if (compiled.lane !== 'static') {
          const buildGradeDecision = ReadQuery.resolveBuildGradeStrictGateDecision({
            moduleId,
            instanceId,
            txnSeq: 0,
            compiled,
          })

          const runtimeCompiled = ReadQuery.markRuntimeMissingBuildGrade(compiled)
          // Strict-gate evaluation happens once, on the first commit observed.
          let strictGateChecked = false

          return Stream.mapEffect(fromCommitHub, ({ value, meta }) =>
            Effect.gen(function* () {
              if (!strictGateChecked) {
                strictGateChecked = true

                if (buildGradeDecision?.verdict === 'WARN') {
                  yield* Debug.record(buildGradeDecision.diagnostic)
                } else if (buildGradeDecision?.verdict === 'FAIL') {
                  yield* Debug.record(buildGradeDecision.diagnostic)
                  yield* Effect.die(buildGradeDecision.error)
                }

                if (ReadQuery.shouldEvaluateStrictGateAtRuntime(runtimeCompiled)) {
                  const strictGateOpt = yield* Effect.serviceOption(
                    ReadQueryStrictGateConfigTag as unknown as ServiceMap.Key<any, ReadQuery.ReadQueryStrictGateConfig>,
                  )

                  if (Option.isSome(strictGateOpt)) {
                    const decision = ReadQuery.evaluateStrictGate({
                      config: strictGateOpt.value,
                      moduleId,
                      instanceId,
                      txnSeq: 0,
                      compiled: runtimeCompiled,
                    })

                    if (decision.verdict === 'WARN') {
                      yield* Debug.record(decision.diagnostic)
                    } else if (decision.verdict === 'FAIL') {
                      yield* Debug.record(decision.diagnostic)
                      yield* Effect.die(decision.error)
                    }
                  }
                }
              }

              return {
                value: runtimeCompiled.select(value),
                meta,
              }
            }),
          )
        }

        // Static lane: subscribe through the selector graph entry's hub, with
        // refcounted lifetime and an eagerly-seeded cached value.
        return Stream.unwrap(
          Effect.gen(function* () {
            const entry = yield* selectorGraph.ensureEntry(compiled)
            entry.subscriberCount += 1

            yield* Effect.addFinalizer(() =>
              Effect.sync(() => {
                selectorGraph.releaseEntry(compiled.selectorId)
              }),
            )

            if (!entry.hasValue) {
              const current = yield* readState
              try {
                entry.cachedValue = compiled.select(current) as any
                entry.hasValue = true
                entry.cachedAtTxnSeq = 0
              } catch {
                // keep entry empty; commit-time eval will emit diagnostic in diagnostics mode (if enabled)
              }
            }

            return Stream.fromPubSub(entry.hub) as Stream.Stream<StateChangeWithMeta<V>>
          }),
        )
      },
      // Read-only (optionally selector-derived) view over the state ref; all write
      // paths die with a deterministic error (see writeDenied).
      ref: <V = S>(selector?: (s: S) => V): ReadonlySubscriptionRef<V> => {
        if (!selector) {
          return rootReadonlyRef as unknown as ReadonlySubscriptionRef<V>
        }

        const denyWriteRef = {
          get: Effect.map(SubscriptionRef.get(stateRef), selector),
          modify: writeDenied,
        }

        const derivedRef = {
          get: Effect.map(SubscriptionRef.get(stateRef), selector),
          // Derived stream: selector-map stateRef.changes and de-duplicate.
          changes: Stream.map(SubscriptionRef.changes(stateRef), selector).pipe(Stream.changes) as Stream.Stream<V>,
          // Runtime guard for unsafe casts (`as any as SubscriptionRef`) to keep failure deterministic.
          modify: writeDenied,
          ref: denyWriteRef,
          pubsub: {
            publish: denyPublish,
          },
          semaphore: {
            withPermits: denyWithPermits,
          },
        }

        return derivedRef
      },
    }

    // Best-effort sync action callables (perf / JS entrypoints):
    // - Exposes `runtime.actions.<tag>(payload?)` for callers that want "just do it" semantics.
    // - Tries to run the dispatch synchronously when possible (common case: no queue contention) so perf workloads
    //   can time the tick flush separately from transaction overhead.
    // - Falls back to forking the dispatch Effect if it cannot complete synchronously.
    if (declaredActionTags && declaredActionTags.size > 0) {
      // Snapshot the current Env into a ManagedRuntime so the callables can run
      // outside any Effect context.
      const services = yield* Effect.services<any>()
      const driver = ManagedRuntime.make(Layer.effectServices(Effect.succeed(services)))
      const actions: any = {}

      const dispatchSyncBestEffort = (action: A): void => {
        try {
          const exit = driver.runSyncExit(dispatchOps.dispatch(action) as any)
          if (Exit.isFailure(exit)) {
            // Could not complete synchronously -> fall back to a forked fiber.
            driver.runFork(dispatchOps.dispatch(action) as any)
          }
        } catch {
          try {
            driver.runFork(dispatchOps.dispatch(action) as any)
          } catch {
            // ignore best-effort failures (e.g. runtime disposed)
          }
        }
      }

      for (const tag of declaredActionTags) {
        actions[tag] = (payload?: unknown) => {
          const action = payload === undefined ? ({ _tag: tag } as any) : ({ _tag: tag, payload } as any)
          dispatchSyncBestEffort(action as A)
        }
      }

      ;(runtime as any).actions = actions
    }

    KernelRef.setKernelImplementationRef(runtime, kernelImplementationRef)
    RuntimeKernel.setRuntimeServicesEvidence(runtime, runtimeServicesEvidence)

    // Optional: when RunSession/EvidenceCollector is in scope, write runtime services evidence into the collector.
    // By default (non-trial-run), Env does not contain EvidenceCollectorTag, so this adds no overhead.
    const collectorOpt = yield* Effect.serviceOption(
      EvidenceCollectorTag as unknown as ServiceMap.Key<any, { setKernelImplementationRef: (x: unknown) => void; setRuntimeServicesEvidence: (x: unknown) => void }>,
    )
    if (Option.isSome(collectorOpt)) {
      collectorOpt.value.setKernelImplementationRef(kernelImplementationRef)
      const level = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)
      if (level !== 'off') {
        collectorOpt.value.setRuntimeServicesEvidence(runtimeServicesEvidence)
      }
    }

    // Fan out converge Static IR registrations to any configured collectors.
    const convergeStaticIrCollectors = yield* Effect.service(currentConvergeStaticIrCollectors).pipe(Effect.orDie)
    const registerConvergeStaticIr = (staticIr: unknown): void => {
      if (convergeStaticIrCollectors.length === 0) return
      for (const collector of convergeStaticIrCollectors) {
        collector.register(staticIr as any)
      }
    }

    // Registry of source-refresh callbacks keyed by string id; cleared on scope close.
    const sourceRefreshRegistry = new Map<string, (state: unknown) => Effect.Effect<void, never, any>>()
    yield* Effect.addFinalizer(() =>
      Effect.sync(() => {
        sourceRefreshRegistry.clear()
      }),
    )

    // Build a minimal imports-scope injector:
    // - Only store ModuleToken -> ModuleRuntime mappings.
    // - Never capture the whole Context into ModuleRuntime (avoid leaking root/base services by accident).
    const importsMap = new Map<ServiceMap.Key<any,
PublicModuleRuntime<any, any>>, PublicModuleRuntime<any, any>>()

    // Resolve each declared import from the current Env; absent imports are simply
    // omitted from the scope (lookup then returns undefined).
    for (const imported of options.imports ?? []) {
      const maybe = yield* Effect.serviceOption(imported)
      if (Option.isSome(maybe)) {
        importsMap.set(imported, maybe.value)
      }
    }

    const importsScope: RuntimeInternals['imports'] = {
      kind: 'imports-scope',
      get: (module) => importsMap.get(module),
    }

    // Global registry key "<moduleId>::<instanceId>"; only registered when the
    // module has an explicit moduleId.
    const instanceKey = options.moduleId != null ? `${options.moduleId}::${instanceId}` : undefined

    if (instanceKey) {
      registerRuntimeByInstanceKey(instanceKey, runtime as PublicModuleRuntime<any, any>)
    }

    // (Re)install a state-trait program on this runtime instance:
    //   - detects converge-graph key changes (writers/deps) and bumps the converge
    //     generation + resets the plan cache when needed,
    //   - rebuilds the converge exec IR, carrying perf hints / scratch buffers over
    //     from the previous IR where safe,
    //   - refreshes list configs and external-store-owned field paths,
    //   - optionally exports the converge Static IR to registered collectors.
    const registerStateTraitProgram = (
      program: any,
      registerOptions?: { readonly bumpReason?: any; readonly exportStaticIr?: boolean },
    ): void => {
      const nextIr = (program as any).convergeIr
      const nextKeys = nextIr
        ? {
            writersKey: nextIr.writersKey,
            depsKey: nextIr.depsKey,
          }
        : undefined

      const requestedBumpReason = registerOptions?.bumpReason
      let bumpReason: any

      // An explicitly requested reason wins; otherwise derive one from key diffs.
      if (traitState.lastConvergeIrKeys && nextKeys) {
        if (requestedBumpReason) {
          bumpReason = requestedBumpReason
        } else if (traitState.lastConvergeIrKeys.writersKey !== nextKeys.writersKey) {
          bumpReason = 'writers_changed'
        } else if (traitState.lastConvergeIrKeys.depsKey !== nextKeys.depsKey) {
          bumpReason = 'deps_changed'
        }
      } else if (traitState.lastConvergeIrKeys && !nextKeys) {
        bumpReason = requestedBumpReason ?? 'unknown'
      }

      if (bumpReason) {
        // Generation bump invalidates the converge plan cache.
        const nextGeneration = traitState.convergeGeneration.generation + 1
        const nextBumpCount = (traitState.convergeGeneration.generationBumpCount ?? 0) + 1
        traitState.convergeGeneration = {
          generation: nextGeneration,
          generationBumpCount: nextBumpCount,
          lastBumpReason: bumpReason,
        }

        traitState.pendingCacheMissReason = 'generation_bumped'
        traitState.pendingCacheMissReasonCount = (traitState.pendingCacheMissReasonCount ?? 0) + 1
        traitState.convergePlanCache = new StateTraitConverge.ConvergePlanCache(convergePlanCacheCapacity)
      }

      traitState.lastConvergeIrKeys = nextKeys

      // Stamp the (possibly bumped) generation onto the IR copy stored on the program.
      const convergeIr = nextIr
        ? {
            ...nextIr,
            generation: traitState.convergeGeneration.generation,
          }
        : undefined

      const prevConvergeIr = (traitState.program as any)?.convergeIr as any | undefined
      const canPreserveInlinePlanCache =
        !!prevConvergeIr &&
        !!nextIr &&
        prevConvergeIr.writersKey === (nextIr as any).writersKey &&
        prevConvergeIr.depsKey === (nextIr as any).depsKey

      const prevConvergeExecIr = (traitState.program as any)?.convergeExecIr as ReturnType<typeof makeConvergeExecIr>
        | undefined

      // A config error in the IR disables exec-IR construction entirely.
      const convergeExecIr =
        convergeIr && !(convergeIr as any).configError ? makeConvergeExecIr(convergeIr as any) : undefined

      if (convergeExecIr && prevConvergeExecIr) {
        // Preserve hot-path perf hints across generation bumps (forward-only; no compatibility layer).
        // This keeps auto mode stable under frequent register/bump cycles (e.g. graphChangeInvalidation perf boundary).
        convergeExecIr.perf.fullCommitEwmaOffMs = prevConvergeExecIr.perf.fullCommitEwmaOffMs
        convergeExecIr.perf.fullCommitLastTxnSeqOff = prevConvergeExecIr.perf.fullCommitLastTxnSeqOff
        convergeExecIr.perf.fullCommitMinOffMs = prevConvergeExecIr.perf.fullCommitMinOffMs
        convergeExecIr.perf.fullCommitSampleCountOff = prevConvergeExecIr.perf.fullCommitSampleCountOff
        convergeExecIr.perf.recentPlanMissHash1 = prevConvergeExecIr.perf.recentPlanMissHash1
        convergeExecIr.perf.recentPlanMissHash2 = prevConvergeExecIr.perf.recentPlanMissHash2

        // Reuse per-instance scratch draft across rebuilds (avoids per-txn allocations on shallow graphs).
        const nextScratch: any = convergeExecIr.scratch as any
        const prevScratch: any = prevConvergeExecIr.scratch as any
        nextScratch.shallowInPlaceDraft = prevScratch.shallowInPlaceDraft

        // Inline plan micro-cache is safe to preserve only when the converge graph keys are unchanged.
        if (canPreserveInlinePlanCache) {
          nextScratch.inlinePlanCacheHash1 = prevScratch.inlinePlanCacheHash1
          nextScratch.inlinePlanCacheSize1 = prevScratch.inlinePlanCacheSize1
          nextScratch.inlinePlanCachePlan1 = prevScratch.inlinePlanCachePlan1
          nextScratch.inlinePlanCacheHash2 = prevScratch.inlinePlanCacheHash2
          nextScratch.inlinePlanCacheSize2 = prevScratch.inlinePlanCacheSize2
          nextScratch.inlinePlanCachePlan2 = prevScratch.inlinePlanCachePlan2
          nextScratch.inlinePlanCacheRecentMissHash1 = prevScratch.inlinePlanCacheRecentMissHash1
          nextScratch.inlinePlanCacheRecentMissHash2 = prevScratch.inlinePlanCacheRecentMissHash2
        }
      }

      traitState.convergeStaticIrDigest =
        convergeIr && !(convergeIr as any).configError ? getConvergeStaticIrDigest(convergeIr as any) : undefined

      traitState.program = {
        ...(program as any),
        convergeIr,
        convergeExecIr,
      }
      traitState.listConfigs = RowId.collectListConfigs((program as any).spec)
      // Closure-level fast-lookup set of list paths (undefined when there are none).
      listPathSet = (() => {
        const configs = traitState.listConfigs
        if (!Array.isArray(configs) || configs.length === 0) return undefined
        const set = new Set<string>()
        for (const cfg of configs as ReadonlyArray<any>) {
          const p = cfg?.path
          if (typeof p === 'string' && p.length > 0) set.add(p)
        }
        return set.size > 0 ? set : undefined
      })()
      // Field paths owned by external stores, normalized and sorted for stable order.
      const owned: FieldPath[] = ((program as any)?.entries ?? [])
        .filter((e: any) => e && e.kind === 'externalStore' && typeof e.fieldPath === 'string')
        .map((e: any) => normalizeFieldPath(e.fieldPath))
        .filter((p: any): p is FieldPath => p != null)
        .sort(compareFieldPath)
      externalOwnedFieldPaths = owned
      externalOwnedFieldPathKeys = new Set(owned.map((p) => toKey(p)))

      if (!traitState.convergePlanCache) {
        traitState.convergePlanCache = new StateTraitConverge.ConvergePlanCache(convergePlanCacheCapacity)
      }

      const exportStaticIrEnabled = registerOptions?.exportStaticIr !== false

      if (exportStaticIrEnabled && convergeIr && !(convergeIr as any).configError) {
        if (convergeStaticIrCollectors.length > 0) {
          registerConvergeStaticIr(
            exportConvergeStaticIr({
              ir: convergeIr,
              moduleId: options.moduleId ?? 'unknown',
              instanceId,
            }),
          )
        }
      }
    }

    // 065: even if the module declares no traits, it must still have a schema-backed Static IR table (FieldPathIdRegistry),
    // otherwise reducer patchPaths / ReadQuery(static lane) cannot be mapped and will degrade to dirtyAll.
    if (!traitState.program) {
      const stateSchema = (options.tag as any)?.stateSchema as unknown
      if (stateSchema) {
        try {
          registerStateTraitProgram(StateTraitBuild.build(stateSchema as any, {} as any), { exportStaticIr: false })
        } catch {
          // best-effort: keep trait program undefined and fall back to dirtyAll scheduling when registry is missing.
        }
      }
    }

    // Append a scoped validate request onto the current transaction (no-op when
    // called outside a transaction).
    const enqueueStateTraitValidateRequest = (request: StateTraitValidate.ScopedValidateRequest): void => {
      if (!txnContext.current) return
      const current: any = txnContext.current
      const list: Array<StateTraitValidate.ScopedValidateRequest> = current.stateTraitValidateRequests ?? []
      list.push(request)
      current.stateTraitValidateRequests = list
    }

    // Stamp the latest replay event (annotated with the txn anchor and trigger)
    // onto the current transaction; no-op outside a transaction.
    const recordReplayEvent = (event: unknown): void => {
      if (!txnContext.current) return
      const current: any = txnContext.current
      current.lastReplayEvent = {
        ...(event as any),
        txnId: current.txnId,
        trigger: current.origin,
      }
    }

    const runWithStateTransactionInternal = (
      origin: StateTransaction.StateTxnOrigin,
      body: () => Effect.Effect<void>,
    ): Effect.Effect<void> =>
      enqueueTransaction(
        runOperation(
          origin.kind as any as EffectOp.EffectOp['kind'],
          origin.name ?
`txn:${origin.name}` : 'txn',\n { meta: { moduleId: options.moduleId, instanceId } },\n runWithStateTransaction(origin, body),\n ),\n )\n\n const applyTransactionSnapshot = (txnId: string, mode: 'before' | 'after'): Effect.Effect<void> =>\n enqueueTransaction(\n Effect.gen(function* () {\n // Time travel is disabled by default in production to avoid misuse.\n // Devtools should use this only in dev/test with instrumentation = \"full\".\n if (!isDevEnv()) {\n return\n }\n\n const txn = txnById.get(txnId)\n if (!txn) {\n return\n }\n\n const targetState = mode === 'before' ? txn.initialStateSnapshot : txn.finalStateSnapshot\n\n if (targetState === undefined) {\n // Time travel is not possible when snapshots are not collected.\n return\n }\n\n // Record a replay operation as a StateTransaction with origin.kind = \"devtools\"\n // so Devtools txn views can show a complete time-travel trace.\n yield* runWithStateTransaction(\n {\n kind: 'devtools',\n name: 'time-travel',\n details: {\n baseTxnId: txnId,\n mode,\n },\n },\n () =>\n Effect.sync(() => {\n StateTransaction.updateDraft(txnContext, targetState as S)\n StateTransaction.recordPatch(txnContext, '*', 'devtools')\n }),\n )\n }),\n )\n\n const stateSchema = (options.tag as any)?.stateSchema\n\n const effectsRegistry = makeEffectsRegistry({\n moduleId: options.moduleId,\n instanceId,\n actions$: runtime.actions$ as Stream.Stream<unknown>,\n })\n\n const runtimeInternals: RuntimeInternals = {\n moduleId: options.moduleId,\n instanceId,\n stateSchema,\n lifecycle: {\n registerInitRequired: (eff, options) => {\n lifecycle.registerInitRequired(eff, options)\n },\n registerStart: (eff, options) => {\n lifecycle.registerStart(eff, options)\n },\n registerDestroy: (eff, options) => {\n lifecycle.registerDestroy(eff, options)\n },\n registerOnError: (handler) => {\n lifecycle.registerOnError(handler)\n },\n registerPlatformSuspend: (eff, options) => {\n lifecycle.registerPlatformSuspend(eff, options)\n },\n 
registerPlatformResume: (eff, options) => {\n lifecycle.registerPlatformResume(eff, options)\n },\n registerPlatformReset: (eff, options) => {\n lifecycle.registerPlatformReset(eff, options)\n },\n },\n imports: importsScope,\n txn: {\n instrumentation,\n registerReducer: dispatchOps.registerReducer as any,\n registerActionStateWriteback: dispatchOps.registerActionStateWriteback as any,\n dispatchWithOriginOverride: dispatchOps.dispatchWithOriginOverride as any,\n dispatchLowPriorityWithOriginOverride: dispatchOps.dispatchLowPriorityWithOriginOverride as any,\n dispatchBatchWithOriginOverride: dispatchOps.dispatchBatchWithOriginOverride as any,\n runWithStateTransaction: runWithStateTransactionInternal as any,\n updateDraft,\n recordStatePatch,\n recordReplayEvent,\n applyTransactionSnapshot: applyTransactionSnapshot as any,\n },\n concurrency: {\n resolveConcurrencyPolicy,\n },\n txnLanes: {\n resolveTxnLanePolicy,\n },\n traits: {\n rowIdStore,\n getListConfigs: () => traitState.listConfigs as ReadonlyArray<unknown>,\n registerSourceRefresh: (fieldPath, handler) => {\n sourceRefreshRegistry.set(fieldPath, handler)\n },\n getSourceRefreshHandler: (fieldPath) => sourceRefreshRegistry.get(fieldPath),\n registerStateTraitProgram: registerStateTraitProgram as any,\n enqueueStateTraitValidateRequest: enqueueStateTraitValidateRequest as any,\n registerModuleTraitsContribution: (contribution) => {\n if (moduleTraitsState.frozen) {\n throw new Error('[ModuleTraitsFrozen] Cannot register traits contribution after finalize/freeze.')\n }\n moduleTraitsState.contributions.push(contribution)\n },\n freezeModuleTraits: () => {\n moduleTraitsState.frozen = true\n },\n getModuleTraitsContributions: () => moduleTraitsState.contributions,\n getModuleTraitsSnapshot: () => moduleTraitsState.snapshot,\n setModuleTraitsSnapshot: (snapshot) => {\n moduleTraitsState.snapshot = snapshot\n },\n },\n effects: {\n registerEffect: (args) => effectsRegistry.registerEffect(args as any),\n },\n 
devtools: {\n registerConvergeStaticIr: registerConvergeStaticIr as any,\n },\n }\n\n yield* installInternalHooks({ runtime, runtimeInternals })\n\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n importsMap.clear()\n }),\n )\n\n yield* Effect.addFinalizer(() =>\n Effect.sync(() => {\n if (Option.isSome(runtimeStoreOpt)) {\n runtimeStoreOpt.value.unregisterModuleInstance(moduleInstanceKey)\n }\n }),\n )\n\n if (options.tag) {\n registerRuntime(options.tag as ServiceMap.Key<any, PublicModuleRuntime<S, A>>, runtime)\n }\n\n yield* Effect.addFinalizer(() =>\n lifecycle.runDestroy.pipe(\n Effect.flatMap(() =>\n runOperation(\n 'lifecycle',\n 'module:destroy',\n { meta: { moduleId: options.moduleId, instanceId } },\n Debug.record({\n type: 'module:destroy',\n moduleId: options.moduleId,\n instanceId,\n }),\n ),\n ),\n Effect.tap(() =>\n Effect.sync(() => {\n if (options.tag) {\n unregisterRuntime(options.tag as ServiceMap.Key<any, PublicModuleRuntime<any, any>>)\n }\n if (instanceKey) {\n unregisterRuntimeByInstanceKey(instanceKey)\n }\n }),\n ),\n ),\n )\n\n if (options.tag && options.logics?.length) {\n yield* runModuleLogics({\n tag: options.tag as ServiceMap.Key<any, PublicModuleRuntime<S, A>>,\n logics: options.logics,\n runtime,\n lifecycle,\n moduleId,\n instanceId,\n })\n }\n\n if (options.processes && options.processes.length > 0) {\n const env = (yield* Effect.services<Scope.Scope | R>()) as ServiceMap.ServiceMap<any>\n const rootContextOpt = ServiceMap.getOption(env, RootContextTag as any)\n const isAppModule =\n Option.isSome(rootContextOpt) &&\n Array.isArray((rootContextOpt.value as RootContext).appModuleIds) &&\n (rootContextOpt.value as RootContext).appModuleIds!.includes(moduleId)\n\n if (!isAppModule) {\n const processRuntimeOpt = ServiceMap.getOption(env, ProcessRuntime.ProcessRuntimeTag as any)\n const processRuntime = Option.isSome(processRuntimeOpt)\n ? 
(processRuntimeOpt.value as ProcessRuntime.ProcessRuntime)\n : undefined\n const scope = {\n type: 'moduleInstance',\n moduleId,\n instanceId,\n } as const\n\n yield* Effect.forEach(\n options.processes,\n (process) =>\n Effect.gen(function* () {\n if (processRuntime) {\n const installEffect = processRuntime.install(process as any, {\n scope,\n enabled: true,\n installedAt: 'moduleRuntime',\n })\n\n // During the acquire phase of Layer.scoped(...), the current module runtime is not yet in Context,\n // but instance-scope processes (especially Link) may strictly require dependencies to be resolvable in scope.\n // We explicitly provide the current module runtime to avoid falsely treating itself as a missing dependency.\n const installation = options.tag\n ? yield* installEffect.pipe(\n Effect.provideService(options.tag as ServiceMap.Key<any, any>, runtime as any),\n )\n : yield* installEffect\n\n if (installation !== undefined) {\n return\n }\n }\n\n // Legacy fallback: a raw Effect is still allowed as a process host, but it has no Process static surface/diagnostics.\n yield* Effect.forkScoped(process as any)\n }),\n { discard: true },\n )\n }\n }\n\n return runtime\n })\n\n return program as Effect.Effect<PublicModuleRuntime<S, A>, never, Scope.Scope | R>\n}\n","import { Cause, Effect, Ref, ServiceMap } from 'effect'\nimport { toSerializableErrorSummary } from './errorSummary.js'\nimport * as Debug from './DebugSink.js'\n\nexport type Phase = 'init' | 'run' | 'destroy' | 'platform'\n\nexport type Hook = 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n\nexport type TaskKind = 'initRequired' | 'start' | 'destroy' | 'platformSuspend' | 'platformResume' | 'platformReset'\n\nexport interface ErrorContext {\n readonly phase: Phase\n readonly hook: Hook\n readonly moduleId: string\n readonly instanceId: string\n readonly taskId?: string\n readonly txnSeq?: number\n readonly opSeq?: number\n /**\n * For diagnostics only: an 
implementation-side marker indicating where the error originated,\n * e.g. \"logic.fork\" / \"initRequired\" / \"start\".\n *\n * Note: this field must be serializable and must not become a protocol anchor.\n */\n readonly origin?: string\n}\n\nexport interface ModuleRuntimeIdentity {\n readonly moduleId: string\n readonly instanceId: string\n readonly runtimeLabel?: string\n}\n\nexport type InstanceStatus = 'creating' | 'initializing' | 'ready' | 'failed' | 'terminating' | 'terminated'\n\nexport interface InitProgress {\n readonly total: number\n readonly completed: number\n readonly current?: number\n readonly startedAt?: number\n}\n\nexport type LifecycleOutcome =\n | { readonly status: 'success' }\n | {\n readonly status: 'failure'\n readonly error: import('./errorSummary.js').SerializableErrorSummary\n }\n\nexport interface LifecycleStatus {\n readonly identity: ModuleRuntimeIdentity\n readonly status: InstanceStatus\n readonly initOutcome?: LifecycleOutcome\n readonly initProgress?: InitProgress\n}\n\nexport interface TaskRef {\n readonly taskId: string\n readonly kind: TaskKind\n readonly order: number\n readonly name?: string\n readonly fatalOnFailure?: boolean\n}\n\nexport interface LifecycleTask extends TaskRef {\n readonly effect: Effect.Effect<void, never, any>\n}\n\nexport interface Budgets {\n /** Per-instance lifecycle event budget (aligned with specs/011 data-model; default ≤ 20). */\n readonly maxEventsPerInstance: number\n /** Per-event size budget (aligned with specs/011 data-model; default ≤ 4KB). 
*/\n readonly maxEventBytes: number\n}\n\nexport interface LifecycleManager {\n readonly identity: ModuleRuntimeIdentity\n readonly budgets: Budgets\n\n readonly registerPlatformSuspend: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n readonly registerPlatformResume: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n readonly registerPlatformReset: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string },\n ) => void\n\n readonly registerInitRequired: (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => void\n readonly registerStart: (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string; readonly fatalOnFailure?: boolean },\n ) => void\n readonly registerDestroy: (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => void\n readonly registerOnError: (\n handler: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>,\n ) => void\n\n readonly getStatus: Effect.Effect<LifecycleStatus>\n readonly setStatus: (\n status: InstanceStatus,\n patch?: {\n readonly initOutcome?: LifecycleOutcome | undefined\n readonly initProgress?: InitProgress | undefined\n readonly runtimeLabel?: string | undefined\n },\n ) => Effect.Effect<void>\n\n readonly notifyError: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>\n\n readonly runPlatformSuspend: Effect.Effect<void, never, any>\n readonly runPlatformResume: Effect.Effect<void, never, any>\n readonly runPlatformReset: Effect.Effect<void, never, any>\n\n readonly runInitRequired: Effect.Effect<void, unknown, any>\n readonly runStart: Effect.Effect<void, never, any>\n readonly runDestroy: Effect.Effect<void, never, any>\n\n /** Diagnostics only: whether any onError handler has been registered. 
*/\n readonly hasOnErrorHandlers: Effect.Effect<boolean>\n\n /** Tests/diagnostics only: read a snapshot of registered tasks (immutable view). */\n readonly getTaskSnapshot: Effect.Effect<\n Readonly<{\n readonly initRequired: ReadonlyArray<TaskRef>\n readonly start: ReadonlyArray<TaskRef>\n readonly destroy: ReadonlyArray<TaskRef>\n readonly platformSuspend: ReadonlyArray<TaskRef>\n readonly platformResume: ReadonlyArray<TaskRef>\n readonly platformReset: ReadonlyArray<TaskRef>\n }>,\n never,\n never\n >\n}\n\nexport class LifecycleContext extends ServiceMap.Service<LifecycleContext, LifecycleManager>()('@logixjs/LifecycleManager') {}\n\nconst safeRun = (label: string, eff: Effect.Effect<void, any, any>) =>\n eff.pipe(\n Effect.matchCauseEffect({\n onSuccess: () => Effect.void,\n onFailure: (cause) => Effect.logError(`[${label}] failed: ${Cause.pretty(cause)}`),\n }),\n )\n\nconst makeTaskId = (kind: TaskKind, order: number): string => `${kind}:${order}`\n\nexport const makeLifecycleManager = (identity: ModuleRuntimeIdentity): Effect.Effect<LifecycleManager> =>\n Effect.gen(function* () {\n const budgets: Budgets = {\n maxEventsPerInstance: 20,\n maxEventBytes: 4 * 1024,\n }\n\n const statusRef = yield* Ref.make<LifecycleStatus>({\n identity,\n status: 'creating',\n })\n\n const initRequired: LifecycleTask[] = []\n const start: LifecycleTask[] = []\n const destroy: LifecycleTask[] = []\n const platformSuspend: LifecycleTask[] = []\n const platformResume: LifecycleTask[] = []\n const platformReset: LifecycleTask[] = []\n const onErrorHandlers: Array<\n (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>\n > = []\n\n const getStatus: Effect.Effect<LifecycleStatus> = Ref.get(statusRef)\n\n const recordPhase = (phase: Phase, name: string, payload?: unknown): Effect.Effect<void, never, any> =>\n Debug.record({\n type: 'lifecycle:phase',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n phase,\n name,\n payload,\n 
})\n\n const setStatus = (\n status: InstanceStatus,\n patch?: {\n readonly initOutcome?: LifecycleOutcome | undefined\n readonly initProgress?: InitProgress | undefined\n readonly runtimeLabel?: string | undefined\n },\n ) =>\n Ref.update(statusRef, (prev) => ({\n ...prev,\n identity: {\n ...prev.identity,\n ...(patch?.runtimeLabel ? { runtimeLabel: patch.runtimeLabel } : null),\n },\n status,\n ...(patch?.initOutcome !== undefined ? { initOutcome: patch.initOutcome } : null),\n ...(patch?.initProgress !== undefined ? { initProgress: patch.initProgress } : null),\n }))\n\n const registerInitRequired = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = initRequired.length\n initRequired.push({\n taskId: makeTaskId('initRequired', order),\n kind: 'initRequired',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerStart = (\n effect: Effect.Effect<void, never, any>,\n options?: { readonly name?: string; readonly fatalOnFailure?: boolean },\n ) => {\n const order = start.length\n start.push({\n taskId: makeTaskId('start', order),\n kind: 'start',\n order,\n name: options?.name,\n fatalOnFailure: options?.fatalOnFailure,\n effect,\n })\n }\n\n const registerDestroy = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = destroy.length\n destroy.push({\n taskId: makeTaskId('destroy', order),\n kind: 'destroy',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerOnError = (\n handler: (cause: Cause.Cause<unknown>, context: ErrorContext) => Effect.Effect<void, never, any>,\n ) => {\n onErrorHandlers.push(handler)\n }\n\n const registerPlatformSuspend = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformSuspend.length\n platformSuspend.push({\n taskId: makeTaskId('platformSuspend', order),\n kind: 'platformSuspend',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const 
registerPlatformResume = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformResume.length\n platformResume.push({\n taskId: makeTaskId('platformResume', order),\n kind: 'platformResume',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const registerPlatformReset = (effect: Effect.Effect<void, never, any>, options?: { readonly name?: string }) => {\n const order = platformReset.length\n platformReset.push({\n taskId: makeTaskId('platformReset', order),\n kind: 'platformReset',\n order,\n name: options?.name,\n effect,\n })\n }\n\n const notifyError = (cause: Cause.Cause<unknown>, context: ErrorContext) => {\n // Interrupt/cancel should not be reported as an error.\n if (Cause.hasInterruptsOnly(cause)) {\n return Effect.void\n }\n\n return Debug.record({\n type: 'lifecycle:error',\n moduleId: context.moduleId,\n instanceId: context.instanceId,\n cause,\n phase: context.phase,\n hook: context.hook,\n taskId: context.taskId,\n txnSeq: context.txnSeq,\n opSeq: context.opSeq,\n origin: context.origin,\n }).pipe(\n Effect.flatMap(() =>\n Effect.forEach(\n onErrorHandlers,\n (handler) =>\n handler(cause, context).pipe(\n Effect.catchCause((inner) => Effect.logError(`[lifecycle.onError] failed: ${Cause.pretty(inner)}`)),\n ),\n { discard: true },\n ),\n ),\n )\n }\n\n const runInitRequired: Effect.Effect<void, unknown, any> = Effect.gen(function* () {\n const total = initRequired.length\n if (total === 0) {\n yield* setStatus('ready', {\n initProgress: { total: 0, completed: 0 },\n initOutcome: { status: 'success' },\n })\n return\n }\n\n const startedAt = Date.now()\n yield* recordPhase('init', 'initRequired:start', { total })\n yield* setStatus('initializing', {\n initProgress: { total, completed: 0, current: 0, startedAt },\n })\n\n let completed = 0\n for (let i = 0; i < initRequired.length; i++) {\n yield* setStatus('initializing', {\n initProgress: { total, completed, current: i, startedAt },\n })\n\n 
const task = initRequired[i]\n const exit = yield* Effect.exit(task.effect)\n\n if (exit._tag === 'Success') {\n completed += 1\n yield* setStatus('initializing', {\n initProgress: { total, completed, current: i + 1, startedAt },\n })\n continue\n }\n\n const summary = toSerializableErrorSummary(exit.cause)\n yield* notifyError(exit.cause, {\n phase: 'init',\n hook: 'initRequired',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'initRequired',\n })\n\n yield* setStatus('failed', {\n initProgress: { total, completed, current: i, startedAt },\n initOutcome: { status: 'failure', error: summary.errorSummary },\n })\n\n return yield* Effect.failCause(exit.cause)\n }\n\n yield* recordPhase('init', 'initRequired:success', { total })\n yield* setStatus('ready', {\n initProgress: { total, completed, current: total, startedAt },\n initOutcome: { status: 'success' },\n })\n })\n\n const runStart: Effect.Effect<void, never, any> = recordPhase('run', 'start:schedule', {\n total: start.length,\n }).pipe(\n Effect.flatMap(() =>\n Effect.forEach(\n start,\n (task) =>\n Effect.forkScoped(\n task.effect.pipe(\n Effect.catchCause((cause) =>\n notifyError(cause, {\n phase: 'run',\n hook: 'start',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'start',\n })),\n ),\n ).pipe(Effect.asVoid),\n { discard: true, concurrency: 'unbounded' },\n ),\n ),\n )\n\n const runDestroy: Effect.Effect<void, never, any> = Effect.gen(function* () {\n yield* recordPhase('destroy', 'destroy:start', { total: destroy.length })\n yield* setStatus('terminating')\n\n // destroy: run in reverse registration order (LIFO), best-effort (one failure does not block others).\n for (let i = destroy.length - 1; i >= 0; i--) {\n const task = destroy[i]\n yield* safeRun(\n 'lifecycle.onDestroy',\n task.effect.pipe(\n Effect.catchCause((cause) =>\n notifyError(cause, {\n phase: 'destroy',\n hook: 'destroy',\n moduleId: 
identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'destroy',\n })),\n ),\n )\n }\n\n yield* setStatus('terminated')\n yield* recordPhase('destroy', 'destroy:done', { total: destroy.length })\n })\n\n const runPlatformSuspend: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformSuspend.length === 0) return\n\n yield* recordPhase('platform', 'signal:suspend', { total: platformSuspend.length })\n for (const task of platformSuspend) {\n yield* safeRun(\n 'lifecycle.onSuspend',\n task.effect.pipe(\n Effect.catchCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'suspend',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.suspend',\n })),\n ),\n )\n }\n })\n\n const runPlatformResume: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformResume.length === 0) return\n\n yield* recordPhase('platform', 'signal:resume', { total: platformResume.length })\n for (const task of platformResume) {\n yield* safeRun(\n 'lifecycle.onResume',\n task.effect.pipe(\n Effect.catchCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'resume',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.resume',\n })),\n ),\n )\n }\n })\n\n const runPlatformReset: Effect.Effect<void, never, any> = Effect.gen(function* () {\n if (platformReset.length === 0) return\n\n yield* recordPhase('platform', 'signal:reset', { total: platformReset.length })\n for (const task of platformReset) {\n yield* safeRun(\n 'lifecycle.onReset',\n task.effect.pipe(\n Effect.catchCause((cause) =>\n notifyError(cause, {\n phase: 'platform',\n hook: 'reset',\n moduleId: identity.moduleId,\n instanceId: identity.instanceId,\n taskId: task.taskId,\n origin: 'platform.reset',\n })),\n ),\n )\n }\n })\n\n const getTaskSnapshot: LifecycleManager['getTaskSnapshot'] = Effect.sync(() => ({\n initRequired: 
initRequired.map(({ effect: _eff, ...rest }) => rest),\n start: start.map(({ effect: _eff, ...rest }) => rest),\n destroy: destroy.map(({ effect: _eff, ...rest }) => rest),\n platformSuspend: platformSuspend.map(({ effect: _eff, ...rest }) => rest),\n platformResume: platformResume.map(({ effect: _eff, ...rest }) => rest),\n platformReset: platformReset.map(({ effect: _eff, ...rest }) => rest),\n }))\n\n const hasOnErrorHandlers: LifecycleManager['hasOnErrorHandlers'] = Effect.sync(() => onErrorHandlers.length > 0)\n\n return {\n identity,\n budgets,\n registerPlatformSuspend,\n registerPlatformResume,\n registerPlatformReset,\n registerInitRequired,\n registerStart,\n registerDestroy,\n registerOnError,\n getStatus,\n setStatus,\n notifyError,\n runPlatformSuspend,\n runPlatformResume,\n runPlatformReset,\n runInitRequired,\n runStart,\n runDestroy,\n hasOnErrorHandlers,\n getTaskSnapshot,\n }\n })\n","import { Cause } from 'effect'\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport interface SerializableErrorSummary {\n readonly message: string\n readonly name?: string\n readonly code?: string\n readonly hint?: string\n}\n\nexport interface ErrorSummaryResult {\n readonly errorSummary: SerializableErrorSummary\n readonly downgrade?: DowngradeReason\n}\n\nconst truncate = (value: string, maxLen: number): { readonly value: string; readonly truncated: boolean } => {\n if (value.length <= maxLen) return { value, truncated: false }\n return { value: value.slice(0, maxLen), truncated: true }\n}\n\nconst safeStringify = (value: unknown): { readonly ok: true; readonly json: string } | { readonly ok: false } => {\n try {\n return { ok: true, json: JSON.stringify(value) }\n } catch {\n return { ok: false }\n }\n}\n\nconst getMessageFromUnknown = (cause: unknown): string => {\n if (typeof cause === 'string') return cause\n if (typeof cause === 'number' || typeof cause === 'boolean' || typeof cause === 'bigint') return String(cause)\n if 
(cause instanceof Error) return cause.message || cause.name || 'Error'\n if (cause && typeof cause === 'object' && 'message' in (cause as any) && typeof (cause as any).message === 'string') {\n return (cause as any).message as string\n }\n\n // Try Effect Cause pretty (best-effort). This may include more details than needed,\n // so callers MUST still treat it as an untrusted/oversized string and truncate.\n try {\n const pretty = Cause.pretty(cause as Cause.Cause<unknown>)\n if (typeof pretty === 'string' && pretty.length > 0) return pretty\n } catch {\n // ignore\n }\n\n return 'Unknown error'\n}\n\nexport const toSerializableErrorSummary = (\n cause: unknown,\n options?: {\n readonly maxMessageLength?: number\n },\n): ErrorSummaryResult => {\n const maxMessageLength = options?.maxMessageLength ?? 256\n\n const messageRaw = getMessageFromUnknown(cause)\n const { value: message, truncated } = truncate(messageRaw, maxMessageLength)\n\n const summary: { message: string; name?: string; code?: string; hint?: string } = {\n message,\n }\n\n if (cause instanceof Error) {\n if (cause.name && cause.name !== 'Error') summary.name = cause.name\n const anyCause = cause as any\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n else if (typeof anyCause.code === 'number' && Number.isFinite(anyCause.code)) summary.code = String(anyCause.code)\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n return {\n errorSummary: summary,\n downgrade: truncated ? 
'oversized' : undefined,\n }\n }\n\n if (cause && typeof cause === 'object') {\n const anyCause = cause as any\n if (typeof anyCause.name === 'string' && anyCause.name.length > 0) summary.name = anyCause.name\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n }\n\n // If the original cause isn't JSON-serializable, mark it explicitly.\n const stringifyResult = safeStringify(cause)\n if (!stringifyResult.ok) {\n return {\n errorSummary: summary,\n downgrade: 'non_serializable',\n }\n }\n\n if (truncated) {\n return {\n errorSummary: summary,\n downgrade: 'oversized',\n }\n }\n\n if (message === 'Unknown error') {\n return {\n errorSummary: summary,\n downgrade: 'unknown',\n }\n }\n\n return { errorSummary: summary }\n}\n","import { Cause, Effect, Layer, Logger, Option, ServiceMap } from 'effect'\nimport {\n projectJsonValue,\n type DowngradeReason as JsonDowngradeReason,\n type JsonValue,\n type JsonValueProjectionStats,\n} from '../../observability/jsonValue.js'\nimport type * as ReplayLog from './ReplayLog.js'\nimport {\n toSerializableErrorSummary,\n type DowngradeReason as ErrorDowngradeReason,\n type SerializableErrorSummary,\n} from './errorSummary.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport type * as ProcessProtocol from './process/protocol.js'\n\nexport interface TriggerRef {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\ntype TraceEventType = `trace:${string}`\ntype GenericTraceEventType = Exclude<\n TraceEventType,\n 'trace:trait:converge' | 'trace:trait:check' | 'trace:trait:validate'\n>\n\n/**\n * ReplayEventRef:\n * - Replay event structure referenced from Debug events.\n * - Based on ReplayLog.Event, enriched with txn/trigger association fields for Devtools aggregation and explanation.\n */\nexport type ReplayEventRef = ReplayLog.ReplayLogEvent & {\n 
readonly txnId?: string\n readonly trigger?: TriggerRef\n}\n\nexport type Event =\n | {\n readonly type: 'module:init'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'module:destroy'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:phase'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly phase: 'init' | 'run' | 'destroy' | 'platform'\n readonly name: string\n readonly payload?: unknown\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'action:dispatch'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly action: unknown\n readonly actionTag?: string\n readonly unknownAction?: boolean\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'state:update'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly state: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * Optional: Static IR digest aligned with FieldPathId/StepId (for consumer-side reverse-mapping & alignment).\n * - When missing or mismatched, consumers must not attempt to reverse-map rootIds -> rootPaths (avoid wrong UI).\n * - Allowed to be omitted on near-zero-cost diagnostics=off paths.\n */\n readonly staticIrDigest?: string\n /**\n * Optional: the affected scope aggregated by this commit (field-level dirty-set).\n * - Populated by Runtime at commit time;\n * - Must stay slim and serializable;\n * - Devtools can use it to explain \"why converge/validate ran / why it degraded to full\".\n */\n readonly dirtySet?: unknown\n /**\n * Optional: patch count aggregated by this commit (from StateTransaction).\n * - Populated by Runtime only on transaction paths.\n * - Devtools can use it as a 
lightweight transaction summary metric.\n */\n readonly patchCount?: number\n /**\n * Optional: whether patch records were truncated (bounded) under full instrumentation.\n */\n readonly patchesTruncated?: boolean\n /**\n * Optional: truncation reason code (stable enum).\n */\n readonly patchesTruncatedReason?: 'max_patches'\n /**\n * Optional: commit mode (normal/batched/low-priority, etc).\n * - Populated by Runtime;\n * - Default is chosen by the caller (typically \"normal\").\n */\n readonly commitMode?: string\n /**\n * Optional: external visibility priority (normal/low).\n * - Populated by Runtime.\n * - Mainly used by React external subscription scheduling (avoid unnecessary renders).\n */\n readonly priority?: string\n /**\n * Optional: transaction origin kind (origin.kind) that triggered this state commit:\n * - e.g. \"action\" / \"source-refresh\" / \"service-callback\" / \"devtools\".\n * - Populated by Runtime only on StateTransaction-based paths.\n * - Devtools can distinguish app transactions vs devtools time-travel operations.\n */\n readonly originKind?: string\n /**\n * Optional: transaction origin name (origin.name) that triggered this state commit:\n * - e.g. 
action dispatch / fieldPath / task:success/task:failure, etc.\n * - Populated by Runtime only on StateTransaction-based paths.\n */\n readonly originName?: string\n /**\n * Reserved: Trait converge summary (for Devtools window-level stats / TopN costs / degrade reasons, etc.).\n * - Phase 2: field slot only; structure is not fixed.\n * - Later phases will align with the Trait/Replay event model into an explainable structure.\n */\n readonly traitSummary?: unknown\n /**\n * Reserved: replay event associated with this transaction (re-emit source of truth from ReplayLog).\n * - Phase 2: field slot only.\n * - Later phases will align with ReplayLog.Event structure.\n */\n readonly replayEvent?: ReplayEventRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly identity: ProcessProtocol.ProcessInstanceIdentity\n readonly severity: 'info' | 'warning' | 'error'\n readonly eventSeq: number\n readonly timestampMs: number\n readonly trigger?: ProcessProtocol.ProcessTrigger\n readonly dispatch?: {\n readonly moduleId: string\n readonly instanceId: string\n readonly actionId: string\n }\n readonly error?: ProcessProtocol.SerializableErrorSummary\n readonly budgetEnvelope?: ProcessProtocol.ProcessEvent['budgetEnvelope']\n readonly degrade?: ProcessProtocol.ProcessEvent['degrade']\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly cause: unknown\n readonly phase?: 'init' | 'run' | 'destroy' | 'platform'\n readonly hook?: 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n readonly taskId?: string\n readonly opSeq?: number\n readonly 
origin?: string\n readonly txnSeq?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'diagnostic'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n readonly txnSeq?: number\n readonly txnId?: string\n readonly opSeq?: number\n readonly trigger?: TriggerRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'warn:priority-inversion'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly tickSeq: number\n readonly reason: 'deferredBacklog' | 'subscribedNonUrgent'\n readonly selectorId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'warn:microtask-starvation'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly tickSeq: number\n readonly microtaskChainDepth?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n /**\n * trace:* events:\n * - Extension hook for runtime tracing / Playground / Alignment Lab.\n * - Only the type prefix and moduleId are standardized; payload shape is defined by higher layers (e.g. 
spanId/attributes in data).\n */\n | {\n readonly type: 'trace:trait:converge'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:check'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:validate'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: GenericTraceEventType\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data?: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n\nexport interface Sink {\n readonly record: (event: Event) => Effect.Effect<void>\n}\nexport const currentDebugSinks = ServiceMap.Reference<ReadonlyArray<Sink>>('@logixjs/core/Debug.currentDebugSinks', {\n defaultValue: () => [],\n})\nexport const currentRuntimeLabel = ServiceMap.Reference<string | undefined>('@logixjs/core/Debug.currentRuntimeLabel', {\n defaultValue: () => undefined,\n})\nexport const currentTxnId = ServiceMap.Reference<string | undefined>('@logixjs/core/Debug.currentTxnId', {\n defaultValue: () => undefined,\n})\nexport const currentOpSeq = ServiceMap.Reference<number | undefined>('@logixjs/core/Debug.currentOpSeq', {\n defaultValue: () => undefined,\n})\nexport type DiagnosticsLevel = 'off' | 'light' | 'sampled' | 'full'\nexport const currentDiagnosticsLevel = ServiceMap.Reference<DiagnosticsLevel>('@logixjs/core/Debug.currentDiagnosticsLevel', {\n defaultValue: () => 'off',\n})\n\nexport const diagnosticsLevel = (level: 
DiagnosticsLevel): Layer.Layer<any, never, never> =>\n Layer.succeed(currentDiagnosticsLevel, level) as Layer.Layer<any, never, never>\n\nexport type DiagnosticsMaterialization = 'eager' | 'lazy'\nexport const currentDiagnosticsMaterialization = ServiceMap.Reference<DiagnosticsMaterialization>(\n '@logixjs/core/Debug.currentDiagnosticsMaterialization',\n {\n defaultValue: () => 'eager',\n },\n)\n\nexport const diagnosticsMaterialization = (mode: DiagnosticsMaterialization): Layer.Layer<any, never, never> =>\n Layer.succeed(currentDiagnosticsMaterialization, mode) as Layer.Layer<any, never, never>\n\nexport type TraceMode = 'off' | 'on'\nexport const currentTraceMode = ServiceMap.Reference<TraceMode>('@logixjs/core/Debug.currentTraceMode', {\n defaultValue: () => 'on',\n})\n\nexport const traceMode = (mode: TraceMode): Layer.Layer<any, never, never> =>\n Layer.succeed(currentTraceMode, mode) as Layer.Layer<any, never, never>\n\nexport interface TraitConvergeDiagnosticsSamplingConfig {\n /**\n * Sample once every N txns (deterministic, based on stable txnSeq).\n * - 1: sample every txn (timing granularity similar to full, while keeping payload slim)\n */\n readonly sampleEveryN: number\n /**\n * Max number of TopK hotspots to output (recommended ≤ 3).\n */\n readonly topK: number\n}\n\nexport const currentTraitConvergeDiagnosticsSampling = ServiceMap.Reference<TraitConvergeDiagnosticsSamplingConfig>(\n '@logixjs/core/Debug.currentTraitConvergeDiagnosticsSampling',\n {\n defaultValue: () => ({\n sampleEveryN: 32,\n topK: 3,\n }),\n },\n)\n\nexport const traitConvergeDiagnosticsSampling = (\n config: TraitConvergeDiagnosticsSamplingConfig,\n): Layer.Layer<any, never, never> =>\n Layer.succeed(currentTraitConvergeDiagnosticsSampling, config) as Layer.Layer<any, never, never>\n\nexport const appendSinks = (sinks: ReadonlyArray<Sink>): Layer.Layer<any, never, never> =>\n Layer.effect(\n currentDebugSinks,\n Effect.gen(function* () {\n const current = yield* 
Effect.service(currentDebugSinks)\n return [...current, ...sinks]\n }),\n ) as Layer.Layer<any, never, never>\n\nexport type RuntimeDebugEventKind =\n | 'action'\n | 'state'\n | 'service'\n | 'process'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'lifecycle'\n | 'react-render'\n | 'devtools'\n | 'diagnostic'\n | (string & {})\n\nexport interface RuntimeDebugEventRef {\n readonly eventId: string\n readonly eventSeq: number\n readonly moduleId: string\n readonly instanceId: string\n readonly runtimeLabel?: string\n readonly txnSeq: number\n readonly txnId?: string\n /**\n * linkId:\n * - Current operation chain id (shared by boundary ops in the same chain).\n * - Created by Runtime at the boundary root and propagated via FiberRef across nested/cross-module chains.\n */\n readonly linkId?: string\n readonly timestamp: number\n readonly kind: RuntimeDebugEventKind\n readonly label: string\n readonly meta?: JsonValue\n readonly errorSummary?: SerializableErrorSummary\n readonly downgrade?: {\n readonly reason?: 'non_serializable' | 'oversized' | 'budget_exceeded' | 'unknown'\n }\n}\n\nexport type TxnLaneEvidenceReason =\n | 'disabled'\n | 'forced_off'\n | 'forced_sync'\n | 'queued_non_urgent'\n | 'preempted_by_urgent'\n | 'budget_yield'\n | 'coalesced'\n | 'canceled'\n | 'max_lag_forced'\n | 'starvation_protection'\n\nexport type TxnLaneNonUrgentYieldReason = 'none' | 'input_pending' | 'budget_exceeded' | 'forced_frame_yield'\n\nexport type TxnLaneEvidence = {\n readonly anchor: {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly opSeq?: number\n }\n readonly lane: 'urgent' | 'nonUrgent'\n readonly kind: string\n readonly policy: {\n readonly enabled: boolean\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n readonly configScope: 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'\n readonly budgetMs: number\n readonly debounceMs: number\n readonly maxLagMs: number\n readonly allowCoalesce: 
boolean\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n readonly queueMode?: 'fifo' | 'lanes'\n }\n readonly backlog: {\n readonly pendingCount: number\n readonly ageMs?: number\n readonly coalescedCount?: number\n readonly canceledCount?: number\n }\n readonly budget?: {\n readonly budgetMs?: number\n readonly sliceDurationMs?: number\n readonly yieldCount?: number\n readonly yielded?: boolean\n readonly yieldReason?: TxnLaneNonUrgentYieldReason\n }\n readonly starvation?: {\n readonly triggered?: boolean\n readonly reason?: string\n }\n readonly reasons: ReadonlyArray<TxnLaneEvidenceReason>\n}\n\nlet nextGlobalEventSeq = 0\n\nexport const clearRuntimeDebugEventSeq = (): void => {\n nextGlobalEventSeq = 0\n}\n\nconst nextEventSeq = (): number => {\n nextGlobalEventSeq += 1\n return nextGlobalEventSeq\n}\n\nconst makeEventId = (instanceId: string, eventSeq: number): string => `${instanceId}::e${eventSeq}`\n\ntype DowngradeReason = JsonDowngradeReason | ErrorDowngradeReason | 'budget_exceeded'\n\nconst mergeDowngrade = (\n current: DowngradeReason | undefined,\n next: DowngradeReason | undefined,\n): DowngradeReason | undefined => {\n if (!current) return next\n if (!next) return current\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n if (current === 'budget_exceeded' || next === 'budget_exceeded') return 'budget_exceeded'\n return 'unknown'\n}\n\nconst stripDirtyRootPaths = (value: unknown): unknown => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n const { rootPaths, ...rest } = value as Record<string, unknown> & { readonly rootPaths?: unknown }\n return rest\n}\n\nconst stripTraitConvergeLegacyFields = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const { dirtyRoots, ...rest } = anyValue\n const 
dirty = (rest as any).dirty\n if (!dirty || typeof dirty !== 'object' || Array.isArray(dirty)) {\n return rest as JsonValue\n }\n\n return {\n ...rest,\n dirty: stripDirtyRootPaths(dirty),\n } as JsonValue\n}\n\nconst stripTraitConvergeLight = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? { reason: (dirty as any).reason } : null),\n ...(Array.isArray((dirty as any).rootIds) ? { rootIds: (dirty as any).rootIds } : null),\n ...(typeof (dirty as any).rootIdsTruncated === 'boolean'\n ? { rootIdsTruncated: (dirty as any).rootIdsTruncated }\n : null),\n }\n : undefined\n\n const { top3, dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n}\n\nconst stripTraitConvergeSampled = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? { reason: (dirty as any).reason } : null),\n }\n : undefined\n\n const { dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n}\n\nconst stripTraitCheckLight = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const degraded = anyValue.degraded\n const degradedSlim =\n degraded && typeof degraded === 'object' && !Array.isArray(degraded) ? 
{ kind: (degraded as any).kind } : undefined\n const summary = anyValue.summary\n let summarySlim: Record<string, number> | undefined\n if (summary && typeof summary === 'object' && !Array.isArray(summary)) {\n const candidate: Record<string, number> = {}\n let hasSummaryField = false\n if (typeof (summary as any).scannedRows === 'number') {\n candidate.scannedRows = (summary as any).scannedRows\n hasSummaryField = true\n }\n if (typeof (summary as any).affectedRows === 'number') {\n candidate.affectedRows = (summary as any).affectedRows\n hasSummaryField = true\n }\n if (typeof (summary as any).changedRows === 'number') {\n candidate.changedRows = (summary as any).changedRows\n hasSummaryField = true\n }\n if (hasSummaryField) {\n summarySlim = candidate\n }\n }\n\n const slim: Record<string, unknown> = {}\n if (typeof anyValue.ruleId === 'string') slim.ruleId = anyValue.ruleId\n if (Array.isArray(anyValue.scopeFieldPath)) slim.scopeFieldPath = anyValue.scopeFieldPath\n if (typeof anyValue.mode === 'string') slim.mode = anyValue.mode\n if (anyValue.trigger && typeof anyValue.trigger === 'object' && !Array.isArray(anyValue.trigger)) {\n slim.trigger = anyValue.trigger\n }\n if (typeof anyValue.rowIdMode === 'string') slim.rowIdMode = anyValue.rowIdMode\n if (summarySlim) slim.summary = summarySlim\n if (degradedSlim) slim.degraded = degradedSlim\n\n return slim as JsonValue\n}\n\n// In browsers, to reduce duplicated noise caused by React StrictMode, etc.,\n// de-duplicate lifecycle:error and diagnostic events: print the same moduleId+payload only once.\nconst browserLifecycleSeen = new Set<string>()\nconst browserDiagnosticSeen = new Set<string>()\n\n// Align trace:react-render events with the most recent state:update txn (UI-only association).\nconst lastTxnByInstance = new Map<string, { readonly txnId: string; readonly txnSeq: number }>()\n\n// trace:react-render / trace:react-selector may enter the sink before state:update (reordering due to 
concurrency/scheduling).\n// To provide usable txn anchors in Devtools/UI, we allow a one-time backfill for refs missing txn fields.\nconst pendingTxnAlignmentByInstance = new Map<string, Array<RuntimeDebugEventRef>>()\n\nconst enqueuePendingTxnAlignment = (instanceId: string, ref: RuntimeDebugEventRef): void => {\n const list = pendingTxnAlignmentByInstance.get(instanceId)\n if (!list) {\n pendingTxnAlignmentByInstance.set(instanceId, [ref])\n return\n }\n list.push(ref)\n if (list.length > 64) {\n list.shift()\n }\n}\n\nconst backfillPendingTxnAlignment = (\n instanceId: string,\n txn: { readonly txnId: string; readonly txnSeq: number },\n): void => {\n const pending = pendingTxnAlignmentByInstance.get(instanceId)\n if (!pending || pending.length === 0) {\n pendingTxnAlignmentByInstance.delete(instanceId)\n return\n }\n\n for (const ref of pending) {\n const anyRef: any = ref as any\n if (anyRef.txnId == null) {\n anyRef.txnId = txn.txnId\n }\n if (typeof anyRef.txnSeq !== 'number' || anyRef.txnSeq <= 0) {\n anyRef.txnSeq = txn.txnSeq\n }\n }\n\n pendingTxnAlignmentByInstance.delete(instanceId)\n}\n\nconst lifecycleErrorLog = (event: Extract<Event, { readonly type: 'lifecycle:error' }>) => {\n const moduleId = event.moduleId ?? 'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>)\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const message = `[Logix][module=${moduleId}] lifecycle:error\\n${causePretty}`\n\n return Effect.logError(message).pipe(\n Effect.annotateLogs({\n 'logix.moduleId': moduleId,\n 'logix.event': 'lifecycle:error',\n 'logix.cause': causePretty,\n }),\n )\n}\n\nconst diagnosticLog = (event: Extract<Event, { readonly type: 'diagnostic' }>) => {\n const moduleId = event.moduleId ?? 
'unknown'\n const header = `[Logix][module=${moduleId}] diagnostic(${event.severity})`\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n const msg = `${header}\\n${detail}`\n\n const base =\n event.severity === 'warning'\n ? Effect.logWarning(msg)\n : event.severity === 'info'\n ? Effect.logInfo(msg)\n : Effect.logError(msg)\n\n const annotations: Record<string, unknown> = {\n 'logix.moduleId': moduleId,\n 'logix.event': `diagnostic(${event.severity})`,\n 'logix.diagnostic.code': event.code,\n 'logix.diagnostic.message': event.message,\n }\n if (event.hint) {\n annotations['logix.diagnostic.hint'] = event.hint\n }\n if (event.actionTag) {\n annotations['logix.diagnostic.actionTag'] = event.actionTag\n }\n\n return base.pipe(Effect.annotateLogs(annotations))\n}\n\n/**\n * Default Layer composition based on FiberRef.currentDebugSinks:\n * - Uses Layer.locallyScoped to inject Debug sinks via FiberRef state.\n * - Avoids misusing FiberRef as a Context.Tag.\n */\nexport const noopLayer = Layer.succeed(currentDebugSinks, [])\n\n/**\n * errorOnlyLayer:\n * - Default DebugSink implementation that only cares about lifecycle:error events.\n * - Suitable as a \"minimum observability\" layer so fatal errors don't silently disappear.\n * - Other events (module:init/destroy, action:dispatch, state:update) are not recorded by default.\n */\nconst errorOnlySink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? 
diagnosticLog(event)\n : Effect.void,\n}\n\nexport const errorOnlyLayer = Layer.succeed(currentDebugSinks, [errorOnlySink])\n\nexport const isErrorOnlyOnlySinks = (sinks: ReadonlyArray<Sink>): boolean => sinks.length === 1 && sinks[0] === errorOnlySink\n\n/**\n * consoleLayer:\n * - Full debug layer that logs all Debug events via Effect logs (logfmt / structured).\n * - Suitable as an observability layer for general environments (Node / tests).\n */\nconst consoleSink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event }),\n}\n\nexport const consoleLayer = Layer.succeed(currentDebugSinks, [consoleSink])\n\nconst isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined'\n\n// Shared browser console rendering logic used by the default DebugSink and browserConsoleLayer.\nconst renderBrowserConsoleEvent = (event: Event): Effect.Effect<void> => {\n // trace:* events: shown as separate groups in browsers for Playground / DevTools observation.\n if (typeof (event as any).type === 'string' && (event as any).type.startsWith('trace:')) {\n const moduleId = (event as any).moduleId ?? 'unknown'\n const type = (event as any).type\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c trace %c' + moduleId + '%c ' + String(type),\n 'color:#6b7280;font-weight:bold', // tag\n 'color:#3b82f6', // label\n 'color:#9ca3af', // module id\n 'color:#6b7280', // type\n )\n // eslint-disable-next-line no-console\n console.log(event)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'lifecycle:error') {\n const moduleId = event.moduleId ?? 
'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>)\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const key = `${moduleId}|${causePretty}`\n if (browserLifecycleSeen.has(key)) {\n return Effect.void\n }\n browserLifecycleSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c lifecycle:error %c' + moduleId,\n 'color:#ef4444;font-weight:bold', // tag\n 'color:#ef4444', // label\n 'color:#9ca3af', // module id\n )\n // eslint-disable-next-line no-console\n console.error(causePretty)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'diagnostic') {\n const moduleId = event.moduleId ?? 'unknown'\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n\n const color =\n event.severity === 'warning' ? 'color:#d97706' : event.severity === 'info' ? 'color:#3b82f6' : 'color:#ef4444'\n\n const label =\n event.severity === 'warning'\n ? 'diagnostic(warning)'\n : event.severity === 'info'\n ? 
'diagnostic(info)'\n : 'diagnostic(error)'\n\n const key = `${moduleId}|${event.code}|${event.message}`\n if (browserDiagnosticSeen.has(key)) {\n return Effect.void\n }\n browserDiagnosticSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c ' + label + '%c module=' + moduleId,\n 'color:#6b7280;font-weight:bold',\n color,\n 'color:#9ca3af',\n )\n if (event.severity === 'warning') {\n // eslint-disable-next-line no-console\n console.warn(detail)\n } else if (event.severity === 'info') {\n // eslint-disable-next-line no-console\n console.info(detail)\n } else {\n // eslint-disable-next-line no-console\n console.error(detail)\n }\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n // Other events are not printed to the browser console by default to avoid being too noisy during development.\n // For internal debug events, use a custom Debug sink or use consoleLayer in Node.\n return Effect.void\n}\n\n/**\n * Browser console debug layer:\n * - In browsers, uses console.groupCollapsed + colored labels to simulate pretty logger grouping.\n * - In non-browser environments, falls back to consoleLayer's Effect logging implementation.\n */\nconst browserConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n // Non-browser: fall back to consoleLayer behavior (Effect.log*).\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? 
diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event })\n }\n\n return renderBrowserConsoleEvent(event)\n },\n}\n\nexport const browserConsoleLayer = Layer.succeed(currentDebugSinks, [browserConsoleSink])\n\n/**\n * Browser diagnostic-only debug layer:\n * - In browsers, prints only lifecycle:error + diagnostic(warning/error) via console.groupCollapsed.\n * - Drops trace:* and other high-frequency events from the browser console (use DevtoolsHub instead).\n * - In non-browser environments, behaves like errorOnlySink (Effect.log*).\n */\nconst browserDiagnosticConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? diagnosticLog(event)\n : Effect.void\n }\n\n return event.type === 'lifecycle:error' || (event.type === 'diagnostic' && event.severity !== 'info')\n ? renderBrowserConsoleEvent(event)\n : Effect.void\n },\n}\n\nexport const browserDiagnosticConsoleLayer = Layer.succeed(currentDebugSinks, [browserDiagnosticConsoleSink])\n\n/**\n * Browser-friendly Logger layer: replaces the default logger with Effect's pretty logger (browser mode).\n * - Avoids hand-written console styles; reuses Effect's colored/grouped formatting.\n * - Safely degrades to the default logger in server environments.\n */\nexport const browserPrettyLoggerLayer = Layer.effect(\n Logger.CurrentLoggers,\n Effect.gen(function* () {\n const current = yield* Effect.service(Logger.CurrentLoggers)\n return new Set(\n [...current].filter((logger) => logger !== Logger.defaultLogger).concat(Logger.consolePretty({ mode: 'browser', colors: true })),\n )\n }),\n)\n\n/**\n * defaultLayer:\n * - Public default layer; currently equivalent to errorOnlyLayer.\n * - Records lifecycle:error only, avoiding a large volume of action/state logs by default.\n */\nexport const defaultLayer = errorOnlyLayer\n\nexport const record = (event: Event) =>\n 
Effect.gen(function* () {\n const sinks = yield* Effect.service(currentDebugSinks)\n\n // Fast path: production default installs errorOnlyLayer (sinks=1).\n // Avoid paying diagnostics FiberRef + enrichment costs for high-frequency events that are always dropped by errorOnly.\n if (isErrorOnlyOnlySinks(sinks)) {\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic' && event.severity !== 'info') {\n yield* diagnosticLog(event)\n }\n return\n }\n\n // Fast path: when no sinks are installed, only a small subset of events are ever surfaced.\n // Avoid paying per-event FiberRef + enrichment costs for high-frequency events like state:update.\n if (sinks.length === 0) {\n if (isBrowser) {\n if (event.type === 'lifecycle:error' || event.type === 'diagnostic') {\n yield* renderBrowserConsoleEvent(event)\n }\n return\n }\n\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic') {\n yield* diagnosticLog(event)\n }\n return\n }\n\n // Trace events are performance-sensitive and should be explicitly enabled.\n // Keep the check scoped to trace:* only so non-trace events stay on the fast path.\n if (typeof event.type === 'string' && event.type.startsWith('trace:')) {\n const mode = yield* Effect.service(currentTraceMode)\n if (mode === 'off') return\n }\n\n const enriched = event as Event\n\n const diagnosticsLevel = yield* Effect.service(currentDiagnosticsLevel)\n\n // Enrich Debug.Event with basic fields (enabled only when diagnosticsLevel!=off):\n // - timestamp: for Devtools/Timeline/Overview time aggregation; avoids UI-side \"first observed time\" distortion.\n // - runtimeLabel: from FiberRef for grouping by runtime (injected only when not already provided by the event).\n let now: number | undefined\n const getNow = (): number => {\n if (now === undefined) now = Date.now()\n return now\n }\n\n // diagnostics=off: keep near-zero cost; do 
not add timestamp for high-frequency events (avoid extra Date.now()).\n // Low-frequency events (lifecycle:error/diagnostic) may still get timestamp for easier debugging.\n if (\n enriched.timestamp === undefined &&\n (diagnosticsLevel !== 'off' || enriched.type === 'lifecycle:error' || enriched.type === 'diagnostic')\n ) {\n ;(enriched as any).timestamp = getNow()\n }\n if (diagnosticsLevel !== 'off' && enriched.runtimeLabel === undefined) {\n const runtimeLabel = yield* Effect.service(currentRuntimeLabel)\n if (runtimeLabel) {\n ;(enriched as any).runtimeLabel = runtimeLabel\n }\n }\n\n if (enriched.type === 'diagnostic' && (enriched as any).txnId === undefined) {\n const txnId = yield* Effect.service(currentTxnId)\n if (txnId) {\n ;(enriched as any).txnId = txnId\n }\n }\n\n if (\n diagnosticsLevel !== 'off' &&\n (enriched as any).type === 'trace:effectop' &&\n (enriched as any).linkId === undefined\n ) {\n const maybeLinkId = yield* Effect.serviceOption(EffectOpCore.currentLinkId)\n if (Option.isSome(maybeLinkId) && maybeLinkId.value) {\n ;(enriched as any).linkId = maybeLinkId.value\n }\n }\n\n if (sinks.length === 1) {\n yield* sinks[0]!.record(enriched)\n return\n }\n\n yield* Effect.forEach(sinks as ReadonlyArray<Sink>, (sink) => sink.record(enriched), { discard: true })\n })\n\n/**\n * Normalizes internal Debug.Event into RuntimeDebugEventRef:\n * - Allows Devtools / Runtime to consume Debug events uniformly.\n * - Does not change DebugSink behavior; provides a structured view only.\n */\nexport const toRuntimeDebugEventRef = (\n event: Event,\n options?: {\n readonly diagnosticsLevel?: DiagnosticsLevel\n readonly materialization?: DiagnosticsMaterialization\n readonly eventSeq?: number\n readonly onMetaProjection?: (projection: {\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: JsonDowngradeReason\n }) => void\n },\n): RuntimeDebugEventRef | undefined => {\n const diagnosticsLevel = options?.diagnosticsLevel ?? 
'full'\n if (diagnosticsLevel === 'off') {\n return undefined\n }\n\n const isLightLike = diagnosticsLevel === 'light' || diagnosticsLevel === 'sampled'\n const materialization = options?.materialization ?? 'eager'\n const isLazyMaterialization = materialization === 'lazy'\n\n const timestamp =\n typeof event.timestamp === 'number' && Number.isFinite(event.timestamp) ? event.timestamp : Date.now()\n\n const moduleIdRaw = (event as any).moduleId\n const moduleId = typeof moduleIdRaw === 'string' && moduleIdRaw.length > 0 ? moduleIdRaw : 'unknown'\n\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n\n const runtimeLabelRaw = (event as any).runtimeLabel\n const runtimeLabel = typeof runtimeLabelRaw === 'string' && runtimeLabelRaw.length > 0 ? runtimeLabelRaw : undefined\n\n const txnSeqRaw = (event as any).txnSeq\n const txnSeq =\n typeof txnSeqRaw === 'number' && Number.isFinite(txnSeqRaw) && txnSeqRaw >= 0 ? Math.floor(txnSeqRaw) : 0\n\n const txnIdRaw = (event as any).txnId\n const txnId =\n typeof txnIdRaw === 'string' && txnIdRaw.length > 0\n ? txnIdRaw\n : txnSeq > 0\n ? `${instanceId}::t${txnSeq}`\n : undefined\n\n const linkId = (() => {\n const linkIdRaw = (event as any).linkId\n if (typeof linkIdRaw === 'string' && linkIdRaw.length > 0) return linkIdRaw\n\n // trace:*: allow fallback extraction from data.meta.linkId (avoid UI diving into deep meta).\n if (typeof (event as any).type !== 'string' || !(event as any).type.startsWith('trace:')) {\n return undefined\n }\n\n const data: any = (event as any).data\n const meta: any = data?.meta\n const linkIdFromMeta = meta?.linkId\n if (typeof linkIdFromMeta === 'string' && linkIdFromMeta.length > 0) return linkIdFromMeta\n\n return undefined\n })()\n\n const eventSeqRaw = options?.eventSeq\n const eventSeq =\n typeof eventSeqRaw === 'number' && Number.isFinite(eventSeqRaw) && eventSeqRaw > 0\n ? 
Math.floor(eventSeqRaw)\n : nextEventSeq()\n const eventId = makeEventId(instanceId, eventSeq)\n\n const base = {\n eventId,\n eventSeq,\n moduleId,\n instanceId,\n runtimeLabel,\n txnSeq,\n txnId,\n linkId,\n timestamp,\n } as const\n\n let downgrade: DowngradeReason | undefined\n\n const withDowngrade = (ref: Omit<RuntimeDebugEventRef, 'downgrade'>): RuntimeDebugEventRef => {\n if (!downgrade) return ref\n return { ...ref, downgrade: { reason: downgrade } }\n }\n\n switch (event.type) {\n case 'module:init':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:init',\n })\n case 'module:destroy':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:destroy',\n })\n case 'lifecycle:phase': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:phase' }>\n const metaInput = isLightLike\n ? { type: 'lifecycle:phase', phase: e.phase, name: e.name }\n : { type: 'lifecycle:phase', phase: e.phase, name: e.name, payload: e.payload }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: e.name,\n meta: metaProjection.value,\n })\n }\n case 'action:dispatch': {\n const action: any = (event as any).action\n const actionTagRaw = (event as any).actionTag\n const tag = typeof actionTagRaw === 'string' && actionTagRaw.length > 0 ? actionTagRaw : (action?._tag ?? action?.type)\n const label = String(tag ?? 'action:dispatch')\n const labelNormalized = label.length > 0 ? label : 'unknown'\n const unknownAction = (event as any).unknownAction === true ? true : undefined\n const metaInput = isLightLike\n ? { actionTag: labelNormalized, ...(unknownAction ? { unknownAction: true } : {}) }\n : { action, ...(unknownAction ? 
{ unknownAction: true } : {}) }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (unknownAction) {\n downgrade = mergeDowngrade(downgrade, 'unknown')\n }\n return withDowngrade({\n ...base,\n kind: 'action',\n label: labelNormalized,\n meta: metaProjection.value,\n })\n }\n case 'state:update': {\n const e = event as Extract<Event, { readonly type: 'state:update' }>\n const dirtySetCanonical = stripDirtyRootPaths(e.dirtySet)\n const slimMetaInput: Record<string, unknown> = {}\n if (dirtySetCanonical !== undefined) slimMetaInput.dirtySet = dirtySetCanonical\n if (e.patchCount !== undefined) slimMetaInput.patchCount = e.patchCount\n if (e.patchesTruncated !== undefined) slimMetaInput.patchesTruncated = e.patchesTruncated\n if (e.patchesTruncatedReason !== undefined) slimMetaInput.patchesTruncatedReason = e.patchesTruncatedReason\n if (e.staticIrDigest !== undefined) slimMetaInput.staticIrDigest = e.staticIrDigest\n if (e.commitMode !== undefined) slimMetaInput.commitMode = e.commitMode\n if (e.priority !== undefined) slimMetaInput.priority = e.priority\n if (e.originKind !== undefined) slimMetaInput.originKind = e.originKind\n if (e.originName !== undefined) slimMetaInput.originName = e.originName\n\n const metaInput = isLightLike\n ? isLazyMaterialization\n ? slimMetaInput\n : { state: e.state, ...slimMetaInput }\n : isLazyMaterialization\n ? slimMetaInput\n : { state: e.state, ...slimMetaInput, traitSummary: e.traitSummary, replayEvent: e.replayEvent }\n\n const metaProjection = isLazyMaterialization\n ? 
{\n value: metaInput as unknown as JsonValue,\n stats: { dropped: 0, oversized: 0, nonSerializable: 0 } satisfies JsonValueProjectionStats,\n downgrade: undefined,\n }\n : projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (txnId) {\n lastTxnByInstance.set(instanceId, { txnId, txnSeq })\n backfillPendingTxnAlignment(instanceId, { txnId, txnSeq })\n }\n return withDowngrade({\n ...base,\n kind: 'state',\n label: 'state:update',\n meta: metaProjection.value,\n })\n }\n case 'process:start':\n case 'process:stop':\n case 'process:restart':\n case 'process:trigger':\n case 'process:dispatch':\n case 'process:error': {\n const e = event as Extract<\n Event,\n {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n }\n >\n\n const ts2 = typeof e.timestampMs === 'number' && Number.isFinite(e.timestampMs) ? 
e.timestampMs : timestamp\n\n const metaInput = {\n identity: e.identity,\n severity: e.severity,\n eventSeq: e.eventSeq,\n timestampMs: e.timestampMs,\n trigger: e.trigger,\n dispatch: e.dispatch,\n error: e.error,\n budgetEnvelope: (e as any).budgetEnvelope,\n degrade: (e as any).degrade,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const processDegradeReason = (() => {\n const marker = (e as any).degrade\n if (!marker || typeof marker !== 'object' || Array.isArray(marker)) return undefined\n if ((marker as any).degraded !== true) return undefined\n const reason = (marker as any).reason\n if (reason === 'budget_exceeded') return 'budget_exceeded' as const\n if (reason === 'payload_oversized') return 'oversized' as const\n if (reason === 'payload_non_serializable') return 'non_serializable' as const\n return 'unknown' as const\n })()\n downgrade = mergeDowngrade(downgrade, processDegradeReason)\n\n const errorSummary =\n e.type === 'process:error' || e.type === 'process:restart'\n ? (e.error as any as SerializableErrorSummary | undefined)\n : undefined\n\n return withDowngrade({\n ...base,\n timestamp: ts2,\n kind: 'process',\n label: e.type,\n meta: metaProjection.value,\n errorSummary,\n })\n }\n case 'lifecycle:error': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:error' }>\n const summary = toSerializableErrorSummary(e.cause)\n downgrade = mergeDowngrade(downgrade, summary.downgrade)\n const metaInput = isLightLike\n ? 
{ type: 'lifecycle:error', phase: e.phase, name: e.hook }\n : {\n type: 'lifecycle:error',\n phase: e.phase,\n name: e.hook,\n hook: e.hook,\n taskId: e.taskId,\n origin: e.origin,\n txnSeq: e.txnSeq,\n opSeq: e.opSeq,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'lifecycle:error',\n meta: metaProjection.value,\n errorSummary: summary.errorSummary,\n })\n }\n case 'diagnostic': {\n const e = event as Extract<Event, { readonly type: 'diagnostic' }>\n const metaInput = {\n code: e.code,\n severity: e.severity,\n message: e.message,\n hint: e.hint,\n actionTag: e.actionTag,\n kind: e.kind,\n opSeq: e.opSeq,\n trigger: e.trigger,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.code,\n meta: metaProjection.value,\n })\n }\n case 'warn:priority-inversion': {\n const e = event as Extract<Event, { readonly type: 'warn:priority-inversion' }>\n const metaInput = isLightLike\n ? 
{\n tickSeq: e.tickSeq,\n reason: e.reason,\n selectorId: e.selectorId,\n }\n : {\n tickSeq: e.tickSeq,\n reason: e.reason,\n selectorId: e.selectorId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.type,\n meta: metaProjection.value,\n })\n }\n case 'warn:microtask-starvation': {\n const e = event as Extract<Event, { readonly type: 'warn:microtask-starvation' }>\n const metaInput = isLightLike\n ? {\n tickSeq: e.tickSeq,\n microtaskChainDepth: e.microtaskChainDepth,\n }\n : {\n tickSeq: e.tickSeq,\n microtaskChainDepth: e.microtaskChainDepth,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.type,\n meta: metaProjection.value,\n })\n }\n default: {\n if (typeof event.type !== 'string' || !event.type.startsWith('trace:')) {\n return undefined\n }\n\n // trace:devtools:ring-trim-policy: emit a stable slim payload for ring trim policy changes.\n if (event.type === 'trace:devtools:ring-trim-policy') {\n const data = (event as {\n readonly data?: {\n readonly mode?: unknown\n readonly threshold?: unknown\n readonly bufferSize?: unknown\n }\n }).data\n const metaInput = {\n mode: data?.mode,\n threshold: data?.threshold,\n bufferSize: data?.bufferSize,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: 
metaProjection.value,\n })\n }\n\n // trace:tick: runtime tick evidence; keep Slim payload even in light tier.\n if (event.type === 'trace:tick') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n tickSeq: data?.tickSeq,\n phase: data?.phase,\n schedule: data?.schedule,\n triggerSummary: data?.triggerSummary,\n anchors: data?.anchors,\n budget: data?.budget,\n backlog: data?.backlog,\n result: data?.result,\n }\n : {\n data,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:txn-lane: slim evidence for Txn Lanes (lane/backlog/reasons), used for Devtools summary and offline export.\n if (event.type === 'trace:txn-lane') {\n const data: any = (event as any).data\n const evidence = data?.evidence ?? data\n\n const metaProjection = projectJsonValue(evidence)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const label =\n typeof evidence?.kind === 'string' && evidence.kind.length > 0 ? String(evidence.kind) : 'txn-lane'\n\n return withDowngrade({\n ...base,\n kind: 'txn-lane',\n label,\n meta: metaProjection.value,\n })\n }\n\n if (event.type === 'trace:txn-phase') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n kind: data?.kind,\n originKind: data?.originKind,\n originName: data?.originName,\n commitMode: data?.commitMode,\n priority: data?.priority,\n txnPreludeMs: data?.txnPreludeMs,\n queue: data?.queue\n ? 
{\n lane: data.queue.lane,\n contextLookupMs: data.queue.contextLookupMs,\n resolvePolicyMs: data.queue.resolvePolicyMs,\n backpressureMs: data.queue.backpressureMs,\n enqueueBookkeepingMs: data.queue.enqueueBookkeepingMs,\n queueWaitMs: data.queue.queueWaitMs,\n startHandoffMs: data.queue.startHandoffMs,\n startMode: data.queue.startMode,\n }\n : undefined,\n dispatchActionRecordMs: data?.dispatchActionRecordMs,\n dispatchActionCommitHubMs: data?.dispatchActionCommitHubMs,\n dispatchActionCount: data?.dispatchActionCount,\n bodyShellMs: data?.bodyShellMs,\n asyncEscapeGuardMs: data?.asyncEscapeGuardMs,\n traitConvergeMs: data?.traitConvergeMs,\n scopedValidateMs: data?.scopedValidateMs,\n sourceSyncMs: data?.sourceSyncMs,\n commit: data?.commit\n ? {\n totalMs: data.commit.totalMs,\n rowIdSyncMs: data.commit.rowIdSyncMs,\n publishCommitMs: data.commit.publishCommitMs,\n stateUpdateDebugRecordMs: data.commit.stateUpdateDebugRecordMs,\n onCommitBeforeStateUpdateMs: data.commit.onCommitBeforeStateUpdateMs,\n onCommitAfterStateUpdateMs: data.commit.onCommitAfterStateUpdateMs,\n }\n : undefined,\n }\n : data\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:react-render / trace:react-selector: keep slim meta only (field trimming is handled by JsonValue projection).\n if (event.type === 'trace:react-render' || event.type === 'trace:react-selector') {\n const data: any = (event as any).data\n const metaProjection = projectJsonValue(\n isLightLike\n ? 
{\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n }\n : {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n meta: data?.meta,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n const label =\n typeof data?.componentLabel === 'string' && data.componentLabel.length > 0\n ? data.componentLabel\n : event.type === 'trace:react-selector'\n ? 'react-selector'\n : 'react-render'\n const last = lastTxnByInstance.get(instanceId)\n const txnSeqFromMeta =\n typeof data?.meta?.txnSeq === 'number' && Number.isFinite(data.meta.txnSeq) && data.meta.txnSeq >= 0\n ? Math.floor(data.meta.txnSeq)\n : undefined\n const txnIdFromMeta =\n typeof data?.meta?.txnId === 'string' && data.meta.txnId.length > 0 ? data.meta.txnId : undefined\n const txnIdAligned = txnIdFromMeta ?? base.txnId ?? last?.txnId\n const txnSeqAligned = txnSeqFromMeta ?? (base.txnSeq > 0 ? base.txnSeq : (last?.txnSeq ?? base.txnSeq))\n const ref = withDowngrade({\n ...base,\n txnId: txnIdAligned,\n txnSeq: txnSeqAligned,\n kind: event.type === 'trace:react-selector' ? 
'react-selector' : 'react-render',\n label,\n meta: metaProjection.value,\n })\n\n if (instanceId !== 'unknown' && (ref.txnId == null || ref.txnSeq <= 0)) {\n enqueuePendingTxnAlignment(instanceId, ref)\n }\n\n return ref\n }\n\n // trace:selector:eval: SelectorGraph evaluation evidence within commit (used for txn→selector→render causal chain).\n if (event.type === 'trace:selector:eval') {\n const data: any = (event as any).data\n const metaInput = {\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n changed: data?.changed,\n evalMs: data?.evalMs,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:exec-vm: Exec VM hit/miss evidence (049). In light tier we keep minimal summary fields.\n if (event.type === 'trace:exec-vm') {\n const data: any = (event as any).data\n const metaInput = {\n version: data?.version,\n stage: data?.stage,\n hit: data?.hit,\n reasonCode: data?.reasonCode ?? 
data?.reason,\n reasonDetail: data?.reasonDetail,\n execIrVersion: data?.execIrVersion,\n execIrHash: data?.execIrHash,\n serviceId: data?.serviceId,\n implId: data?.implId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:converge: converge evidence must be exportable (JsonValue hard gate) and trims heavy fields in light tier.\n if (event.type === 'trace:trait:converge') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:converge' }>).data\n const metaInput =\n diagnosticsLevel === 'light'\n ? stripTraitConvergeLight(data)\n : diagnosticsLevel === 'sampled'\n ? stripTraitConvergeSampled(data)\n : stripTraitConvergeLegacyFields(data)\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:converge',\n label: 'trait:converge',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:check: validation diagnostics must be exportable and stay slim in light tier (keep key fields).\n if (event.type === 'trace:trait:check') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:check' }>).data\n const metaInput = isLightLike ? 
stripTraitCheckLight(data) : data\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:check',\n label: 'trait:check',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:validate: validation decision summary must be exportable and slim in light tier (no heavy fields by default).\n if (event.type === 'trace:trait:validate') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:validate' }>).data\n const metaProjection = projectJsonValue(data)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:validate',\n label: 'trait:validate',\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits: final traits snapshot must be exportable and slim in light tier (digest/count).\n if (event.type === 'trace:module:traits') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n digest: data?.digest,\n count: data?.count,\n }\n : {\n digest: data?.digest,\n count: data?.count,\n traits: data?.traits,\n provenanceIndex: data?.provenanceIndex,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits:conflict: conflict details must be exportable; avoid relying on truncated lifecycle:error messages.\n if (event.type === 'trace:module:traits:conflict') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n conflictCount: data?.conflictCount,\n traitIds: data?.traitIds,\n }\n : {\n conflictCount: data?.conflictCount,\n conflicts: data?.conflicts,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:descriptor: keep key anchors even in light tier (avoid data being fully trimmed).\n if (event.type === 'trace:module:descriptor') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n id: data?.id,\n traits: data?.traits,\n source: data?.source,\n }\n : { data }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:effectop: keep slim op meta and prefer EffectOp.meta.moduleId when present.\n if (event.type === 'trace:effectop') {\n const data: any = (event as any).data\n const opMeta: any = data?.meta\n const opKind = (data?.kind ?? 'service') as RuntimeDebugEventKind\n const label = typeof data?.name === 'string' ? data.name : 'effectop'\n const moduleId2 = typeof opMeta?.moduleId === 'string' ? opMeta.moduleId : moduleId\n const txnId2 = typeof opMeta?.txnId === 'string' && opMeta.txnId.length > 0 ? opMeta.txnId : base.txnId\n const txnSeq2 =\n typeof opMeta?.txnSeq === 'number' && Number.isFinite(opMeta.txnSeq) && opMeta.txnSeq >= 0\n ? Math.floor(opMeta.txnSeq)\n : base.txnSeq\n\n const metaInput = isLightLike\n ? {\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n meta: opMeta,\n }\n : {\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n payload: data?.payload,\n meta: opMeta,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n moduleId: moduleId2,\n txnId: txnId2,\n txnSeq: txnSeq2,\n kind: opKind,\n label,\n meta: metaProjection.value,\n })\n }\n\n // Other trace:* events: categorize as devtools and trim meta by tier.\n const metaProjection = projectJsonValue(\n isLightLike\n ? 
{\n data: undefined,\n }\n : {\n data: (event as any).data,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n }\n}\n","export type JsonValue =\n | null\n | boolean\n | number\n | string\n | ReadonlyArray<JsonValue>\n | { readonly [key: string]: JsonValue }\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport const isJsonValue = (input: unknown): input is JsonValue => {\n const seen = new WeakSet<object>()\n\n const loop = (value: unknown, depth: number): value is JsonValue => {\n if (depth > 64) return false\n if (value === null) return true\n\n switch (typeof value) {\n case 'string':\n case 'boolean':\n return true\n case 'number':\n return Number.isFinite(value)\n case 'object': {\n if (Array.isArray(value)) {\n if (seen.has(value)) return false\n seen.add(value)\n for (const item of value) {\n if (!loop(item, depth + 1)) return false\n }\n return true\n }\n\n if (!isPlainRecord(value)) return false\n if (seen.has(value)) return false\n seen.add(value)\n\n for (const v of Object.values(value)) {\n if (!loop(v, depth + 1)) return false\n }\n\n return true\n }\n default:\n return false\n }\n }\n\n return loop(input, 0)\n}\n\nexport interface JsonValueProjectionStats {\n readonly dropped: number\n readonly oversized: number\n readonly nonSerializable: number\n}\n\nexport interface JsonValueProjection {\n readonly value: JsonValue\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: DowngradeReason\n}\n\nexport interface JsonValueProjectOptions {\n readonly maxDepth?: number\n readonly maxObjectKeys?: number\n readonly maxArrayLength?: number\n readonly maxStringLength?: number\n readonly maxJsonBytes?: number\n readonly oversizedPreviewBytes?: number\n}\n\nconst 
defaultOptions: Required<JsonValueProjectOptions> = {\n maxDepth: 6,\n maxObjectKeys: 32,\n maxArrayLength: 32,\n maxStringLength: 256,\n maxJsonBytes: 4 * 1024,\n oversizedPreviewBytes: 256,\n}\n\nconst truncateString = (value: string, maxLen: number, stats: MutableStats): string => {\n if (value.length <= maxLen) return value\n stats.oversized += 1\n return value.slice(0, maxLen)\n}\n\ntype MutableStats = {\n dropped: number\n oversized: number\n nonSerializable: number\n}\n\nconst mergeDowngrade = (current: DowngradeReason | undefined, next: DowngradeReason): DowngradeReason => {\n if (!current) return next\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\nfunction isPlainRecord(value: unknown): value is Record<string, unknown> {\n if (typeof value !== 'object' || value === null) return false\n const proto = Object.getPrototypeOf(value)\n return proto === Object.prototype || proto === null\n}\n\nconst asNumber = (value: number, stats: MutableStats): JsonValue => {\n if (Number.isFinite(value)) return value\n stats.nonSerializable += 1\n return String(value)\n}\n\nconst toJsonValueInternal = (\n input: unknown,\n options: Required<JsonValueProjectOptions>,\n stats: MutableStats,\n seen: WeakSet<object>,\n depth: number,\n): JsonValue => {\n if (input === null) return null\n\n switch (typeof input) {\n case 'string':\n return truncateString(input, options.maxStringLength, stats)\n case 'number':\n return asNumber(input, stats)\n case 'boolean':\n return input\n case 'bigint':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'symbol':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'function':\n stats.nonSerializable += 1\n return '[Function]'\n case 'undefined':\n stats.dropped += 1\n return null\n 
}\n\n // object\n if (depth >= options.maxDepth) {\n stats.oversized += 1\n return '[Truncated]'\n }\n\n if (input instanceof Date) {\n return input.toISOString()\n }\n\n if (input instanceof Error) {\n stats.nonSerializable += 1\n return {\n name: truncateString(input.name, options.maxStringLength, stats),\n message: truncateString(input.message, options.maxStringLength, stats),\n }\n }\n\n if (typeof input === 'object') {\n if (seen.has(input)) {\n stats.nonSerializable += 1\n return '[Circular]'\n }\n seen.add(input)\n }\n\n if (Array.isArray(input)) {\n const out: Array<JsonValue> = []\n const limit = Math.min(input.length, options.maxArrayLength)\n for (let i = 0; i < limit; i++) {\n out.push(toJsonValueInternal(input[i], options, stats, seen, depth + 1))\n }\n if (input.length > limit) {\n stats.oversized += 1\n out.push(`[...${input.length - limit} more]`)\n }\n return out\n }\n\n if (!isPlainRecord(input)) {\n stats.nonSerializable += 1\n return truncateString(String(input), options.maxStringLength, stats)\n }\n\n const keys = Object.keys(input).sort()\n const limit = Math.min(keys.length, options.maxObjectKeys)\n const out: Record<string, JsonValue> = {}\n\n for (let i = 0; i < limit; i++) {\n const rawKey = keys[i]!\n const rawValue = (input as any)[rawKey]\n const key = truncateString(rawKey, options.maxStringLength, stats)\n if (rawValue === undefined) {\n stats.dropped += 1\n continue\n }\n out[key] = toJsonValueInternal(rawValue, options, stats, seen, depth + 1)\n }\n\n if (keys.length > limit) {\n stats.oversized += 1\n out.__truncatedKeys = keys.length - limit\n }\n\n return out\n}\n\nexport const projectJsonValue = (input: unknown, options?: JsonValueProjectOptions): JsonValueProjection => {\n const resolved: Required<JsonValueProjectOptions> = { ...defaultOptions, ...(options ?? 
{}) }\n const stats: MutableStats = { dropped: 0, oversized: 0, nonSerializable: 0 }\n const seen = new WeakSet<object>()\n\n let downgrade: DowngradeReason | undefined\n const value = toJsonValueInternal(input, resolved, stats, seen, 0)\n\n if (stats.nonSerializable > 0) {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n }\n if (stats.oversized > 0) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n }\n\n // Hard gate: ensure JSON.stringify never throws and respect the max byte budget.\n try {\n const json = JSON.stringify(value)\n if (json.length > resolved.maxJsonBytes) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n const preview = json.slice(0, Math.min(resolved.oversizedPreviewBytes, resolved.maxJsonBytes))\n return {\n value: {\n _tag: 'oversized',\n bytes: json.length,\n preview,\n },\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized + 1,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n }\n } catch {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n return {\n value: '[Unserializable]',\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable + 1,\n },\n downgrade,\n }\n }\n\n return {\n value,\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n}\n","// EffectOp core model and middleware composition logic.\n// For higher-level Runtime / Devtools integration, see:\n// specs/000-module-traits-runtime/references/effectop-and-middleware.md\n\nimport { Effect, ServiceMap } from 'effect'\n\nexport const currentLinkId = ServiceMap.Reference<string | undefined>('@logixjs/core/CurrentLinkId', {\n defaultValue: () => undefined,\n})\n\n/**\n * OperationPolicy:\n * - Local policy markers (intent only; no rule logic attached).\n *\n * Constraints (enforced by Runtime/middleware together):\n * - Only observation-only capabilities (Observer) may be disabled; global 
guards must not be disabled.\n */\nexport interface OperationPolicy {\n readonly disableObservers?: boolean\n}\n\n/**\n * OperationRejected:\n * - Unified failure result when a guard rejects execution.\n * - Semantics: explicit failure with no business side effects (rejection must happen before user code executes).\n */\nexport interface OperationRejected {\n readonly _tag: 'OperationRejected'\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}\n\n/**\n * OperationError:\n * - Any boundary operation executed via EffectOp may be explicitly rejected by Guard middleware.\n * - Therefore, the middleware error channel must allow OperationRejected to be added.\n */\nexport type OperationError<E> = E | OperationRejected\n\nexport const makeOperationRejected = (params: {\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}): OperationRejected => ({\n _tag: 'OperationRejected',\n message: params.message,\n kind: params.kind,\n name: params.name,\n linkId: params.linkId,\n details: params.details,\n})\n\n/**\n * EffectOp: a unified representation of an Effect execution at an \"observable boundary\".\n *\n * - Out / Err / Env are the generic parameters of the underlying Effect.\n * - meta carries structured context needed by Devtools / Middleware.\n */\nexport interface EffectOp<Out = unknown, Err = unknown, Env = unknown> {\n readonly id: string\n readonly kind:\n | 'action'\n | 'flow'\n | 'state'\n | 'service'\n | 'lifecycle'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'devtools'\n readonly name: string\n readonly payload?: unknown\n readonly meta?: {\n /**\n * linkId:\n * - Operation chain id: multiple boundary ops in the same chain must share it.\n * - Runtime ensures this field is populated on all boundary ops.\n */\n linkId?: string\n moduleId?: string\n instanceId?: 
string\n runtimeLabel?: string\n txnId?: string\n txnSeq?: number\n opSeq?: number\n fieldPath?: string\n deps?: ReadonlyArray<string>\n from?: string\n to?: string\n traitNodeId?: string\n stepId?: string\n resourceId?: string\n key?: unknown\n trace?: ReadonlyArray<string>\n tags?: ReadonlyArray<string>\n policy?: OperationPolicy\n // Reserved extension slot for middleware/devtools to attach extra information.\n readonly [k: string]: unknown\n }\n readonly effect: Effect.Effect<Out, Err, Env>\n}\n\n/**\n * Middleware: the general middleware model for observing / wrapping / guarding EffectOps.\n */\nexport type Middleware = <A, E, R>(op: EffectOp<A, E, R>) => Effect.Effect<A, OperationError<E>, R>\n\nexport type MiddlewareStack = ReadonlyArray<Middleware>\n\nconst composeMiddlewareCache = new WeakMap<MiddlewareStack, Middleware>()\n\n/**\n * EffectOpMiddlewareEnv:\n * - A Service in Effect Env that carries the current Runtime's MiddlewareStack.\n * - Injected by Runtime.ts when constructing a ManagedRuntime.\n * - Runtime code (e.g. 
StateTrait.install) uses this Service to decide which MiddlewareStack to use.\n */\nexport interface EffectOpMiddlewareEnv {\n readonly stack: MiddlewareStack\n}\n\nexport class EffectOpMiddlewareTag extends ServiceMap.Service<\n EffectOpMiddlewareTag,\n EffectOpMiddlewareEnv\n>()('Logix/EffectOpMiddleware') {}\n\n/**\n * composeMiddleware:\n * - Composes Middleware from \"outer to inner\" in declaration order:\n * - stack = [mw1, mw2] => mw1 -> mw2 -> effect -> mw2 -> mw1\n * - Matches the reduceRight example in the reference docs.\n */\nexport const composeMiddleware = (stack: MiddlewareStack): Middleware => {\n const cached = composeMiddlewareCache.get(stack)\n if (cached) {\n return cached\n }\n\n const composed: Middleware = <A, E, R>(op: EffectOp<A, E, R>): Effect.Effect<A, OperationError<E>, R> =>\n stack.reduceRight<Effect.Effect<A, OperationError<E>, R>>(\n (eff, mw) => mw({ ...op, effect: eff } as any) as any,\n op.effect as Effect.Effect<A, OperationError<E>, R>,\n )\n\n composeMiddlewareCache.set(stack, composed)\n return composed\n}\n\n/**\n * runWithMiddleware:\n * - Executes a given EffectOp with a MiddlewareStack according to the composition rules.\n * - If the stack is empty, returns op.effect directly.\n */\nexport const runWithMiddleware = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> => {\n return Effect.gen(function* () {\n const existing = yield* Effect.service(currentLinkId)\n const metaLinkId = (op.meta as any)?.linkId\n const linkId = typeof metaLinkId === 'string' && metaLinkId.length > 0 ? metaLinkId : (existing ?? op.id)\n\n const nextOp: EffectOp<A, E, R> = {\n ...op,\n meta: {\n ...(op.meta ?? {}),\n linkId,\n },\n }\n\n const program = stack.length ? 
composeMiddleware(stack)(nextOp) : nextOp.effect\n\n // linkId is created at the boundary root and reused for nested ops (the FiberRef is the global single source of truth).\n // NOTE: middleware may explicitly reject with OperationRejected.\n return yield* Effect.provideService(program as any, currentLinkId, linkId)\n }) as Effect.Effect<A, E, R>\n}\n","import { Effect, Layer, ServiceMap } from 'effect'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\n\n/**\n * ConvergeStaticIrCollector:\n * - Consumer interface for collecting ConvergeStaticIrExport (de-duplicated/indexed by staticIrDigest); an internal injectable capability.\n * - Typical implementations: DevtoolsHub (process-level) / EvidenceCollector (RunSession-level).\n *\n * Notes:\n * - Uses FiberRef<ReadonlyArray<...>> to allow appending multiple collectors within the same scope (similar to Debug sinks).\n * - ModuleRuntime reads the FiberRef value during installation and captures it in a closure, avoiding Env lookup on hot paths.\n */\nexport interface ConvergeStaticIrCollector {\n readonly register: (ir: ConvergeStaticIrExport) => void\n}\n\nexport const currentConvergeStaticIrCollectors = ServiceMap.Reference<ReadonlyArray<ConvergeStaticIrCollector>>(\n '@logixjs/core/ConvergeStaticIrCollectors',\n {\n defaultValue: () => [],\n },\n)\n\nexport const appendConvergeStaticIrCollectors = (\n collectors: ReadonlyArray<ConvergeStaticIrCollector>,\n): Layer.Layer<any, never, never> =>\n Layer.effect(\n currentConvergeStaticIrCollectors,\n Effect.gen(function* () {\n const current = yield* Effect.service(currentConvergeStaticIrCollectors)\n return [...current, ...collectors]\n }),\n ) as Layer.Layer<any, never, never>\n","import { Effect, SubscriptionRef } from 'effect'\nimport {\n getFieldPathId,\n normalizeFieldPath,\n normalizePatchReason,\n type FieldPathIdRegistry,\n type DirtyAllReason,\n type FieldPath,\n type FieldPathId,\n type PatchReason,\n} from 
'../../field-path.js'\n\nexport type { PatchReason } from '../../field-path.js'\n\nexport type StatePatchPath = string | FieldPath | FieldPathId\n\nexport interface TxnPatchRecord {\n readonly opSeq: number\n readonly pathId?: FieldPathId\n readonly reason: PatchReason\n readonly stepId?: number\n readonly traitNodeId?: string\n readonly from?: unknown\n readonly to?: unknown\n}\n\ninterface MutableTxnPatchRecord {\n opSeq: number\n pathId?: FieldPathId\n reason: PatchReason\n stepId?: number\n traitNodeId?: string\n from?: unknown\n to?: unknown\n}\n\nexport interface StateTxnOrigin {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\nexport type StateTxnInstrumentationLevel = 'full' | 'light'\n\n/**\n * TxnDirtyEvidenceSnapshot:\n * - Immutable snapshot attached to the committed StateTransaction.\n * - Designed for hot-path consumers (SelectorGraph / RowId gate / Debug evidence) without forcing DirtySet(rootIds) construction.\n *\n * Notes:\n * - dirtyPathIds is captured as an Array at commit time (stable across transactions).\n * - When registry is missing, the snapshot conservatively degrades to dirtyAll.\n */\nexport type TxnDirtyEvidenceSnapshot = {\n readonly dirtyAll: boolean\n readonly dirtyAllReason?: DirtyAllReason\n readonly dirtyPathIds: ReadonlyArray<FieldPathId>\n readonly dirtyPathsKeyHash: number\n readonly dirtyPathsKeySize: number\n}\n\n/**\n * TxnDirtyEvidence:\n * - Unified \"dirty evidence\" protocol within a single transaction window.\n * - Carries both root-level dirty path ids (Static IR anchors) and best-effort list index hints.\n *\n * IMPORTANT:\n * - This evidence is only valid within the current transaction window.\n * - Consumers must not persist references (maps/sets are reused across transactions).\n *\n * Key format must match validate.impl.ts `toListInstanceKey`:\n * - root list: `${listPath}@@`\n * - nested list: `${listPath}@@${parentIndexPath.join(',')}`\n */\nexport type TxnDirtyEvidence = {\n 
readonly dirtyAll: boolean\n readonly dirtyAllReason?: DirtyAllReason\n readonly dirtyPathIds: ReadonlySet<FieldPathId>\n readonly dirtyPathsKeyHash: number\n readonly dirtyPathsKeySize: number\n readonly list?: {\n readonly indexBindings: ReadonlyMap<string, ReadonlySet<number>>\n readonly rootTouched: ReadonlySet<string>\n /**\n * itemTouched:\n * - Indices for which the patch path directly targeted a list index (e.g. \"items.3\" / \"items[3]\"),\n * which is a stronger structural hint than nested field writes (e.g. \"items.3.name\").\n */\n readonly itemTouched: ReadonlyMap<string, ReadonlySet<number>>\n }\n}\n\nexport interface StateTxnConfig {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly instrumentation?: StateTxnInstrumentationLevel\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n /**\n * Optional: list path set for this module instance (derived from StateTrait.list configs).\n * - When absent/empty, list-index evidence is not recorded (zero overhead for modules without list traits).\n */\n readonly getListPathSet?: () => ReadonlySet<string> | undefined\n /**\n * Whether to capture initial/final state snapshots:\n * - enabled by default in full mode\n * - disabled by default in light mode\n */\n readonly captureSnapshots?: boolean\n /**\n * Time source function (useful for injecting a fake clock in tests).\n */\n readonly now?: () => number\n}\n\nexport interface StateTransaction<S> {\n readonly txnId: string\n readonly txnSeq: number\n readonly origin: StateTxnOrigin\n readonly startedAt: number\n readonly endedAt: number\n readonly durationMs: number\n readonly dirty: TxnDirtyEvidenceSnapshot\n readonly patchCount: number\n readonly patchesTruncated: boolean\n readonly patchesTruncatedReason?: 'max_patches'\n readonly initialStateSnapshot?: S\n readonly finalStateSnapshot?: S\n readonly patches: ReadonlyArray<TxnPatchRecord>\n readonly moduleId?: string\n readonly instanceId?: string\n}\n\nexport 
interface StateTransactionCommitResult<S> {\n readonly transaction: StateTransaction<S>\n readonly finalState: S\n}\n\n/**\n * StateTxnContext:\n * - Holds transaction state within a single ModuleRuntime.\n * - current is the active transaction (undefined when none).\n *\n * Notes:\n * - The current implementation supports a single active transaction; queueing strategies are added later (US1).\n * - To avoid premature coupling, Context provides only minimal begin/update/record/commit primitives; entry points\n * (dispatch/source-refresh/devtools) are controlled by higher layers.\n */\nexport interface StateTxnRuntimeConfig {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly instrumentation: StateTxnInstrumentationLevel\n readonly captureSnapshots: boolean\n readonly now: () => number\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n readonly getListPathSet?: () => ReadonlySet<string> | undefined\n}\n\nexport interface StateTxnContext<S> {\n readonly config: StateTxnRuntimeConfig\n current?: StateTxnState<S>\n nextTxnSeq: number\n readonly scratch: StateTxnState<S>\n /**\n * recordPatch:\n * - makeContext selects the implementation based on instrumentation (full/light).\n * - Avoids branching per patch record inside hot loops (051: branch relocation).\n */\n recordPatch: (\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\ninterface StateTxnState<S> {\n txnId: string\n txnSeq: number\n origin: StateTxnOrigin\n startedAt: number\n baseState: S\n draft: S\n initialStateSnapshot?: S\n /**\n * inferReplaceEvidence:\n * - Set when a whole-state replacement write occurred without explicit patch paths.\n * - On commit, the transaction infers best-effort field-level dirty evidence by diffing baseState -> finalState.\n *\n * Motivation:\n * - Avoid dirtyAll fallback on `runtime.setState` / `$.state.update` / reducers without 
sink patchPaths.\n * - Preserve correctness in `dispatchBatch` where different reducers may mix \"has patchPaths\" and \"no patchPaths\".\n *\n * Note:\n * - Kept internal to the txn window; never exported as part of the committed transaction.\n */\n inferReplaceEvidence: boolean\n /**\n * inferReplaceEvidenceIfEmpty:\n * - When true, inference runs only if there is no explicit field-level dirty evidence at commit time.\n * - Primary use case: `setState/state.update` in perf harnesses that record precise patch paths separately.\n * - When a reducer falls back to `path=\"*\"`, this flag is forced to false (supplement mode) to preserve correctness.\n */\n inferReplaceEvidenceIfEmpty: boolean\n /**\n * listPathSet:\n * - Captured once at transaction start from runtime config.\n * - Used to enable list-index evidence recording only when the module actually declares list traits.\n */\n listPathSet?: ReadonlySet<string>\n patches: Array<TxnPatchRecord>\n patchCount: number\n patchesTruncated: boolean\n fieldPathIdRegistry?: FieldPathIdRegistry\n /**\n * dirtyPathIds:\n * - The set of FieldPathIds for all trackable writes within the transaction window (hot path records only integer anchors).\n * - Any non-mappable/non-trackable write must explicitly degrade to dirtyAll (dirtyAllReason); no silent fallback.\n * - Independent of instrumentation: light mode does not keep patches, but still maintains dirtyPathIds/dirtyAllReason for low-cost semantics (e.g. 
scheduling/diagnostics).\n */\n readonly dirtyPathIds: Set<FieldPathId>\n dirtyPathIdSnapshot: Array<FieldPathId>\n /**\n * dirtyPathIdsKeyHash / dirtyPathIdsKeySize:\n * - Incrementally maintained key for the current dirtyPathIds Set (in insertion order),\n * optimized for ultra-hot converge paths (inline_dirty micro-cache).\n * - Hash: FNV-1a (32-bit) over unique FieldPathIds in Set insertion order.\n * - Size: number of unique ids (mirrors dirtyPathIds.size when no dirtyAllReason).\n */\n dirtyPathIdsKeyHash: number\n dirtyPathIdsKeySize: number\n dirtyAllReason?: DirtyAllReason\n /**\n * listIndexEvidence:\n * - key: listInstanceKey (\"<listPath>@@<parentIndexPath>\")\n * - value: changed indices for that list instance within the current transaction window.\n */\n readonly listIndexEvidence: Map<string, Set<number>>\n /**\n * listItemTouched:\n * - key: listInstanceKey\n * - value: indices for which the patch directly targeted the item itself (terminal numeric segment).\n */\n readonly listItemTouched: Map<string, Set<number>>\n /**\n * listRootTouched:\n * - listInstanceKey set for which a patch directly touched the list root (structure may have changed),\n * so changedIndices hints must be ignored.\n */\n readonly listRootTouched: Set<string>\n}\n\nconst MAX_PATCHES_FULL = 256\nconst MAX_INFERRED_LIST_INDICES = 64\nconst EMPTY_DIRTY_PATH_IDS: ReadonlyArray<FieldPathId> = []\nconst EMPTY_TXN_PATCHES: ReadonlyArray<TxnPatchRecord> = []\n\nconst defaultNow = () => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n}\n\nconst normalizePatchStepId = (stepId?: number): number | undefined => {\n if (typeof stepId !== 'number' || !Number.isFinite(stepId) || stepId < 0) {\n return undefined\n }\n return Math.floor(stepId)\n}\n\nconst toListInstanceKey = (listPath: string, parentIndexPathKey: string): string =>\n parentIndexPathKey.length === 0 ? 
`${listPath}@@` : `${listPath}@@${parentIndexPathKey}`\n\nconst parseNonNegativeIntMaybe = (text: string): number | undefined => {\n if (!text) return undefined\n let n = 0\n for (let i = 0; i < text.length; i++) {\n const c = text.charCodeAt(i)\n if (c < 48 /* '0' */ || c > 57 /* '9' */) return undefined\n n = n * 10 + (c - 48)\n // Best-effort guard: keep values in a reasonable integer range.\n if (n > 2_147_483_647) return undefined\n }\n return n\n}\n\nconst recordListIndexEvidenceFromPathString = <S>(state: StateTxnState<S>, path: string): void => {\n if (state.dirtyAllReason) return\n const listPathSet = state.listPathSet\n if (!listPathSet || listPathSet.size === 0) return\n if (!path || path === '*') return\n\n // Hot path: plain dot/bracket-free path can only contribute list-root touched evidence.\n const dotIdx = path.indexOf('.')\n const bracketIdx = path.indexOf('[')\n if (dotIdx < 0 && bracketIdx < 0 && path.indexOf(']') < 0) {\n if (listPathSet.has(path)) {\n state.listRootTouched.add(toListInstanceKey(path, ''))\n }\n return\n }\n\n let listPath = ''\n let parentIndexPathKey = ''\n let endedWithNumeric = false\n\n\t const parts = path.split('.')\n\t for (let i = 0; i < parts.length; i++) {\n\t const raw = parts[i]\n\t if (!raw) continue\n\t const seg = raw\n\t endedWithNumeric = false\n\n // \"foo[]\" => list root marker (no index)\n if (seg.endsWith('[]')) {\n const base = seg.slice(0, -2)\n if (base) {\n listPath = listPath.length === 0 ? base : `${listPath}.${base}`\n }\n continue\n }\n\n // \"foo[123]\" => list index marker\n const left = seg.indexOf('[')\n if (left > 0 && seg.endsWith(']')) {\n const base = seg.slice(0, left)\n const inside = seg.slice(left + 1, -1)\n const idx = parseNonNegativeIntMaybe(inside)\n\n if (base) {\n listPath = listPath.length === 0 ? 
base : `${listPath}.${base}`\n }\n\n if (idx !== undefined) {\n if (listPath && listPathSet.has(listPath)) {\n const key = toListInstanceKey(listPath, parentIndexPathKey)\n const set = state.listIndexEvidence.get(key) ?? new Set<number>()\n set.add(idx)\n state.listIndexEvidence.set(key, set)\n\n // Stronger structural hint: item-level write (\"items[3]\" as terminal segment).\n if (i === parts.length - 1) {\n const touched = state.listItemTouched.get(key) ?? new Set<number>()\n touched.add(idx)\n state.listItemTouched.set(key, touched)\n }\n }\n\n // Descend into this list item: subsequent nested list bindings should carry this index as parent indexPath.\n parentIndexPathKey = parentIndexPathKey.length === 0 ? String(idx) : `${parentIndexPathKey},${idx}`\n endedWithNumeric = true\n }\n\n continue\n }\n\n // \".<digits>\" => list index segment\n const idx = parseNonNegativeIntMaybe(seg)\n if (idx !== undefined) {\n if (listPath && listPathSet.has(listPath)) {\n const key = toListInstanceKey(listPath, parentIndexPathKey)\n const set = state.listIndexEvidence.get(key) ?? new Set<number>()\n set.add(idx)\n state.listIndexEvidence.set(key, set)\n\n // Stronger structural hint: item-level write (\"items.3\" as terminal segment).\n if (i === parts.length - 1) {\n const touched = state.listItemTouched.get(key) ?? new Set<number>()\n touched.add(idx)\n state.listItemTouched.set(key, touched)\n }\n }\n\n parentIndexPathKey = parentIndexPathKey.length === 0 ? String(idx) : `${parentIndexPathKey},${idx}`\n endedWithNumeric = true\n continue\n }\n\n // Unknown bracket syntax: bail out for this segment (best-effort).\n if (seg.includes('[') || seg.includes(']')) {\n continue\n }\n\n listPath = listPath.length === 0 ? 
seg : `${listPath}.${seg}`\n }\n\n // If the terminal normalized path is a configured list path, treat it as \"list root touched\" (structure may have changed).\n if (!endedWithNumeric && listPath && listPathSet.has(listPath)) {\n state.listRootTouched.add(toListInstanceKey(listPath, parentIndexPathKey))\n }\n}\n\nconst recordListIndexEvidenceFromPathArray = <S>(state: StateTxnState<S>, path: ReadonlyArray<string>): void => {\n if (state.dirtyAllReason) return\n const listPathSet = state.listPathSet\n if (!listPathSet || listPathSet.size === 0) return\n if (!path || path.length === 0) return\n\n // Array-path evidence (from mutative patches) can include list indices as digit strings (\"3\").\n // Unlike string-path parsing, we do not support bracket syntax here (segments are already split).\n let listPath = ''\n let parentIndexPathKey = ''\n let endedWithNumeric = false\n\n for (let i = 0; i < path.length; i++) {\n const raw = path[i]\n if (!raw) continue\n\n // List root marker (rare but supported): \"items[]\"\n if (raw.endsWith('[]')) {\n const base = raw.slice(0, -2)\n if (base) {\n listPath = listPath.length === 0 ? base : `${listPath}.${base}`\n }\n endedWithNumeric = false\n continue\n }\n\n const idx = parseNonNegativeIntMaybe(raw)\n if (idx !== undefined) {\n endedWithNumeric = true\n\n if (listPath && listPathSet.has(listPath)) {\n const key = toListInstanceKey(listPath, parentIndexPathKey)\n const set = state.listIndexEvidence.get(key) ?? new Set<number>()\n set.add(idx)\n state.listIndexEvidence.set(key, set)\n\n // Stronger structural hint: item-level write (terminal numeric segment).\n if (i === path.length - 1) {\n const touched = state.listItemTouched.get(key) ?? new Set<number>()\n touched.add(idx)\n state.listItemTouched.set(key, touched)\n }\n }\n\n parentIndexPathKey = parentIndexPathKey.length === 0 ? 
String(idx) : `${parentIndexPathKey},${idx}`\n continue\n }\n\n // Unknown segment encoding: bail out for best-effort evidence recording.\n if (raw.includes('[') || raw.includes(']') || raw.includes('.')) {\n endedWithNumeric = false\n continue\n }\n\n endedWithNumeric = false\n listPath = listPath.length === 0 ? raw : `${listPath}.${raw}`\n }\n\n // If the terminal normalized path is a configured list path, treat it as \"list root touched\" (structure may have changed).\n if (!endedWithNumeric && listPath && listPathSet.has(listPath)) {\n state.listRootTouched.add(toListInstanceKey(listPath, parentIndexPathKey))\n }\n}\n\nconst buildPatchRecord = (\n opSeq: number,\n pathId: FieldPathId | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n): TxnPatchRecord => {\n const record: MutableTxnPatchRecord = {\n opSeq,\n reason: normalizePatchReason(reason),\n }\n\n if (pathId != null) {\n record.pathId = pathId\n }\n if (from !== undefined) {\n record.from = from\n }\n if (to !== undefined) {\n record.to = to\n }\n if (traitNodeId) {\n record.traitNodeId = traitNodeId\n }\n\n const normalizedStepId = normalizePatchStepId(stepId)\n if (normalizedStepId !== undefined) {\n record.stepId = normalizedStepId\n }\n\n return record\n}\n\nconst buildDirtyEvidenceSnapshot = <S>(state: StateTxnState<S>): TxnDirtyEvidenceSnapshot => {\n const registry = state.fieldPathIdRegistry\n const dirtyAllReason = state.dirtyAllReason\n\n // If registry is missing, we cannot safely map pathIds -> FieldPaths for consumers;\n // conservatively degrade to dirtyAll (same as DirtySet fallback policy).\n if (registry == null) {\n return {\n dirtyAll: true,\n dirtyAllReason: dirtyAllReason ?? 
'fallbackPolicy',\n dirtyPathIds: EMPTY_DIRTY_PATH_IDS,\n dirtyPathsKeyHash: 0,\n dirtyPathsKeySize: 0,\n }\n }\n\n if (dirtyAllReason != null) {\n return {\n dirtyAll: true,\n dirtyAllReason,\n dirtyPathIds: EMPTY_DIRTY_PATH_IDS,\n dirtyPathsKeyHash: 0,\n dirtyPathsKeySize: 0,\n }\n }\n\n // IMPORTANT:\n // - If there is no dirty evidence at all (empty set), we must degrade to dirtyAll=unknownWrite.\n // - This preserves legacy behavior where DirtySet construction would fallback to dirtyAll on empty roots.\n if (state.dirtyPathIds.size === 0) {\n return {\n dirtyAll: true,\n dirtyAllReason: 'unknownWrite',\n dirtyPathIds: EMPTY_DIRTY_PATH_IDS,\n dirtyPathsKeyHash: 0,\n dirtyPathsKeySize: 0,\n }\n }\n\n return {\n dirtyAll: false,\n dirtyPathIds: state.dirtyPathIdSnapshot,\n dirtyPathsKeyHash: state.dirtyPathIdsKeyHash,\n dirtyPathsKeySize: state.dirtyPathIdsKeySize,\n }\n}\n\nconst inferReplaceEvidence = <S>(ctx: StateTxnContext<S>, state: StateTxnState<S>, finalState: S): void => {\n if (!state.inferReplaceEvidence) return\n if (state.dirtyAllReason) return\n\n // If explicit dirty evidence exists and this replace marker is \"if_empty\" mode, skip inference (perf-first contract).\n if (state.inferReplaceEvidenceIfEmpty && state.dirtyPathIds.size > 0) return\n\n const registry = state.fieldPathIdRegistry\n if (!registry) {\n state.dirtyAllReason = 'fallbackPolicy'\n return\n }\n\n const base = state.baseState as any\n const next = finalState as any\n\n // Best-effort inference supports plain object states only.\n if (!base || !next) {\n state.dirtyAllReason = 'unknownWrite'\n return\n }\n if (typeof base !== 'object' || typeof next !== 'object') {\n state.dirtyAllReason = 'unknownWrite'\n return\n }\n if (Array.isArray(base) || Array.isArray(next)) {\n state.dirtyAllReason = 'unknownWrite'\n return\n }\n\n const pathStringToId = registry.pathStringToId\n const listPathSet = state.listPathSet\n\n const recordKey = (key: string, prevValue: unknown, nextValue: 
unknown): void => {\n if (state.dirtyAllReason) return\n if (!key) return\n\n // Only infer for keys that exist in the Static IR registry (avoid degrading due to extra/untracked keys).\n if (!pathStringToId || !pathStringToId.has(key)) {\n return\n }\n\n if (listPathSet && listPathSet.has(key)) {\n const instanceKey = toListInstanceKey(key, '')\n\n // If the list instance is already marked as structurally dirty, skip.\n if (state.listRootTouched.has(instanceKey)) {\n return\n }\n\n const prevArr = Array.isArray(prevValue) ? (prevValue as ReadonlyArray<unknown>) : undefined\n const nextArr = Array.isArray(nextValue) ? (nextValue as ReadonlyArray<unknown>) : undefined\n\n if (!prevArr || !nextArr) {\n // Treat unknown encoding as a structural list change (disable incremental hints).\n ctx.recordPatch(`${key}[]`, 'unknown')\n return\n }\n\n if (prevArr.length !== nextArr.length) {\n ctx.recordPatch(`${key}[]`, 'unknown')\n return\n }\n\n let changed = 0\n for (let i = 0; i < prevArr.length; i++) {\n if (Object.is(prevArr[i], nextArr[i])) continue\n changed += 1\n ctx.recordPatch([key, String(i)], 'unknown')\n\n // Guard: if too many indices differ, treat it as a structural churn and stop tracking individual indices.\n if (changed > MAX_INFERRED_LIST_INDICES) {\n ctx.recordPatch(`${key}[]`, 'unknown')\n break\n }\n }\n\n // If the array identity changed but no element differs, treat it as a structural list touch.\n if (changed === 0) {\n ctx.recordPatch(`${key}[]`, 'unknown')\n }\n\n return\n }\n\n ctx.recordPatch(key, 'unknown')\n }\n\n // Removed/changed keys (covers \"key removed\" as next[key] becomes undefined).\n const baseKeys = Object.keys(base)\n for (let i = 0; i < baseKeys.length; i++) {\n const key = baseKeys[i]!\n const prevValue = base[key]\n const nextValue = next[key]\n if (!Object.is(prevValue, nextValue) || !Object.prototype.hasOwnProperty.call(next, key)) {\n recordKey(key, prevValue, nextValue)\n }\n }\n\n // Added keys (rare for schema-backed 
states, but supported).\n const nextKeys = Object.keys(next)\n for (let i = 0; i < nextKeys.length; i++) {\n const key = nextKeys[i]!\n if (Object.prototype.hasOwnProperty.call(base, key)) continue\n recordKey(key, base[key], next[key])\n }\n\n // If inference produced nothing (e.g., non-trackable schema), deterministically degrade to dirtyAll.\n if (!state.dirtyAllReason && state.dirtyPathIds.size === 0) {\n state.dirtyAllReason = 'unknownWrite'\n }\n}\n\nconst buildCommittedTransaction = <S>(\n ctx: StateTxnContext<S>,\n state: StateTxnState<S>,\n finalState: S,\n endedAt: number,\n): StateTransaction<S> => {\n const { config } = ctx\n inferReplaceEvidence(ctx, state, finalState)\n const dirty = buildDirtyEvidenceSnapshot(state)\n const patches =\n config.instrumentation === 'full'\n ? state.patches.length === 0\n ? EMPTY_TXN_PATCHES\n : (state.patches as ReadonlyArray<TxnPatchRecord>)\n : EMPTY_TXN_PATCHES\n\n return {\n txnId: state.txnId,\n txnSeq: state.txnSeq,\n origin: state.origin,\n startedAt: state.startedAt,\n endedAt,\n durationMs: Math.max(0, endedAt - state.startedAt),\n dirty,\n patchCount: state.patchCount,\n patchesTruncated: state.patchesTruncated,\n ...(state.patchesTruncated ? { patchesTruncatedReason: 'max_patches' } : null),\n initialStateSnapshot: state.initialStateSnapshot,\n finalStateSnapshot: config.captureSnapshots ? finalState : undefined,\n patches,\n moduleId: config.moduleId,\n instanceId: config.instanceId,\n }\n}\n\nexport const makeContext = <S>(config: StateTxnConfig): StateTxnContext<S> => {\n const instrumentation: StateTxnInstrumentationLevel = config.instrumentation ?? 'full'\n\n const captureSnapshots = config.captureSnapshots ?? 
instrumentation === 'full'\n\n const scratch: StateTxnState<S> = {\n txnId: '',\n txnSeq: 0,\n origin: { kind: 'unknown' },\n startedAt: 0,\n baseState: undefined as any,\n draft: undefined as any,\n initialStateSnapshot: undefined,\n inferReplaceEvidence: false,\n inferReplaceEvidenceIfEmpty: true,\n listPathSet: undefined,\n patches: [],\n patchCount: 0,\n patchesTruncated: false,\n dirtyPathIds: new Set(),\n dirtyPathIdSnapshot: [],\n dirtyPathIdsKeyHash: 2166136261 >>> 0,\n dirtyPathIdsKeySize: 0,\n dirtyAllReason: undefined,\n listIndexEvidence: new Map(),\n listItemTouched: new Map(),\n listRootTouched: new Set(),\n }\n\n const ctx: StateTxnContext<S> = {\n config: {\n instrumentation,\n captureSnapshots,\n now: config.now ?? defaultNow,\n moduleId: config.moduleId,\n instanceId: config.instanceId,\n getFieldPathIdRegistry: config.getFieldPathIdRegistry,\n getListPathSet: config.getListPathSet,\n },\n current: undefined,\n nextTxnSeq: 0,\n scratch,\n recordPatch: () => {},\n }\n\n const recordPatchLight = (\n path: StatePatchPath | undefined,\n _reason: PatchReason,\n _from?: unknown,\n _to?: unknown,\n _traitNodeId?: string,\n _stepId?: number,\n ): void => {\n const state = ctx.current\n if (!state) return\n state.patchCount += 1\n if (typeof path === 'string') {\n recordListIndexEvidenceFromPathString(state, path)\n } else if (Array.isArray(path)) {\n recordListIndexEvidenceFromPathArray(state, path)\n }\n resolveAndRecordDirtyPathId(state, path, _reason)\n }\n\n const recordPatchFull = (\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ): void => {\n const state = ctx.current\n if (!state) return\n state.patchCount += 1\n if (typeof path === 'string') {\n recordListIndexEvidenceFromPathString(state, path)\n } else if (Array.isArray(path)) {\n recordListIndexEvidenceFromPathArray(state, path)\n }\n const opSeq = state.patchCount - 1\n const pathId = 
resolveAndRecordDirtyPathId(state, path, reason)\n if (state.patchesTruncated || state.patches.length >= MAX_PATCHES_FULL) {\n state.patchesTruncated = true\n return\n }\n state.patches.push(buildPatchRecord(opSeq, pathId, reason, from, to, traitNodeId, stepId))\n }\n\n ctx.recordPatch = instrumentation === 'full' ? recordPatchFull : recordPatchLight\n\n return ctx\n}\n\n/**\n * Begins a new transaction:\n * - Default behavior: overrides the current transaction (queueing/nesting are refined in US1).\n * - initialState is provided by the caller (typically the current SubscriptionRef snapshot).\n */\nexport const beginTransaction = <S>(ctx: StateTxnContext<S>, origin: StateTxnOrigin, initialState: S): void => {\n const { config } = ctx\n const now = config.now\n const startedAt = now()\n\n ctx.nextTxnSeq += 1\n const txnSeq = ctx.nextTxnSeq\n const anchor = config.instanceId ?? 'unknown'\n const txnId = `${anchor}::t${txnSeq}`\n\n const initialSnapshot = config.captureSnapshots ? initialState : undefined\n\n const state = ctx.scratch\n state.txnId = txnId\n state.txnSeq = txnSeq\n state.origin = origin\n state.startedAt = startedAt\n state.baseState = initialState\n state.draft = initialState\n state.initialStateSnapshot = initialSnapshot\n state.inferReplaceEvidence = false\n state.inferReplaceEvidenceIfEmpty = true\n state.patches = []\n state.patchCount = 0\n state.patchesTruncated = false\n state.fieldPathIdRegistry = ctx.config.getFieldPathIdRegistry?.()\n state.dirtyPathIds.clear()\n state.dirtyPathIdSnapshot = []\n state.dirtyPathIdsKeyHash = 2166136261 >>> 0\n state.dirtyPathIdsKeySize = 0\n state.dirtyAllReason = undefined\n state.listPathSet = ctx.config.getListPathSet?.()\n state.listIndexEvidence.clear()\n state.listItemTouched.clear()\n state.listRootTouched.clear()\n ctx.current = state\n}\n\nconst resolveAndRecordDirtyPathId = <S>(\n state: StateTxnState<S>,\n path: StatePatchPath | undefined,\n reason: PatchReason,\n): FieldPathId | undefined => {\n 
if (state.dirtyAllReason) return undefined\n\n if (path === undefined) {\n state.dirtyAllReason = 'customMutation'\n return undefined\n }\n\n if (path === '*') {\n // Perf boundary harness: keep a stable way to force dirtyAll (explicit contract).\n if (reason === 'perf') {\n state.dirtyAllReason = 'unknownWrite'\n return undefined\n }\n\n // Whole-state replacement without explicit patch paths:\n // defer to commit-time inference rather than eagerly degrading to dirtyAll.\n state.inferReplaceEvidence = true\n // Reducer fallback must preserve correctness even when other reducers already produced evidence.\n // For non-reducer callers (setState/update), default to if_empty mode to avoid extra diff cost when precise evidence exists.\n if (reason === 'reducer') {\n state.inferReplaceEvidenceIfEmpty = false\n }\n return undefined\n }\n\n const registry = state.fieldPathIdRegistry\n if (!registry) {\n state.dirtyAllReason = reason === 'reducer' ? 'customMutation' : 'fallbackPolicy'\n return undefined\n }\n\n let id: FieldPathId | undefined\n\n if (typeof path === 'number') {\n if (!Number.isFinite(path)) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n const n = Math.floor(path)\n if (n < 0) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n if (!registry.fieldPaths[n]) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n id = n\n } else if (typeof path === 'string') {\n // Fast path: direct dot-separated lookup.\n const direct = registry.pathStringToId?.get(path)\n if (direct != null) {\n id = direct\n } else {\n // Structural string fallback: support list/index syntax such as:\n // - \"b123[456]\" / \"b123[]\" -> \"b123\"\n // - \"a.0.b\" / \"a.0.b[3].c\" -> \"a.b.c\"\n //\n // IMPORTANT: only attempt normalization when the input clearly contains structural syntax\n // (brackets or a numeric segment). 
This avoids accidentally interpreting literal \".\" keys\n // (which are intentionally excluded from pathStringToId due to ambiguity).\n\n const dotIdx = path.indexOf('.')\n const bracketIdx = path.indexOf('[')\n\n // Extremely hot case in perf boundaries: single-segment \"foo[123]\" should not allocate.\n if (dotIdx < 0 && bracketIdx > 0) {\n const base = path.slice(0, bracketIdx)\n const baseDirect = registry.pathStringToId?.get(base)\n if (baseDirect != null) {\n id = baseDirect\n }\n }\n\n if (id == null) {\n let hasStructuralSyntax = bracketIdx >= 0 || path.indexOf(']') >= 0\n\n // Detect \".<digits>(.|$)\" segments without regex allocations.\n if (!hasStructuralSyntax) {\n for (let i = 0; i < path.length; i++) {\n if (path.charCodeAt(i) !== 46 /* '.' */) continue\n let j = i + 1\n if (j >= path.length) break\n const c = path.charCodeAt(j)\n if (c < 48 /* '0' */ || c > 57 /* '9' */) continue\n\n while (j < path.length) {\n const d = path.charCodeAt(j)\n if (d < 48 /* '0' */ || d > 57 /* '9' */) break\n j += 1\n }\n\n if (j === path.length || path.charCodeAt(j) === 46 /* '.' 
*/) {\n hasStructuralSyntax = true\n break\n }\n\n i = j\n }\n }\n\n if (hasStructuralSyntax) {\n const normalized = normalizeFieldPath(path)\n if (normalized) {\n const next = getFieldPathId(registry, normalized)\n if (next != null) {\n id = next\n }\n }\n }\n }\n\n if (id == null) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n }\n } else {\n const normalized = normalizeFieldPath(path)\n if (!normalized) {\n state.dirtyAllReason = 'nonTrackablePatch'\n return undefined\n }\n\n const next = getFieldPathId(registry, normalized)\n if (next == null) {\n state.dirtyAllReason = 'fallbackPolicy'\n return undefined\n }\n id = next\n }\n\n state.dirtyPathIds.add(id)\n // Maintain an incremental key for inline_dirty micro-cache without scanning the Set.\n // Only update when the id is newly inserted (Set ignores duplicates but keeps insertion order).\n const afterSize = state.dirtyPathIds.size\n if (afterSize !== state.dirtyPathIdsKeySize) {\n state.dirtyPathIdSnapshot.push(id)\n let h = state.dirtyPathIdsKeyHash >>> 0\n h ^= id >>> 0\n h = Math.imul(h, 16777619)\n state.dirtyPathIdsKeyHash = h >>> 0\n state.dirtyPathIdsKeySize = afterSize\n }\n return id\n}\n\n/**\n * Updates the draft state:\n * - next is the latest draft.\n * - When instrumentation is full, patch info is recorded into the transaction via recordPatch.\n */\nexport const updateDraft = <S>(ctx: StateTxnContext<S>, next: S): void => {\n const state = ctx.current\n if (!state) {\n // No active transaction: ignore patch info; higher layers decide whether to start an implicit transaction.\n return\n }\n\n state.draft = next\n}\n\n/**\n * recordPatch:\n * - In full mode, appends a Patch.\n * - In light mode, silently ignores to avoid extra overhead.\n */\nexport const recordPatch = <S>(\n ctx: StateTxnContext<S>,\n path: StatePatchPath | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n): void => {\n ctx.recordPatch(path, 
reason, from, to, traitNodeId, stepId)\n}\n\nexport const markDirtyPath = <S>(\n ctx: StateTxnContext<S>,\n path: StatePatchPath | undefined,\n reason: PatchReason,\n): FieldPathId | undefined => {\n const state = ctx.current\n if (!state) return undefined\n return resolveAndRecordDirtyPathId(state, path, reason)\n}\n\nexport const readDirtyEvidence = <S>(ctx: StateTxnContext<S>): TxnDirtyEvidence | undefined => {\n const state = ctx.current as StateTxnState<S> | undefined\n if (!state) return undefined\n const listPathSet = state.listPathSet\n const list =\n listPathSet && listPathSet.size > 0\n ? {\n indexBindings: state.listIndexEvidence,\n rootTouched: state.listRootTouched,\n itemTouched: state.listItemTouched,\n }\n : undefined\n return {\n dirtyAll: state.dirtyAllReason != null,\n dirtyAllReason: state.dirtyAllReason,\n dirtyPathIds: state.dirtyPathIds,\n dirtyPathsKeyHash: state.dirtyPathIdsKeyHash,\n dirtyPathsKeySize: state.dirtyPathIdsKeySize,\n ...(list ? { list } : null),\n }\n}\n\n/**\n * commitWithState:\n * - Commits current transaction and returns both aggregated transaction metadata and committed final state.\n * - Used by runtime hot path to avoid an extra stateRef round-trip after commit.\n */\nexport const commitWithState = <S>(\n ctx: StateTxnContext<S>,\n stateRef: SubscriptionRef.SubscriptionRef<S>,\n): Effect.Effect<StateTransactionCommitResult<S> | undefined> =>\n Effect.gen(function* () {\n const state = ctx.current\n if (!state) {\n return undefined\n }\n\n const { config } = ctx\n const now = config.now\n\n const finalState = state.draft\n\n // 0 commit: when there is no change, do not write SubscriptionRef and do not emit state:update.\n if (Object.is(finalState, state.baseState)) {\n ctx.current = undefined\n return undefined\n }\n\n // Single write to SubscriptionRef: ensures only one external state commit + subscription notification.\n yield* SubscriptionRef.set(stateRef, finalState)\n\n const endedAt = now()\n const transaction = 
buildCommittedTransaction(ctx, state, finalState, endedAt)\n\n // Hand off the current patch array to the committed transaction, then switch the scratch\n // state to a fresh array so later transactions do not mutate the committed snapshot.\n state.patches = []\n\n // Clear the current transaction.\n ctx.current = undefined\n\n return {\n transaction,\n finalState,\n }\n })\n\n/**\n * Commits the transaction:\n * - Writes the final draft to SubscriptionRef exactly once.\n * - Returns the aggregated StateTransaction; returns undefined if there is no active transaction.\n *\n * Notes:\n * - Emitting Debug/Devtools events is decided by the caller based on the returned transaction.\n * - This module does not depend on DebugSink to avoid circular dependencies in core.\n */\nexport const commit = <S>(\n ctx: StateTxnContext<S>,\n stateRef: SubscriptionRef.SubscriptionRef<S>,\n): Effect.Effect<StateTransaction<S> | undefined> =>\n commitWithState(ctx, stateRef).pipe(Effect.map((result) => result?.transaction))\n\n/**\n * abort:\n * - Terminates the current transaction and clears context.\n * - Does not write to stateRef.\n * - Higher layers decide whether to record diagnostics/observability events.\n */\nexport const abort = <S>(ctx: StateTxnContext<S>): void => {\n ctx.current = undefined\n}\n","import { Effect, Option, ServiceMap } from 'effect'\nimport { isDevEnv } from './env.js'\nimport {\n makeRuntimeServicesEvidence as makeRuntimeServicesEvidenceImpl,\n selectRuntimeService as selectRuntimeServiceImpl,\n type RuntimeServicesOverrideLayers,\n} from './RuntimeKernel.selection.js'\n\nexport type OverrideScope = 'builtin' | 'runtime_default' | 'runtime_module' | 'provider' | 'instance'\n\nexport type RuntimeServiceOverride = {\n readonly implId?: string\n readonly notes?: string\n}\n\n/**\n * RuntimeServicesOverrides: a serializable override for runtime service implementation selection.\n *\n * - The key is a stable serviceId.\n * - The value may only contain 
serializable fields (no functions/closures) so evidence can be produced and explained.\n */\nexport type RuntimeServicesOverrides = Readonly<Record<string, RuntimeServiceOverride>>\n\nexport interface RuntimeServicesRuntimeConfig {\n /** Runtime-level default overrides (runtime_default). */\n readonly services?: RuntimeServicesOverrides\n /** Per-module delta overrides by moduleId (runtime_module). */\n readonly servicesByModuleId?: Readonly<Record<string, RuntimeServicesOverrides>>\n}\n\nclass RuntimeServicesRuntimeConfigTagImpl extends ServiceMap.Service<\n RuntimeServicesRuntimeConfigTagImpl,\n RuntimeServicesRuntimeConfig\n>()('@logixjs/core/RuntimeServicesRuntimeConfig') {}\n\nexport const RuntimeServicesRuntimeConfigTag = RuntimeServicesRuntimeConfigTagImpl\n\nexport interface RuntimeServicesProviderOverrides {\n /** Provider-scoped default overrides (provider). */\n readonly services?: RuntimeServicesOverrides\n /** Provider-scoped per-module delta overrides by moduleId (provider). */\n readonly servicesByModuleId?: Readonly<Record<string, RuntimeServicesOverrides>>\n}\n\nclass RuntimeServicesProviderOverridesTagImpl extends ServiceMap.Service<\n RuntimeServicesProviderOverridesTagImpl,\n RuntimeServicesProviderOverrides\n>()('@logixjs/core/RuntimeServicesProviderOverrides') {}\n\nexport const RuntimeServicesProviderOverridesTag = RuntimeServicesProviderOverridesTagImpl\n\nclass RuntimeServicesInstanceOverridesTagImpl extends ServiceMap.Service<\n RuntimeServicesInstanceOverridesTagImpl,\n RuntimeServicesOverrides\n>()('@logixjs/core/RuntimeServicesInstanceOverrides') {}\n\nexport const RuntimeServicesInstanceOverridesTag = RuntimeServicesInstanceOverridesTagImpl\n\n/**\n * FullCutoverGateMode: controls whether fallbacks are allowed during assembly.\n *\n * - trial: allows fallbacks (for trial-run / comparison / diagnostics).\n * - fullCutover: forbids fallbacks (any fallback or missing binding fails).\n *\n * Default: fullCutover. 
trial must be opted in explicitly.\n */\nexport type FullCutoverGateMode = 'trial' | 'fullCutover'\n\nclass FullCutoverGateModeTagImpl extends ServiceMap.Service<\n FullCutoverGateModeTagImpl,\n FullCutoverGateMode\n>()('@logixjs/core/FullCutoverGateMode') {}\n\nexport const FullCutoverGateModeTag = FullCutoverGateModeTagImpl\n\nexport interface RuntimeServiceBinding {\n readonly serviceId: string\n readonly implId?: string\n readonly implVersion?: string\n readonly scope: OverrideScope\n readonly overridden: boolean\n readonly notes?: string\n}\n\nexport interface RuntimeServicesEvidence {\n readonly moduleId?: string\n readonly instanceId: string\n readonly scope: OverrideScope\n readonly bindings: ReadonlyArray<RuntimeServiceBinding>\n readonly overridesApplied: ReadonlyArray<string>\n}\n\nconst isPlainRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nconst normalizeRuntimeServiceOverride = (value: unknown): RuntimeServiceOverride | undefined => {\n if (!isPlainRecord(value)) return undefined\n\n const implIdRaw = value.implId\n const notesRaw = value.notes\n\n const implId = typeof implIdRaw === 'string' && implIdRaw.length > 0 ? implIdRaw : undefined\n const notes = typeof notesRaw === 'string' && notesRaw.length > 0 ? notesRaw : undefined\n if (!implId && !notes) return undefined\n\n return Object.freeze({\n ...(implId ? { implId } : {}),\n ...(notes ? 
{ notes } : {}),\n }) as RuntimeServiceOverride\n}\n\nconst freezeRuntimeServicesOverrides = (\n value: RuntimeServicesOverrides | undefined,\n): RuntimeServicesOverrides | undefined => {\n if (!isPlainRecord(value)) return undefined\n\n const out: Record<string, RuntimeServiceOverride> = {}\n for (const [serviceId, rawOverride] of Object.entries(value)) {\n if (serviceId.length === 0) continue\n const normalized = normalizeRuntimeServiceOverride(rawOverride)\n if (!normalized) continue\n out[serviceId] = normalized\n }\n\n if (Object.keys(out).length === 0) return undefined\n return Object.freeze(out) as RuntimeServicesOverrides\n}\n\nconst freezeRuntimeServicesOverridesAtModule = (args: {\n readonly moduleId: string | undefined\n readonly byModuleId: Readonly<Record<string, RuntimeServicesOverrides>> | undefined\n}): RuntimeServicesOverrides | undefined => {\n if (!args.moduleId || args.moduleId.length === 0) return undefined\n if (!isPlainRecord(args.byModuleId)) return undefined\n return freezeRuntimeServicesOverrides(args.byModuleId[args.moduleId])\n}\n\nexport const resolveRuntimeServicesOverrides = (args: {\n readonly moduleId: string | undefined\n}): Effect.Effect<RuntimeServicesOverrideLayers, never, any> =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(RuntimeServicesRuntimeConfigTag)\n const providerOverridesOpt = yield* Effect.serviceOption(RuntimeServicesProviderOverridesTag)\n const instanceOverridesOpt = yield* Effect.serviceOption(RuntimeServicesInstanceOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(providerOverridesOpt) ? providerOverridesOpt.value : undefined\n const instanceOverrides = Option.isSome(instanceOverridesOpt) ? 
instanceOverridesOpt.value : undefined\n\n const moduleId = args.moduleId\n const runtimeDefaults = freezeRuntimeServicesOverrides(runtimeConfig?.services)\n const runtimeModule = freezeRuntimeServicesOverridesAtModule({\n moduleId,\n byModuleId: runtimeConfig?.servicesByModuleId,\n })\n const providerDefaults = freezeRuntimeServicesOverrides(providerOverrides?.services)\n const providerModule = freezeRuntimeServicesOverridesAtModule({\n moduleId,\n byModuleId: providerOverrides?.servicesByModuleId,\n })\n const instance = freezeRuntimeServicesOverrides(instanceOverrides)\n\n return Object.freeze({\n runtimeDefault: runtimeDefaults,\n runtimeModule,\n provider: providerDefaults,\n providerModule,\n instance,\n }) as RuntimeServicesOverrideLayers\n })\n\nexport interface RuntimeServiceImpl<Service> {\n readonly implId: string\n readonly implVersion: string\n readonly make: Effect.Effect<Service, never, any>\n readonly notes?: string\n}\n\n/**\n * RuntimeServicesRegistry:\n * - Used to inject additional serviceId → impls (e.g. 
implementations provided by core-ng).\n * - A non-serializable contract used only during assembly; selection evidence is still carried by\n * RuntimeServicesOverrides + RuntimeServicesEvidence.\n */\nexport interface RuntimeServicesRegistry {\n readonly implsByServiceId: Readonly<Record<string, ReadonlyArray<RuntimeServiceImpl<any>>>>\n}\n\nclass RuntimeServicesRegistryTagImpl extends ServiceMap.Service<\n RuntimeServicesRegistryTagImpl,\n RuntimeServicesRegistry\n>()('@logixjs/core/RuntimeServicesRegistry') {}\n\nexport const RuntimeServicesRegistryTag = RuntimeServicesRegistryTagImpl\n\nexport interface RuntimeServiceSelection<Service> {\n readonly binding: RuntimeServiceBinding\n readonly impl: RuntimeServiceImpl<Service>\n readonly overridesApplied: ReadonlyArray<string>\n}\n\nexport const selectRuntimeService = selectRuntimeServiceImpl\n\nexport const makeRuntimeServicesEvidence = makeRuntimeServicesEvidenceImpl\n\nconst RUNTIME_SERVICES_EVIDENCE = Symbol.for('@logixjs/core/runtimeServicesEvidence')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? 
instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const setRuntimeServicesEvidence = (runtime: object, evidence: RuntimeServicesEvidence): void => {\n defineHidden(runtime, RUNTIME_SERVICES_EVIDENCE, evidence)\n}\n\nexport const getRuntimeServicesEvidence = (runtime: object): RuntimeServicesEvidence => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const evidence = (runtime as any)[RUNTIME_SERVICES_EVIDENCE] as RuntimeServicesEvidence | undefined\n if (!evidence) {\n const msg = isDevEnv()\n ? [\n '[MissingRuntimeServicesEvidence] Runtime services evidence not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make assembles RuntimeKernel and attaches evidence (020 US2).',\n '- If you created a mock runtime for tests, attach evidence or avoid calling evidence-only APIs.',\n ].join('\\n')\n : 'Runtime services evidence not installed'\n throw new Error(msg)\n }\n\n const runtimeInstanceId = scope.instanceId\n if (\n typeof runtimeInstanceId === 'string' &&\n runtimeInstanceId.length > 0 &&\n runtimeInstanceId !== evidence.instanceId\n ) {\n throw new Error(\n isDevEnv()\n ? 
[\n '[InconsistentRuntimeServicesEvidence] Runtime services evidence instanceId mismatch.',\n `runtime: ${formatScope(scope.moduleId, runtimeInstanceId)}`,\n `evidence: ${formatScope(evidence.moduleId, evidence.instanceId)}`,\n ].join('\\n')\n : 'Runtime services evidence mismatch',\n )\n }\n\n return evidence\n}\n","import { Effect, Layer, ServiceMap } from 'effect'\nimport type { TraitConvergeRequestedMode } from '../../state-trait/model.js'\nimport type { ReadQueryStrictGateConfig } from './ReadQuery.js'\nimport { getGlobalHostScheduler, type HostScheduler } from './HostScheduler.js'\nimport { makeRuntimeStore, type RuntimeStore } from './RuntimeStore.js'\nimport { makeTickScheduler, type TickScheduler, type TickSchedulerConfig } from './TickScheduler.js'\nimport { makeDeclarativeLinkRuntime, type DeclarativeLinkRuntime } from './DeclarativeLinkRuntime.js'\n\n// Unified runtime env detection, avoiding bundlers inlining NODE_ENV at build time.\nexport const getNodeEnv = (): string | undefined => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const env = (globalThis as any)?.process?.env\n return typeof env?.NODE_ENV === 'string' ? env.NODE_ENV : undefined\n } catch {\n return undefined\n }\n}\n\nexport const isDevEnv = (): boolean => getNodeEnv() !== 'production'\n\nexport type StateTransactionInstrumentation = 'full' | 'light'\n\n/**\n * getDefaultStateTxnInstrumentation:\n * - Currently chooses default instrumentation by NODE_ENV:\n * - dev / test: full (keep patches and snapshots for debugging).\n * - production: light (keep minimal semantics to reduce overhead).\n * - May evolve with finer-grained overrides in Runtime.make / Module.make.\n */\nexport const getDefaultStateTxnInstrumentation = (): StateTransactionInstrumentation => (isDevEnv() ? 
'full' : 'light')\n\n/**\n * Runtime-level StateTransaction config Service:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime.make can read runtime-level defaults from Env.\n *\n * Notes:\n * - instrumentation is only a runtime-level default.\n * - Explicit instrumentation in ModuleImpl / ModuleRuntimeOptions has higher priority.\n */\nexport interface StateTransactionRuntimeConfig {\n readonly instrumentation?: StateTransactionInstrumentation\n /**\n * StateTrait derived converge budget (ms):\n * - Exceeding the budget triggers a soft degrade (freeze derived fields, preserve base writes and 0/1 commit semantics).\n * - Default is 200ms (aligned with the 007 spec threshold).\n */\n readonly traitConvergeBudgetMs?: number\n /**\n * Auto-mode decision budget (ms):\n * - Only used during the decision phase when requestedMode=\"auto\".\n * - Exceeding the budget must immediately fall back to full (and record evidence).\n */\n readonly traitConvergeDecisionBudgetMs?: number\n /**\n * StateTrait converge scheduling strategy:\n * - full: full topo execution (current default; safest).\n * - dirty: minimal triggering based on dirtyPaths + deps in the txn window (requires accurate deps).\n */\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n /**\n * 043: Trait converge time-slicing (explicit opt-in). Disabled by default.\n */\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n /**\n * 060: Txn Lanes (priority scheduling for transaction follow-up work). 
Enabled by default since 062.\n */\n readonly txnLanes?: TxnLanesPatch\n /**\n * Runtime-level per-module overrides (hotfix path):\n * - Only affects converge behavior for the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /**\n * 060: Txn Lanes runtime_module overrides (hotfix / gradual tuning).\n * - Only affects the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionConfigTagImpl extends ServiceMap.Service<\n StateTransactionConfigTagImpl,\n StateTransactionRuntimeConfig\n>()('@logixjs/core/StateTransactionRuntimeConfig') {}\n\nexport const StateTransactionConfigTag = StateTransactionConfigTagImpl\n\nexport type ReadQueryStrictGateRuntimeConfig = ReadQueryStrictGateConfig\n\nclass ReadQueryStrictGateConfigTagImpl extends ServiceMap.Service<\n ReadQueryStrictGateConfigTagImpl,\n ReadQueryStrictGateRuntimeConfig\n>()('@logixjs/core/ReadQueryStrictGateRuntimeConfig') {}\n\nexport const ReadQueryStrictGateConfigTag = ReadQueryStrictGateConfigTagImpl\n\nexport type ReplayMode = 'live' | 'replay'\n\nexport interface ReplayModeConfig {\n readonly mode: ReplayMode\n}\n\nclass ReplayModeConfigTagImpl extends ServiceMap.Service<\n ReplayModeConfigTagImpl,\n ReplayModeConfig\n>()('@logixjs/core/ReplayModeConfig') {}\n\nexport const ReplayModeConfigTag = ReplayModeConfigTagImpl\n\nexport const replayModeLayer = (mode: ReplayMode): Layer.Layer<ReplayModeConfigTagImpl, never, never> =>\n Layer.succeed(ReplayModeConfigTag, { mode })\n\nexport interface StateTransactionTraitConvergeOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: 
TraitConvergeTimeSlicingPatch\n}\n\nexport interface TxnLanesPatch {\n /**\n * enabled: whether Txn Lanes is enabled (default on since 062).\n * - undefined: default enabled (when not explicitly configured)\n * - false: disabled (returns to baseline behavior)\n * - true: enabled (only affects scheduling of follow-up work outside the transaction; transactions remain synchronous)\n */\n readonly enabled?: boolean\n /**\n * overrideMode: runtime temporary override (for debugging/rollback/comparison).\n * - forced_off: forcibly disables Txn Lanes (returns to baseline behavior).\n * - forced_sync: forces fully synchronous execution (ignores non-urgent deferral and time-slicing; used for comparisons).\n *\n * Notes:\n * - Override precedence follows StateTransactionOverrides: provider > runtime_module > runtime_default > builtin.\n * - Overrides must be explainable by evidence (see 060 LaneEvidence reasons).\n */\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n /** non-urgent work loop slice budget (ms). */\n readonly budgetMs?: number\n /** Non-urgent backlog coalescing window (ms). */\n readonly debounceMs?: number\n /** Max lag upper bound (ms): exceeding it triggers an explainable starvation protection (forced catch-up). */\n readonly maxLagMs?: number\n /** Whether to allow coalescing/canceling intermediate non-urgent work (must preserve eventual consistency). 
*/\n readonly allowCoalesce?: boolean\n /**\n * Yield strategy for the non-urgent work loop (progressive enhancement).\n * - baseline: uses only time budget + hard upper bound\n * - inputPending: when supported by browsers, also consults `navigator.scheduling.isInputPending`\n */\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n}\n\nexport interface TraitConvergeTimeSlicingPatch {\n /**\n * enabled:\n * - false/undefined: disabled (default)\n * - true: enables time-slicing (only affects computed/link explicitly marked as deferred)\n */\n readonly enabled?: boolean\n /**\n * debounceMs: coalescing interval (ms) for the deferral window; merges high-frequency inputs into one deferred flush.\n */\n readonly debounceMs?: number\n /**\n * maxLagMs: max lag upper bound (ms); exceeding it triggers an explainable forced flush (starvation protection).\n */\n readonly maxLagMs?: number\n}\n\n/**\n * Provider-scoped StateTransactionOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface StateTransactionOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /** 060: Txn Lanes provider-level overrides (delta overrides). */\n readonly txnLanes?: TxnLanesPatch\n /** 060: Txn Lanes provider_module overrides (by moduleId). 
*/\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionOverridesTagImpl extends ServiceMap.Service<\n StateTransactionOverridesTagImpl,\n StateTransactionOverrides\n>()('@logixjs/core/StateTransactionOverrides') {}\n\nexport const StateTransactionOverridesTag = StateTransactionOverridesTagImpl\n\nexport type SchedulingPolicyLimit = number | 'unbounded'\nexport type ConcurrencyLimit = SchedulingPolicyLimit\n\nexport interface SchedulingPolicySurfacePatch {\n readonly concurrencyLimit?: SchedulingPolicyLimit\n readonly losslessBackpressureCapacity?: number\n readonly allowUnbounded?: boolean\n readonly pressureWarningThreshold?: {\n readonly backlogCount?: number\n readonly backlogDurationMs?: number\n }\n readonly warningCooldownMs?: number\n}\n\n/**\n * Runtime-level unified scheduling policy surface:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime merges sources via a resolver (builtin/runtime_module/provider, etc.).\n *\n * Notes:\n * - overridesByModuleId is used for runtime_module hot-switching (hotfix / gradual tuning) and is lower priority than provider overrides.\n */\nexport interface SchedulingPolicySurface extends SchedulingPolicySurfacePatch {\n readonly overridesByModuleId?: Readonly<Record<string, SchedulingPolicySurfacePatch>>\n}\n\nclass SchedulingPolicySurfaceTagImpl extends ServiceMap.Service<\n SchedulingPolicySurfaceTagImpl,\n SchedulingPolicySurface\n>()('@logixjs/core/SchedulingPolicySurface') {}\n\nexport const SchedulingPolicySurfaceTag = SchedulingPolicySurfaceTagImpl\n\n/**\n * Provider-scoped SchedulingPolicySurfaceOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface SchedulingPolicySurfaceOverrides extends SchedulingPolicySurfacePatch {\n readonly 
overridesByModuleId?: Readonly<Record<string, SchedulingPolicySurfacePatch>>\n}\n\nclass SchedulingPolicySurfaceOverridesTagImpl extends ServiceMap.Service<\n SchedulingPolicySurfaceOverridesTagImpl,\n SchedulingPolicySurfaceOverrides\n>()('@logixjs/core/SchedulingPolicySurfaceOverrides') {}\n\nexport const SchedulingPolicySurfaceOverridesTag = SchedulingPolicySurfaceOverridesTagImpl\n\n/**\n * Legacy aliases:\n * - Keep old names as pure aliases to support migration without behavior drift.\n * - Canonical naming for new code should use SchedulingPolicySurface*.\n */\nexport type ConcurrencyPolicyPatch = SchedulingPolicySurfacePatch\nexport type ConcurrencyPolicy = SchedulingPolicySurface\nexport type ConcurrencyPolicyOverrides = SchedulingPolicySurfaceOverrides\n\nexport const ConcurrencyPolicyTag = SchedulingPolicySurfaceTag\nexport const ConcurrencyPolicyOverridesTag = SchedulingPolicySurfaceOverridesTag\n\n// ---- 073: TickScheduler + RuntimeStore (injectable runtime services) ----\n\nexport interface RuntimeStoreService extends RuntimeStore {}\n\nexport class RuntimeStoreTag extends ServiceMap.Service<RuntimeStoreTag, RuntimeStoreService>()('@logixjs/core/RuntimeStore') {}\n\nexport const runtimeStoreLayer: Layer.Layer<any, never, never> = Layer.effect(\n RuntimeStoreTag,\n Effect.acquireRelease(\n Effect.sync(() => makeRuntimeStore() as RuntimeStoreService),\n (store) => Effect.sync(() => store.dispose()),\n ),\n) as Layer.Layer<any, never, never>\n\nexport const runtimeStoreTestStubLayer = (store: RuntimeStoreService): Layer.Layer<any, never, never> =>\n Layer.succeed(RuntimeStoreTag, store) as Layer.Layer<any, never, never>\n\nexport interface HostSchedulerService extends HostScheduler {}\n\nexport class HostSchedulerTag extends ServiceMap.Service<\n HostSchedulerTag,\n HostSchedulerService\n>()('@logixjs/core/HostScheduler') {}\n\nexport const hostSchedulerLayer: Layer.Layer<any, never, never> = Layer.succeed(\n HostSchedulerTag,\n getGlobalHostScheduler() 
as HostSchedulerService,\n) as Layer.Layer<any, never, never>\n\nexport const hostSchedulerTestStubLayer = (scheduler: HostSchedulerService): Layer.Layer<any, never, never> =>\n Layer.succeed(HostSchedulerTag, scheduler) as Layer.Layer<any, never, never>\n\nexport interface DeclarativeLinkRuntimeService extends DeclarativeLinkRuntime {}\n\nexport class DeclarativeLinkRuntimeTag extends ServiceMap.Service<\n DeclarativeLinkRuntimeTag,\n DeclarativeLinkRuntimeService\n>()('@logixjs/core/DeclarativeLinkRuntime') {}\n\nexport const declarativeLinkRuntimeLayer: Layer.Layer<any, never, never> = Layer.succeed(\n DeclarativeLinkRuntimeTag,\n makeDeclarativeLinkRuntime() as DeclarativeLinkRuntimeService,\n) as Layer.Layer<any, never, never>\n\nexport const declarativeLinkRuntimeTestStubLayer = (\n runtime: DeclarativeLinkRuntimeService,\n): Layer.Layer<any, never, never> => Layer.succeed(DeclarativeLinkRuntimeTag, runtime) as Layer.Layer<any, never, never>\n\nexport interface TickSchedulerService extends TickScheduler {}\n\nexport class TickSchedulerTag extends ServiceMap.Service<TickSchedulerTag, TickSchedulerService>()('@logixjs/core/TickScheduler') {}\n\nexport const tickSchedulerLayer = (config?: TickSchedulerConfig): Layer.Layer<any, never, never> =>\n Layer.effect(\n TickSchedulerTag,\n Effect.gen(function* () {\n const store = yield* Effect.service(RuntimeStoreTag).pipe(Effect.orDie)\n const declarativeLinkRuntime = yield* Effect.service(DeclarativeLinkRuntimeTag).pipe(Effect.orDie)\n const hostScheduler = yield* Effect.service(HostSchedulerTag).pipe(Effect.orDie)\n return makeTickScheduler({ runtimeStore: store, declarativeLinkRuntime, hostScheduler, config }) as TickSchedulerService\n }),\n ) as Layer.Layer<any, never, never>\n\nexport const tickSchedulerTestStubLayer = (scheduler: TickSchedulerService): Layer.Layer<any, never, never> =>\n Layer.succeed(TickSchedulerTag, scheduler) as Layer.Layer<any, never, never>\n","export type Cancel = () => void\n\nexport type 
HostScheduler = {\n readonly nowMs: () => number\n readonly scheduleMicrotask: (cb: () => void) => void\n readonly scheduleMacrotask: (cb: () => void) => Cancel\n readonly scheduleAnimationFrame: (cb: () => void) => Cancel\n readonly scheduleTimeout: (ms: number, cb: () => void) => Cancel\n}\n\nconst noopCancel: Cancel = () => {}\n\nconst safeNowMs = (): number => {\n const perf = (globalThis as any).performance as { now?: () => number } | undefined\n if (perf && typeof perf.now === 'function') {\n try {\n const v = perf.now()\n if (typeof v === 'number' && Number.isFinite(v)) return v\n } catch {\n // fallthrough\n }\n }\n\n return Date.now()\n}\n\nconst safeQueueMicrotask = (cb: () => void): void => {\n const qm = (globalThis as any).queueMicrotask as ((run: () => void) => void) | undefined\n if (typeof qm === 'function') {\n try {\n qm(cb)\n return\n } catch {\n // fallthrough\n }\n }\n\n // Promise job fallback (still a microtask boundary).\n try {\n Promise.resolve().then(cb)\n } catch {\n // last resort\n setTimeout(cb, 0)\n }\n}\n\nconst safeSetTimeout = (ms: number, cb: () => void): Cancel => {\n const id = setTimeout(cb, ms)\n return () => {\n try {\n clearTimeout(id)\n } catch {\n // best-effort\n }\n }\n}\n\nconst makeMessageChannelMacrotask = (): ((cb: () => void) => Cancel) | undefined => {\n const MC = (globalThis as any).MessageChannel as { new (): MessageChannel } | undefined\n if (typeof MC !== 'function') return undefined\n\n let channel: MessageChannel\n try {\n channel = new MC()\n } catch {\n return undefined\n }\n\n type Task = { canceled: boolean; cb: () => void }\n const queue: Array<Task> = []\n let scheduled = false\n\n const flush = (): void => {\n scheduled = false\n const tasks = queue.splice(0, queue.length)\n for (const t of tasks) {\n if (t.canceled) continue\n try {\n t.cb()\n } catch {\n // best-effort\n }\n }\n }\n\n try {\n channel.port1.onmessage = flush\n } catch {\n return undefined\n }\n\n const schedule = (cb: () => void): 
Cancel => {\n const task: Task = { canceled: false, cb }\n queue.push(task)\n if (!scheduled) {\n scheduled = true\n try {\n channel.port2.postMessage(undefined)\n } catch {\n scheduled = false\n // fallback to timeout if postMessage fails\n return safeSetTimeout(0, cb)\n }\n }\n return () => {\n task.canceled = true\n }\n }\n\n return schedule\n}\n\nconst makeSetImmediateMacrotask = (): ((cb: () => void) => Cancel) | undefined => {\n const si = (globalThis as any).setImmediate as ((run: () => void) => any) | undefined\n const ci = (globalThis as any).clearImmediate as ((id: any) => void) | undefined\n if (typeof si !== 'function') return undefined\n\n return (cb) => {\n let id: any\n try {\n id = si(cb)\n } catch {\n return safeSetTimeout(0, cb)\n }\n\n return () => {\n if (typeof ci !== 'function') return\n try {\n ci(id)\n } catch {\n // best-effort\n }\n }\n }\n}\n\nconst makeRaf = (): ((cb: () => void) => Cancel) | undefined => {\n const raf = (globalThis as any).requestAnimationFrame as ((run: () => void) => number) | undefined\n const cancel = (globalThis as any).cancelAnimationFrame as ((id: number) => void) | undefined\n if (typeof raf !== 'function') return undefined\n\n return (cb) => {\n let id: number\n try {\n id = raf(cb)\n } catch {\n return noopCancel\n }\n\n return () => {\n if (typeof cancel !== 'function') return\n try {\n cancel(id)\n } catch {\n // best-effort\n }\n }\n }\n}\n\nexport const makeDefaultHostScheduler = (): HostScheduler => {\n const macrotask =\n makeSetImmediateMacrotask() ??\n makeMessageChannelMacrotask() ??\n ((cb: () => void) => safeSetTimeout(0, cb))\n\n const raf = makeRaf()\n\n return {\n nowMs: safeNowMs,\n scheduleMicrotask: safeQueueMicrotask,\n scheduleMacrotask: macrotask,\n scheduleAnimationFrame: (cb) => raf?.(cb) ?? 
macrotask(cb),\n scheduleTimeout: safeSetTimeout,\n }\n}\n\nlet globalHostScheduler: HostScheduler | undefined\n\nexport const getGlobalHostScheduler = (): HostScheduler => {\n globalHostScheduler ??= makeDefaultHostScheduler()\n return globalHostScheduler\n}\n\nexport const __unsafeSetGlobalHostSchedulerForTests = (next: HostScheduler | undefined): void => {\n globalHostScheduler = next\n}\n\nexport type DeterministicHostScheduler = HostScheduler & {\n readonly flushMicrotasks: (options?: { readonly max?: number }) => number\n readonly flushOneMacrotask: () => boolean\n readonly flushAll: (options?: { readonly maxTurns?: number }) => { readonly turns: number; readonly ran: number }\n readonly getQueueSize: () => { readonly microtasks: number; readonly macrotasks: number }\n}\n\nexport const makeDeterministicHostScheduler = (): DeterministicHostScheduler => {\n const microtasks: Array<() => void> = []\n const macrotasks: Array<{ canceled: boolean; cb: () => void }> = []\n\n const flushMicrotasks = (options?: { readonly max?: number }): number => {\n const max = options?.max ?? 10_000\n let ran = 0\n while (microtasks.length > 0 && ran < max) {\n const cb = microtasks.shift()!\n ran += 1\n try {\n cb()\n } catch {\n // best-effort\n }\n }\n return ran\n }\n\n const flushOneMacrotask = (): boolean => {\n const t = macrotasks.shift()\n if (!t) return false\n if (t.canceled) return true\n try {\n t.cb()\n } catch {\n // best-effort\n }\n return true\n }\n\n const flushAll = (options?: { readonly maxTurns?: number }): { turns: number; ran: number } => {\n const maxTurns = options?.maxTurns ?? 
10_000\n let turns = 0\n let ran = 0\n\n while (turns < maxTurns) {\n const before = microtasks.length + macrotasks.length\n ran += flushMicrotasks()\n if (microtasks.length > 0) {\n turns += 1\n continue\n }\n if (flushOneMacrotask()) {\n turns += 1\n continue\n }\n const after = microtasks.length + macrotasks.length\n if (after === 0 || after === before) break\n turns += 1\n }\n\n return { turns, ran }\n }\n\n return {\n nowMs: safeNowMs,\n scheduleMicrotask: (cb) => {\n microtasks.push(cb)\n },\n scheduleMacrotask: (cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n scheduleAnimationFrame: (cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n scheduleTimeout: (_ms, cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n flushMicrotasks,\n flushOneMacrotask,\n flushAll,\n getQueueSize: () => ({ microtasks: microtasks.length, macrotasks: macrotasks.length }),\n }\n}\n","import type { StateCommitMeta, StateCommitPriority } from './module.js'\n\nexport type ModuleInstanceKey = `${string}::${string}`\nexport type TopicKey = string\n\nexport type TopicKind = 'module' | 'readQuery'\n\nexport type TopicInfo =\n | { readonly kind: 'module'; readonly moduleInstanceKey: ModuleInstanceKey }\n | { readonly kind: 'readQuery'; readonly moduleInstanceKey: ModuleInstanceKey; readonly selectorId: string }\n\nexport const makeModuleInstanceKey = (moduleId: string, instanceId: string): ModuleInstanceKey =>\n `${moduleId}::${instanceId}`\n\nexport const makeReadQueryTopicKey = (moduleInstanceKey: ModuleInstanceKey, selectorId: string): TopicKey =>\n `${moduleInstanceKey}::rq:${selectorId}`\n\nexport const parseTopicKey = (topicKey: string): TopicInfo | undefined => {\n const idx = topicKey.indexOf('::')\n if (idx <= 0) return undefined\n\n const moduleId = topicKey.slice(0, 
idx)\n const rest = topicKey.slice(idx + 2)\n if (rest.length === 0) return undefined\n\n const idx2 = rest.indexOf('::')\n if (idx2 < 0) {\n return { kind: 'module', moduleInstanceKey: `${moduleId}::${rest}` }\n }\n\n const instanceId = rest.slice(0, idx2)\n const suffix = rest.slice(idx2 + 2)\n if (suffix.startsWith('rq:')) {\n const selectorId = suffix.slice('rq:'.length)\n if (selectorId.length === 0) return undefined\n return {\n kind: 'readQuery',\n moduleInstanceKey: `${moduleId}::${instanceId}`,\n selectorId,\n }\n }\n\n return { kind: 'module', moduleInstanceKey: `${moduleId}::${instanceId}` }\n}\n\nexport interface RuntimeStoreModuleCommit {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly state: unknown\n readonly meta: StateCommitMeta\n readonly opSeq?: number\n readonly schedulingPolicy?: {\n readonly configScope: 'builtin' | 'runtime_default' | 'runtime_module' | 'provider'\n readonly concurrencyLimit: number | 'unbounded'\n readonly allowUnbounded: boolean\n readonly losslessBackpressureCapacity: number\n readonly pressureWarningThreshold: {\n readonly backlogCount: number\n readonly backlogDurationMs: number\n }\n readonly warningCooldownMs: number\n readonly resolvedAtTxnSeq: number\n }\n}\n\nexport interface RuntimeStorePendingDrain {\n readonly modules: ReadonlyMap<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly dirtyTopics: ReadonlyMap<TopicKey, StateCommitPriority>\n}\n\nexport interface RuntimeStoreCommitResult {\n readonly changedTopicListeners: ReadonlyArray<() => void>\n}\n\nexport type RuntimeStoreListenerCallback = (listener: () => void) => void\n\ninterface TopicListenersState {\n readonly listeners: Set<() => void>\n snapshot: ReadonlyArray<() => void>\n}\n\nconst EMPTY_LISTENER_SNAPSHOT: ReadonlyArray<() => void> = []\n\nexport interface RuntimeStore {\n // ---- React-facing sync snapshot APIs ----\n readonly getTickSeq: () => number\n readonly getModuleState: 
(moduleInstanceKey: ModuleInstanceKey) => unknown\n readonly getTopicVersion: (topicKey: TopicKey) => number\n readonly getTopicPriority: (topicKey: TopicKey) => StateCommitPriority\n readonly subscribeTopic: (topicKey: TopicKey, listener: () => void) => () => void\n readonly getTopicSubscriberCount: (topicKey: TopicKey) => number\n readonly getModuleSubscriberCount: (moduleInstanceKey: ModuleInstanceKey) => number\n\n // ---- Runtime integration ----\n readonly registerModuleInstance: (args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly initialState: unknown\n }) => void\n readonly unregisterModuleInstance: (moduleInstanceKey: ModuleInstanceKey) => void\n\n // ---- TickScheduler integration (internal) ----\n readonly commitTick: (args: {\n readonly tickSeq: number\n readonly accepted: RuntimeStorePendingDrain\n readonly onListener?: RuntimeStoreListenerCallback\n }) => RuntimeStoreCommitResult\n\n readonly dispose: () => void\n}\n\nconst NO_CHANGED_TOPIC_LISTENERS: ReadonlyArray<() => void> = []\n\nexport const makeRuntimeStore = (): RuntimeStore => {\n let tickSeq = 0\n\n // ---- Committed snapshot (read by React) ----\n const moduleStates = new Map<ModuleInstanceKey, unknown>()\n const topicVersions = new Map<TopicKey, number>()\n const topicPriorities = new Map<TopicKey, StateCommitPriority>()\n\n // ---- Subscriptions ----\n const listenersByTopic = new Map<TopicKey, TopicListenersState>()\n const subscriberCountByModule = new Map<ModuleInstanceKey, number>()\n\n const getTopicVersion = (topicKey: TopicKey): number => topicVersions.get(topicKey) ?? 0\n const getTopicPriority = (topicKey: TopicKey): StateCommitPriority => topicPriorities.get(topicKey) ?? 'normal'\n\n const commitTopicBump = (topicKey: TopicKey, priority: StateCommitPriority): void => {\n const prev = topicVersions.get(topicKey) ?? 
0\n topicVersions.set(topicKey, prev + 1)\n topicPriorities.set(topicKey, priority)\n }\n\n const refreshTopicSnapshot = (state: TopicListenersState): void => {\n state.snapshot = Array.from(state.listeners)\n }\n\n const subscribeTopic = (topicKey: TopicKey, listener: () => void): (() => void) => {\n const info = parseTopicKey(topicKey)\n const existing = listenersByTopic.get(topicKey)\n const state = existing ?? { listeners: new Set<() => void>(), snapshot: EMPTY_LISTENER_SNAPSHOT }\n const alreadyHas = state.listeners.has(listener)\n if (!alreadyHas) {\n state.listeners.add(listener)\n refreshTopicSnapshot(state)\n }\n if (!existing) {\n listenersByTopic.set(topicKey, state)\n }\n\n if (!alreadyHas && info) {\n const prev = subscriberCountByModule.get(info.moduleInstanceKey) ?? 0\n subscriberCountByModule.set(info.moduleInstanceKey, prev + 1)\n }\n\n return () => {\n const currentState = listenersByTopic.get(topicKey)\n if (!currentState) return\n const deleted = currentState.listeners.delete(listener)\n if (deleted && info) {\n const prev = subscriberCountByModule.get(info.moduleInstanceKey) ?? 0\n const next = prev - 1\n if (next <= 0) {\n subscriberCountByModule.delete(info.moduleInstanceKey)\n } else {\n subscriberCountByModule.set(info.moduleInstanceKey, next)\n }\n }\n if (currentState.listeners.size === 0) {\n listenersByTopic.delete(topicKey)\n } else if (deleted) {\n refreshTopicSnapshot(currentState)\n }\n }\n }\n\n const getTopicSubscriberCount = (topicKey: TopicKey): number => listenersByTopic.get(topicKey)?.listeners.size ?? 0\n const getModuleSubscriberCount = (moduleInstanceKey: ModuleInstanceKey): number => subscriberCountByModule.get(moduleInstanceKey) ?? 
0\n\n const registerModuleInstance = (args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly initialState: unknown\n }): void => {\n moduleStates.set(args.moduleInstanceKey, args.initialState)\n // Ensure the module topic exists with a stable baseline version/priority.\n if (!topicVersions.has(args.moduleInstanceKey)) {\n topicVersions.set(args.moduleInstanceKey, 0)\n topicPriorities.set(args.moduleInstanceKey, 'normal')\n }\n }\n\n const unregisterModuleInstance = (moduleInstanceKey: ModuleInstanceKey): void => {\n moduleStates.delete(moduleInstanceKey)\n // Keep topic versions by default (helps debugging). Subscribers are expected to detach on module destroy.\n }\n\n const commitTick = (args: {\n readonly tickSeq: number\n readonly accepted: RuntimeStorePendingDrain\n readonly onListener?: RuntimeStoreListenerCallback\n }): RuntimeStoreCommitResult => {\n tickSeq = args.tickSeq\n\n for (const [key, commit] of args.accepted.modules) {\n moduleStates.set(key, commit.state)\n }\n\n if (args.accepted.dirtyTopics.size === 0) {\n return {\n changedTopicListeners: NO_CHANGED_TOPIC_LISTENERS,\n }\n }\n\n if (args.onListener) {\n let firstTopicListeners: ReadonlyArray<() => void> | undefined\n let secondTopicListeners: ReadonlyArray<() => void> | undefined\n let restTopicListeners: Array<ReadonlyArray<() => void>> | undefined\n\n for (const [topicKey, priority] of args.accepted.dirtyTopics) {\n commitTopicBump(topicKey, priority)\n const listeners = listenersByTopic.get(topicKey)?.snapshot ?? 
EMPTY_LISTENER_SNAPSHOT\n if (listeners.length === 0) {\n continue\n }\n if (!firstTopicListeners) {\n firstTopicListeners = listeners\n continue\n }\n if (!secondTopicListeners) {\n secondTopicListeners = listeners\n continue\n }\n if (!restTopicListeners) {\n restTopicListeners = []\n }\n restTopicListeners.push(listeners)\n }\n\n if (firstTopicListeners) {\n for (const listener of firstTopicListeners) {\n try {\n args.onListener(listener)\n } catch {\n // best-effort: never let listener callback break commit tick\n }\n }\n }\n\n if (secondTopicListeners) {\n for (const listener of secondTopicListeners) {\n try {\n args.onListener(listener)\n } catch {\n // best-effort: never let listener callback break commit tick\n }\n }\n }\n\n if (restTopicListeners) {\n for (const listeners of restTopicListeners) {\n for (const listener of listeners) {\n try {\n args.onListener(listener)\n } catch {\n // best-effort: never let listener callback break commit tick\n }\n }\n }\n }\n\n return {\n changedTopicListeners: NO_CHANGED_TOPIC_LISTENERS,\n }\n }\n\n let singleTopicListeners: ReadonlyArray<() => void> | undefined\n let flattenedTopicListeners: Array<() => void> | undefined\n\n for (const [topicKey, priority] of args.accepted.dirtyTopics) {\n commitTopicBump(topicKey, priority)\n const listeners = listenersByTopic.get(topicKey)?.snapshot ?? EMPTY_LISTENER_SNAPSHOT\n if (listeners.length === 0) {\n continue\n }\n if (flattenedTopicListeners) {\n for (const listener of listeners) {\n flattenedTopicListeners.push(listener)\n }\n continue\n }\n if (!singleTopicListeners) {\n singleTopicListeners = listeners\n continue\n }\n flattenedTopicListeners = Array.from(singleTopicListeners)\n for (const listener of listeners) {\n flattenedTopicListeners.push(listener)\n }\n }\n\n return {\n changedTopicListeners: flattenedTopicListeners ?? singleTopicListeners ?? 
NO_CHANGED_TOPIC_LISTENERS,\n }\n }\n\n const getModuleState = (moduleInstanceKey: ModuleInstanceKey): unknown => moduleStates.get(moduleInstanceKey)\n\n const dispose = (): void => {\n moduleStates.clear()\n topicVersions.clear()\n topicPriorities.clear()\n listenersByTopic.clear()\n subscriberCountByModule.clear()\n }\n\n return {\n getTickSeq: () => tickSeq,\n getModuleState,\n getTopicVersion,\n getTopicPriority,\n subscribeTopic,\n getTopicSubscriberCount,\n getModuleSubscriberCount,\n registerModuleInstance,\n unregisterModuleInstance,\n commitTick,\n dispose,\n }\n}\n","import { Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport type { DeclarativeLinkRuntime } from './DeclarativeLinkRuntime.js'\nimport type { HostScheduler } from './HostScheduler.js'\nimport { makeJobQueue, type JobQueue } from './JobQueue.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport {\n makeReadQueryTopicKey,\n type ModuleInstanceKey,\n type RuntimeStore,\n type RuntimeStoreModuleCommit,\n type RuntimeStorePendingDrain,\n} from './RuntimeStore.js'\nimport type { StateCommitPriority } from './module.js'\n\nexport type TickLane = 'urgent' | 'nonUrgent'\n\nexport type TickDegradeReason = 'budget_steps' | 'cycle_detected' | 'unknown'\n\nexport interface TickSchedulerConfig {\n /**\n * Fixpoint step cap:\n * - Steps count \"work acceptance units\" within a single tick, not time.\n * - Exceeding the budget triggers a soft degrade (stable=false), deferring nonUrgent backlog to the next tick.\n */\n readonly maxSteps?: number\n /**\n * Urgent safety cap:\n * - Even urgent work may be cut when the system appears to be in a cycle (avoid freezing).\n */\n readonly urgentStepCap?: number\n /**\n * Drain-round cap:\n * - Bounds the number of drain rounds while capturing concurrent commits before committing the tick snapshot.\n * - Exceeding the cap is treated as a cycle (stable=false, degradeReason=cycle_detected).\n */\n readonly maxDrainRounds?: number\n /**\n * 
Microtask starvation protection threshold:\n * - Counts consecutive ticks scheduled on microtask boundaries without yielding to host (best-effort).\n * - Exceeding the limit forces the next tick to start on a macrotask boundary.\n */\n readonly microtaskChainDepthLimit?: number\n /**\n * Optional degraded-tick telemetry (opt-in, sampled):\n * - Runs even when diagnostics=off (Devtools disabled).\n * - Intended for production health signals (frequency of stable=false / forced yield).\n */\n readonly telemetry?: TickSchedulerTelemetryConfig\n}\n\nexport interface TickSchedulerTelemetryEvent {\n readonly tickSeq: number\n readonly stable: boolean\n readonly degradeReason?: TickDegradeReason\n readonly forcedMacrotask?: boolean\n readonly scheduleReason?: TickScheduleReason\n readonly microtaskChainDepth?: number\n readonly deferredWorkCount?: number\n}\n\nexport interface TickSchedulerTelemetryConfig {\n /** Sample rate in [0, 1]. Default: 0 (disabled). */\n readonly sampleRate?: number\n /** Called for ticks that are degraded (stable=false) and/or started on a forced macrotask boundary. 
*/\n readonly onTickDegraded?: (event: TickSchedulerTelemetryEvent) => void\n}\n\nexport interface TickScheduler {\n readonly getTickSeq: () => number\n readonly onModuleCommit: (commit: RuntimeStoreModuleCommit) => Effect.Effect<void, never, never>\n readonly onSelectorChanged: (args: {\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly selectorId: string\n readonly priority: StateCommitPriority\n }) => void\n readonly flushNow: Effect.Effect<void, never, never>\n}\n\n// ---- Runtime.batch (sync boundary) ----\n\ntype BatchWaiter = { readonly resolve: () => void }\n\nlet batchDepth = 0\nconst batchWaiters = new Set<BatchWaiter>()\n\nexport const enterRuntimeBatch = (): void => {\n batchDepth += 1\n}\n\nexport const exitRuntimeBatch = (): void => {\n batchDepth = Math.max(0, batchDepth - 1)\n if (batchDepth !== 0) return\n const waiters = Array.from(batchWaiters)\n batchWaiters.clear()\n for (const w of waiters) {\n try {\n w.resolve()\n } catch {\n // best-effort\n }\n }\n}\n\nconst waitForBatchEndIfNeeded = (): Effect.Effect<void, never, never> =>\n batchDepth === 0\n ? 
Effect.void\n : Effect.promise<void>((signal) =>\n new Promise<void>((resolve) => {\n\n let done = false\n const cleanup = () => {\n if (done) return\n done = true\n batchWaiters.delete(waiter)\n try {\n signal.removeEventListener('abort', onAbort)\n } catch {\n // best-effort\n }\n }\n\n const onAbort = () => {\n cleanup()\n }\n\n const waiter: BatchWaiter = {\n resolve: () => {\n cleanup()\n resolve()\n },\n }\n\n batchWaiters.add(waiter)\n try {\n signal.addEventListener('abort', onAbort, { once: true })\n } catch {\n // best-effort\n }\n }),\n )\n\n// ---- TickScheduler implementation ----\n\ntype TriggerKind = 'externalStore' | 'dispatch' | 'timer' | 'unknown'\n\ntype TickScheduleStartedAs = 'microtask' | 'macrotask' | 'batch' | 'unknown'\ntype TickScheduleReason = 'budget' | 'cycle_detected' | 'microtask_starvation' | 'unknown'\n\ntype TickSchedule = {\n readonly startedAs?: TickScheduleStartedAs\n readonly microtaskChainDepth?: number\n readonly forcedMacrotask?: boolean\n readonly reason?: TickScheduleReason\n}\n\ntype SchedulingDegradeState = {\n readonly tickSeq: number\n readonly reason: TickDegradeReason\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly txnId: string\n readonly opSeq: number\n readonly configScope: 'builtin' | 'runtime_default' | 'runtime_module' | 'provider'\n readonly limit: number | 'unbounded'\n readonly backlogCount: number\n}\n\ntype SchedulingAnchor = {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly txnId: string\n readonly opSeq: number\n}\n\nconst toSchedulingAnchor = (commit: RuntimeStoreModuleCommit | undefined): SchedulingAnchor | undefined => {\n if (!commit) return undefined\n if (typeof commit.opSeq !== 'number') return undefined\n return {\n moduleId: commit.moduleId,\n instanceId: commit.instanceId,\n txnSeq: commit.meta.txnSeq,\n txnId: commit.meta.txnId,\n opSeq: commit.opSeq,\n }\n}\n\nconst toSchedulingResolvedTxnSeq = 
(commit: RuntimeStoreModuleCommit): number =>\n commit.schedulingPolicy?.resolvedAtTxnSeq ?? commit.meta.txnSeq\n\nconst toSchedulingResolvedOpSeq = (commit: RuntimeStoreModuleCommit): number =>\n typeof commit.opSeq === 'number' && Number.isFinite(commit.opSeq) ? commit.opSeq : -1\n\nconst pickNewerSchedulingCommit = (\n current: RuntimeStoreModuleCommit | undefined,\n candidate: RuntimeStoreModuleCommit,\n): RuntimeStoreModuleCommit => {\n if (!current) return candidate\n const currentTxnSeq = toSchedulingResolvedTxnSeq(current)\n const candidateTxnSeq = toSchedulingResolvedTxnSeq(candidate)\n if (candidateTxnSeq > currentTxnSeq) return candidate\n if (candidateTxnSeq < currentTxnSeq) return current\n return toSchedulingResolvedOpSeq(candidate) > toSchedulingResolvedOpSeq(current) ? candidate : current\n}\n\nconst clampSampleRate = (sampleRate: number | undefined): number => {\n if (typeof sampleRate !== 'number' || !Number.isFinite(sampleRate)) return 0\n if (sampleRate <= 0) return 0\n if (sampleRate >= 1) return 1\n return sampleRate\n}\n\nconst shouldSampleTick = (tickSeq: number, sampleRate: number): boolean => {\n if (sampleRate <= 0) return false\n if (sampleRate >= 1) return true\n // Deterministic sampling: stable across runs, avoids Math.random() and keeps overhead minimal.\n const x = tickSeq >>> 0\n const h = Math.imul(x ^ 0x9e3779b9, 0x85ebca6b) >>> 0\n return h / 0xffffffff < sampleRate\n}\n\nconst topicKeyResolutionCacheLimit = 1024\n\nconst toTriggerKind = (originKind: string | undefined): TriggerKind => {\n if (originKind === 'action') return 'dispatch'\n if (originKind === 'trait-external-store') return 'externalStore'\n if (originKind?.includes('timer')) return 'timer'\n return 'unknown'\n}\n\nconst toLane = (priority: StateCommitPriority): TickLane => (priority === 'low' ? 'nonUrgent' : 'urgent')\n\nconst maxPriority = (a: StateCommitPriority, b: StateCommitPriority): StateCommitPriority =>\n a === 'normal' || b === 'normal' ? 
'normal' : 'low'\n\ntype MutablePendingDrain = {\n readonly modules: Map<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly dirtyTopics: Map<string, StateCommitPriority>\n}\n\nconst mergeDrainInPlace = (base: MutablePendingDrain, next: RuntimeStorePendingDrain): void => {\n for (const [k, commit] of next.modules) {\n const prev = base.modules.get(k)\n if (!prev) {\n base.modules.set(k, commit)\n } else {\n const mergedPriority = maxPriority(prev.meta.priority, commit.meta.priority)\n if (mergedPriority === commit.meta.priority) {\n base.modules.set(k, commit)\n } else {\n base.modules.set(k, {\n ...commit,\n meta: {\n ...commit.meta,\n priority: mergedPriority,\n },\n })\n }\n }\n }\n\n for (const [k, p] of next.dirtyTopics) {\n const prev = base.dirtyTopics.get(k)\n if (!prev) {\n base.dirtyTopics.set(k, p)\n continue\n }\n const mergedPriority = maxPriority(prev, p)\n if (mergedPriority !== prev) {\n base.dirtyTopics.set(k, mergedPriority)\n }\n }\n}\n\nconst emptyDrain = (): MutablePendingDrain => ({ modules: new Map(), dirtyTopics: new Map() })\n\ntype BudgetPartitionResult = {\n readonly acceptedModules: Map<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly deferredModules: Map<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly urgentCapExceeded: boolean\n readonly deferredNonUrgentCount: number\n}\n\nconst partitionModulesForBudget = (args: {\n readonly modules: ReadonlyMap<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly maxSteps: number\n readonly urgentStepCap: number\n}): BudgetPartitionResult => {\n let urgentCount = 0\n for (const commit of args.modules.values()) {\n if (toLane(commit.meta.priority) === 'urgent') {\n urgentCount += 1\n }\n }\n\n const urgentCap = Math.max(0, args.urgentStepCap)\n const urgentCapExceeded = urgentCount > urgentCap\n const nonUrgentBudget = Math.max(0, args.maxSteps)\n\n let acceptedUrgentCount = 0\n let deferredNonUrgentCount = 0\n\n const acceptedModules = new Map<ModuleInstanceKey, 
RuntimeStoreModuleCommit>()\n const deferredModules = new Map<ModuleInstanceKey, RuntimeStoreModuleCommit>()\n\n for (const commit of args.modules.values()) {\n if (toLane(commit.meta.priority) !== 'urgent') continue\n if (urgentCapExceeded && acceptedUrgentCount >= urgentCap) {\n deferredModules.set(commit.moduleInstanceKey, commit)\n continue\n }\n acceptedModules.set(commit.moduleInstanceKey, commit)\n acceptedUrgentCount += 1\n }\n\n if (urgentCapExceeded) {\n for (const commit of args.modules.values()) {\n if (toLane(commit.meta.priority) === 'urgent') continue\n deferredModules.set(commit.moduleInstanceKey, commit)\n deferredNonUrgentCount += 1\n }\n return {\n acceptedModules,\n deferredModules,\n urgentCapExceeded,\n deferredNonUrgentCount,\n }\n }\n\n let acceptedNonUrgentCount = 0\n for (const commit of args.modules.values()) {\n if (toLane(commit.meta.priority) === 'urgent') continue\n if (acceptedNonUrgentCount >= nonUrgentBudget) {\n deferredModules.set(commit.moduleInstanceKey, commit)\n deferredNonUrgentCount += 1\n continue\n }\n acceptedModules.set(commit.moduleInstanceKey, commit)\n acceptedNonUrgentCount += 1\n }\n\n return {\n acceptedModules,\n deferredModules,\n urgentCapExceeded,\n deferredNonUrgentCount,\n }\n}\n\nexport const makeTickScheduler = (args: {\n readonly runtimeStore: RuntimeStore\n readonly queue?: JobQueue\n readonly hostScheduler: HostScheduler\n readonly config?: TickSchedulerConfig\n readonly declarativeLinkRuntime?: DeclarativeLinkRuntime\n}): TickScheduler => {\n const store = args.runtimeStore\n const hostScheduler = args.hostScheduler\n const declarativeLinks = args.declarativeLinkRuntime\n const queue = args.queue ?? makeJobQueue()\n\n const config: Required<Pick<TickSchedulerConfig, 'maxSteps' | 'urgentStepCap' | 'maxDrainRounds' | 'microtaskChainDepthLimit'>> = {\n maxSteps: args.config?.maxSteps ?? 64,\n urgentStepCap: args.config?.urgentStepCap ?? 512,\n maxDrainRounds: args.config?.maxDrainRounds ?? 
8,\n microtaskChainDepthLimit: args.config?.microtaskChainDepthLimit ?? 32,\n }\n const telemetry = args.config?.telemetry\n const telemetrySampleRate = clampSampleRate(telemetry?.sampleRate)\n\n let tickSeq = 0\n let scheduled = false\n let microtaskChainDepth = 0\n let nextForcedReason: TickScheduleReason | undefined\n let lastSchedulingDegrade: SchedulingDegradeState | undefined\n\n let coalescedModules = 0\n let coalescedTopics = 0\n const topicKeyToModuleInstanceKeyCache = new Map<string, ModuleInstanceKey | null>()\n\n const rememberTopicKeyResolution = (topicKey: string, moduleInstanceKey: ModuleInstanceKey | undefined): ModuleInstanceKey | undefined => {\n if (topicKeyToModuleInstanceKeyCache.has(topicKey)) {\n topicKeyToModuleInstanceKeyCache.delete(topicKey)\n } else if (topicKeyToModuleInstanceKeyCache.size >= topicKeyResolutionCacheLimit) {\n const oldestKey = topicKeyToModuleInstanceKeyCache.keys().next().value\n if (oldestKey !== undefined) {\n topicKeyToModuleInstanceKeyCache.delete(oldestKey)\n }\n }\n topicKeyToModuleInstanceKeyCache.set(topicKey, moduleInstanceKey ?? 
null)\n return moduleInstanceKey\n }\n\n const yieldMicrotask = Effect.promise<void>(() =>\n new Promise<void>((resolve) => {\n hostScheduler.scheduleMicrotask(resolve)\n }),\n )\n const yieldMacrotask = Effect.promise<void>((signal) =>\n new Promise<void>((resolve) => {\n const cancel = hostScheduler.scheduleMacrotask(resolve)\n try {\n signal.addEventListener(\n 'abort',\n () => {\n cancel()\n },\n { once: true },\n )\n } catch {\n // best-effort\n }\n }),\n )\n\n const scheduleTick = (): Effect.Effect<void, never, never> =>\n Effect.gen(function* () {\n if (scheduled) return\n scheduled = true\n\n const waitedForBatch = batchDepth > 0\n\n const forcedReason = nextForcedReason\n nextForcedReason = undefined\n\n const shouldYieldForStarvation =\n forcedReason == null && microtaskChainDepth >= Math.max(1, config.microtaskChainDepthLimit)\n\n const reason: TickScheduleReason | undefined = forcedReason ?? (shouldYieldForStarvation ? 'microtask_starvation' : undefined)\n const boundary: 'microtask' | 'macrotask' = reason ? 'macrotask' : 'microtask'\n const startedAs: TickScheduleStartedAs = waitedForBatch ? 'batch' : boundary\n const depthAtSchedule = microtaskChainDepth\n\n yield* Effect.provideService(Effect.gen(function* () {\n try {\n yield* waitForBatchEndIfNeeded()\n if (boundary === 'microtask') {\n // Always yield at least one microtask tick boundary before flushing:\n // - Keeps tick→notify semantics stable (async flush window) even under Runtime.batch.\n // - Avoids \"denominatorZero\" artifacts in perf budgets when dispatch is synchronous (mr.actions.*).\n if (waitedForBatch) {\n microtaskChainDepth = 0\n }\n yield* yieldMicrotask\n if (!waitedForBatch) {\n microtaskChainDepth += 1\n }\n } else {\n yield* yieldMacrotask\n microtaskChainDepth = 0\n }\n \n const schedule: TickSchedule = {\n startedAs,\n microtaskChainDepth: boundary === 'macrotask' ? depthAtSchedule : microtaskChainDepth,\n ...(boundary === 'macrotask' ? 
{ forcedMacrotask: true, reason: reason ?? 'unknown' } : {}),\n }\n \n const outcome = yield* flushTick(schedule)\n if (!outcome.stable) {\n nextForcedReason =\n outcome.degradeReason === 'budget_steps'\n ? 'budget'\n : outcome.degradeReason === 'cycle_detected'\n ? 'cycle_detected'\n : 'unknown'\n }\n } finally {\n scheduled = false\n // If something was re-queued or arrived after commit, schedule the next tick (best-effort).\n if (queue.hasPending()) {\n yield* scheduleTick()\n } else {\n // Reset chain depth when the system becomes idle (avoid forcing a macrotask on the next unrelated tick).\n microtaskChainDepth = 0\n }\n }\n }), TaskRunner.inSyncTransactionFiber, false).pipe(Effect.forkDetach({ startImmediately: true }))\n })\n\n const flushTick = (schedule: TickSchedule): Effect.Effect<{ stable: boolean; degradeReason?: TickDegradeReason }, never, never> =>\n Effect.gen(function* () {\n if (!queue.hasPending()) {\n return { stable: true }\n }\n\n tickSeq += 1\n const currentTickSeq = tickSeq\n\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const traceMode = yield* Effect.service(Debug.currentTraceMode).pipe(Effect.orDie)\n const shouldEmitTrace = traceMode === 'on' && diagnosticsLevel !== 'off'\n const shouldEmitSchedulingDiagnostics = diagnosticsLevel !== 'off'\n\n const captured: {\n drainRounds: number\n stable: boolean\n degradeReason?: TickDegradeReason\n deferred?: RuntimeStorePendingDrain\n accepted: MutablePendingDrain\n } = {\n drainRounds: 0,\n stable: true,\n accepted: emptyDrain(),\n }\n\n // Fixpoint capture: drain -> apply declarative links -> drain (bounded by maxDrainRounds).\n while (captured.drainRounds < config.maxDrainRounds) {\n const drained = queue.drain()\n if (!drained) break\n captured.drainRounds += 1\n mergeDrainInPlace(captured.accepted, drained)\n\n if (declarativeLinks && drained.modules.size > 0) {\n const changedModuleInstanceKeys = Array.from(drained.modules.keys())\n 
yield* declarativeLinks.applyForSources({\n tickSeq: currentTickSeq,\n acceptedModules: captured.accepted.modules,\n changedModuleInstanceKeys,\n })\n }\n }\n\n if (queue.hasPending()) {\n captured.stable = false\n captured.degradeReason = 'cycle_detected'\n }\n\n // Budget enforcement (defer nonUrgent only; urgent may be cut only in cycle safety-break).\n //\n // Perf: fast-path the common case where a tick is far under both budgets:\n // - Avoid allocating new Maps in partitionModulesForBudget.\n // - Avoid re-walking dirtyTopics to split accepted/deferred topics.\n let acceptedModules: Map<ModuleInstanceKey, RuntimeStoreModuleCommit> = captured.accepted.modules\n let deferredModules: Map<ModuleInstanceKey, RuntimeStoreModuleCommit> | undefined = undefined\n let urgentCapExceeded = false\n let deferredNonUrgentCount = 0\n\n if (!(captured.accepted.modules.size <= config.maxSteps && captured.accepted.modules.size <= config.urgentStepCap)) {\n const partitioned = partitionModulesForBudget({\n modules: captured.accepted.modules,\n maxSteps: config.maxSteps,\n urgentStepCap: config.urgentStepCap,\n })\n acceptedModules = partitioned.acceptedModules\n deferredModules = partitioned.deferredModules\n urgentCapExceeded = partitioned.urgentCapExceeded\n deferredNonUrgentCount = partitioned.deferredNonUrgentCount\n }\n\n if (urgentCapExceeded) {\n captured.stable = false\n captured.degradeReason = 'cycle_detected'\n } else if (deferredNonUrgentCount > 0) {\n captured.stable = false\n captured.degradeReason = captured.degradeReason ?? 
'budget_steps'\n }\n\n const canAcceptAllTopics = deferredModules == null || deferredModules.size === 0\n\n const acceptedDrain: RuntimeStorePendingDrain = (() => {\n if (canAcceptAllTopics) {\n // Even when we accept all module topics, we must ignore non-parsable topic keys.\n // Otherwise, arbitrary strings would become \"real\" topics in RuntimeStore and create silent drift.\n if (captured.accepted.dirtyTopics.size === 0) {\n return captured.accepted as unknown as RuntimeStorePendingDrain\n }\n\n let hasNonParsable = false\n for (const topicKey of captured.accepted.dirtyTopics.keys()) {\n if (!storeTopicToModuleInstanceKey(topicKey)) {\n hasNonParsable = true\n break\n }\n }\n\n if (!hasNonParsable) {\n return captured.accepted as unknown as RuntimeStorePendingDrain\n }\n\n const acceptedTopics = new Map<string, StateCommitPriority>()\n for (const [topicKey, priority] of captured.accepted.dirtyTopics) {\n const moduleInstanceKey = storeTopicToModuleInstanceKey(topicKey)\n if (!moduleInstanceKey) continue\n acceptedTopics.set(topicKey, priority)\n }\n\n return {\n modules: acceptedModules,\n dirtyTopics: acceptedTopics,\n } satisfies RuntimeStorePendingDrain\n }\n\n const acceptedTopics = new Map<string, StateCommitPriority>()\n const deferredTopics = new Map<string, StateCommitPriority>()\n\n for (const [topicKey, priority] of captured.accepted.dirtyTopics) {\n const moduleInstanceKey = storeTopicToModuleInstanceKey(topicKey)\n if (!moduleInstanceKey) continue\n if (acceptedModules.has(moduleInstanceKey)) {\n acceptedTopics.set(topicKey, priority)\n } else if (deferredModules && deferredModules.has(moduleInstanceKey)) {\n deferredTopics.set(topicKey, priority)\n } else {\n // Conservative default: treat unknown topics as accepted.\n acceptedTopics.set(topicKey, priority)\n }\n }\n\n return {\n modules: acceptedModules,\n dirtyTopics: acceptedTopics,\n } satisfies RuntimeStorePendingDrain\n })()\n\n const deferredDrain: RuntimeStorePendingDrain | undefined =\n 
canAcceptAllTopics || !deferredModules\n ? undefined\n : deferredModules.size > 0\n ? {\n modules: deferredModules,\n dirtyTopics: (() => {\n const deferredTopics = new Map<string, StateCommitPriority>()\n for (const [topicKey, priority] of captured.accepted.dirtyTopics) {\n const moduleInstanceKey = storeTopicToModuleInstanceKey(topicKey)\n if (!moduleInstanceKey) continue\n if (deferredModules.has(moduleInstanceKey)) {\n deferredTopics.set(topicKey, priority)\n }\n }\n return deferredTopics\n })(),\n }\n : undefined\n\n captured.deferred = deferredDrain\n\n const anchorCommitForScheduling = (() => {\n let selected: RuntimeStoreModuleCommit | undefined\n for (const commit of acceptedModules.values()) {\n selected = pickNewerSchedulingCommit(selected, commit)\n }\n if (deferredDrain) {\n for (const commit of deferredDrain.modules.values()) {\n selected = pickNewerSchedulingCommit(selected, commit)\n }\n }\n return selected\n })()\n\n const schedulingAnchor = toSchedulingAnchor(anchorCommitForScheduling)\n const schedulingPolicy = anchorCommitForScheduling?.schedulingPolicy\n const schedulingConfigScope = schedulingPolicy?.configScope ?? 'builtin'\n const schedulingLimit = schedulingPolicy?.concurrencyLimit ?? 16\n const schedulingThreshold = schedulingPolicy?.pressureWarningThreshold ?? {\n backlogCount: 1000,\n backlogDurationMs: 5000,\n }\n const schedulingCooldownMs = schedulingPolicy?.warningCooldownMs ?? 30_000\n const backlogCount = deferredDrain ? deferredDrain.modules.size + deferredDrain.dirtyTopics.size : 0\n\n if (!captured.stable && shouldEmitSchedulingDiagnostics && schedulingAnchor && !lastSchedulingDegrade) {\n const reason = captured.degradeReason ?? 
'unknown'\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: schedulingAnchor.moduleId,\n instanceId: schedulingAnchor.instanceId,\n txnSeq: schedulingAnchor.txnSeq,\n txnId: schedulingAnchor.txnId,\n opSeq: schedulingAnchor.opSeq,\n code: 'scheduling::degrade',\n severity: 'warning',\n message: 'Scheduling degraded: tick execution deferred part of the backlog.',\n hint:\n 'Inspect reason/backlog and align queue/tick/concurrency knobs through the same scheduling policy surface.',\n kind: 'scheduling:degrade',\n trigger: {\n kind: 'tickScheduler',\n name: 'flushTick',\n details: {\n eventKind: 'degrade',\n reason,\n tickSeq: currentTickSeq,\n backlogCount,\n configScope: schedulingConfigScope,\n limit: schedulingLimit,\n threshold: schedulingThreshold,\n cooldownMs: schedulingCooldownMs,\n schedule: {\n startedAs: schedule.startedAs ?? 'unknown',\n forcedMacrotask: schedule.forcedMacrotask === true,\n reason: schedule.reason ?? 'unknown',\n microtaskChainDepth: schedule.microtaskChainDepth ?? 0,\n },\n },\n },\n })\n\n lastSchedulingDegrade = {\n tickSeq: currentTickSeq,\n reason,\n moduleId: schedulingAnchor.moduleId,\n instanceId: schedulingAnchor.instanceId,\n txnSeq: schedulingAnchor.txnSeq,\n txnId: schedulingAnchor.txnId,\n opSeq: schedulingAnchor.opSeq,\n configScope: schedulingConfigScope,\n limit: schedulingLimit,\n backlogCount,\n }\n } else if (captured.stable && lastSchedulingDegrade) {\n const previous = lastSchedulingDegrade\n if (shouldEmitSchedulingDiagnostics) {\n const recoverAnchor = schedulingAnchor ?? 
previous\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: recoverAnchor.moduleId,\n instanceId: recoverAnchor.instanceId,\n txnSeq: recoverAnchor.txnSeq,\n txnId: recoverAnchor.txnId,\n opSeq: recoverAnchor.opSeq,\n code: 'scheduling::recover',\n severity: 'info',\n message: 'Scheduling recovered: backlog/degrade condition cleared.',\n hint: 'No immediate action needed unless degrade/recover oscillates frequently.',\n kind: 'scheduling:recover',\n trigger: {\n kind: 'tickScheduler',\n name: 'flushTick',\n details: {\n eventKind: 'recover',\n tickSeq: currentTickSeq,\n fromTickSeq: previous.tickSeq,\n fromReason: previous.reason,\n previousBacklogCount: previous.backlogCount,\n configScope: previous.configScope,\n limit: previous.limit,\n schedule: {\n startedAs: schedule.startedAs ?? 'unknown',\n forcedMacrotask: schedule.forcedMacrotask === true,\n reason: schedule.reason ?? 'unknown',\n microtaskChainDepth: schedule.microtaskChainDepth ?? 0,\n },\n },\n },\n })\n }\n lastSchedulingDegrade = undefined\n }\n\n let startedAtMs: number | undefined\n let triggerSummary: any | undefined\n let anchor: any | undefined\n let backlog: any | undefined\n let result: any | undefined\n\n if (shouldEmitTrace) {\n startedAtMs = Date.now()\n\n let triggerTotal = 0\n let triggerPrimary: any = undefined\n let triggerAnchor: RuntimeStoreModuleCommit | undefined\n const triggerKindsOrder: TriggerKind[] = []\n let externalStoreCount = 0\n let dispatchCount = 0\n let timerCount = 0\n let unknownCount = 0\n\n for (const commit of captured.accepted.modules.values()) {\n if (!triggerAnchor) {\n triggerAnchor = commit\n }\n triggerTotal += 1\n\n const kind = toTriggerKind(commit.meta.originKind)\n if (!triggerPrimary) {\n triggerPrimary = {\n kind,\n moduleId: commit.moduleId,\n instanceId: commit.instanceId,\n fieldPath: kind === 'externalStore' ? commit.meta.originName : undefined,\n actionTag: kind === 'dispatch' ? 
commit.meta.originName : undefined,\n }\n }\n\n switch (kind) {\n case 'externalStore': {\n if (externalStoreCount === 0) triggerKindsOrder.push(kind)\n externalStoreCount += 1\n break\n }\n case 'dispatch': {\n if (dispatchCount === 0) triggerKindsOrder.push(kind)\n dispatchCount += 1\n break\n }\n case 'timer': {\n if (timerCount === 0) triggerKindsOrder.push(kind)\n timerCount += 1\n break\n }\n default: {\n if (unknownCount === 0) triggerKindsOrder.push(kind)\n unknownCount += 1\n break\n }\n }\n }\n\n const kinds: Array<{ kind: TriggerKind; count: number }> = []\n for (const kind of triggerKindsOrder) {\n switch (kind) {\n case 'externalStore':\n kinds.push({ kind, count: externalStoreCount })\n break\n case 'dispatch':\n kinds.push({ kind, count: dispatchCount })\n break\n case 'timer':\n kinds.push({ kind, count: timerCount })\n break\n default:\n kinds.push({ kind, count: unknownCount })\n break\n }\n }\n\n triggerSummary = {\n total: triggerTotal,\n kinds,\n primary: triggerPrimary,\n coalescedCount: {\n modules: coalescedModules,\n topics: coalescedTopics,\n },\n }\n\n if (triggerAnchor) {\n anchor = {\n moduleId: triggerAnchor.moduleId,\n instanceId: triggerAnchor.instanceId,\n txnSeq: triggerAnchor.meta.txnSeq,\n txnId: triggerAnchor.meta.txnId,\n ...(typeof triggerAnchor.opSeq === 'number' ? 
{ opSeq: triggerAnchor.opSeq } : null),\n }\n }\n\n const deferredWork = captured.deferred\n if (deferredWork) {\n const pendingDeferredWork = deferredWork.modules.size + deferredWork.dirtyTopics.size\n let pendingExternalInputs = 0\n let firstDeferred: RuntimeStoreModuleCommit | undefined\n let firstExternalStoreDeferred: RuntimeStoreModuleCommit | undefined\n\n for (const deferred of deferredWork.modules.values()) {\n if (!firstDeferred) {\n firstDeferred = deferred\n }\n const kind = toTriggerKind(deferred.meta.originKind)\n if (kind === 'externalStore') {\n pendingExternalInputs += 1\n if (!firstExternalStoreDeferred) {\n firstExternalStoreDeferred = deferred\n }\n }\n }\n\n const primaryDeferred = firstExternalStoreDeferred ?? firstDeferred\n let deferredPrimary: any = undefined\n if (primaryDeferred) {\n const kind = toTriggerKind(primaryDeferred.meta.originKind)\n const isExternalStore = kind === 'externalStore'\n deferredPrimary = {\n kind: isExternalStore ? ('externalStore' as const) : ('unknown' as const),\n moduleId: primaryDeferred.moduleId,\n instanceId: primaryDeferred.instanceId,\n fieldPath: isExternalStore ? primaryDeferred.meta.originName : undefined,\n storeId: undefined,\n }\n }\n\n backlog = {\n pendingExternalInputs,\n pendingDeferredWork,\n deferredPrimary,\n }\n }\n\n result = {\n stable: captured.stable,\n ...(captured.stable ? null : { degradeReason: captured.degradeReason ?? 
'unknown' }),\n } as const\n }\n\n if (shouldEmitTrace && schedule.forcedMacrotask && schedule.reason === 'microtask_starvation') {\n yield* Debug.record({\n type: 'warn:microtask-starvation',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n tickSeq: currentTickSeq,\n microtaskChainDepth: schedule.microtaskChainDepth,\n })\n }\n\n if (shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'start',\n timestampMs: startedAtMs!,\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: 0,\n steps: 0,\n txnCount: acceptedModules.size,\n },\n },\n })\n }\n\n if (!captured.stable && shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'budgetExceeded',\n timestampMs: Date.now(),\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: Math.max(0, Date.now() - startedAtMs!),\n steps: config.maxSteps,\n txnCount: acceptedModules.size,\n },\n backlog,\n result,\n },\n })\n }\n\n // Requeue deferred backlog before committing the tick, so the next tick can pick it up.\n if (deferredDrain) {\n queue.requeue(deferredDrain)\n }\n\n store.commitTick({\n tickSeq: currentTickSeq,\n accepted: acceptedDrain,\n onListener: (listener) => listener(),\n })\n\n if (!captured.stable && shouldEmitTrace && backlog?.deferredPrimary) {\n const primary = backlog.deferredPrimary\n if (primary.kind === 'externalStore') {\n const moduleInstanceKey =\n primary.moduleId && primary.instanceId ? 
(`${primary.moduleId}::${primary.instanceId}` as ModuleInstanceKey) : undefined\n if (moduleInstanceKey && store.getModuleSubscriberCount(moduleInstanceKey) > 0) {\n yield* Debug.record({\n type: 'warn:priority-inversion',\n moduleId: primary.moduleId,\n instanceId: primary.instanceId,\n tickSeq: currentTickSeq,\n reason: 'deferredBacklog',\n })\n }\n }\n }\n\n if (shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'settled',\n timestampMs: Date.now(),\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: Math.max(0, Date.now() - startedAtMs!),\n steps: acceptedModules.size,\n txnCount: acceptedModules.size,\n },\n backlog,\n result,\n },\n })\n }\n\n if (telemetry?.onTickDegraded && (schedule.forcedMacrotask || !captured.stable) && shouldSampleTick(currentTickSeq, telemetrySampleRate)) {\n try {\n telemetry.onTickDegraded({\n tickSeq: currentTickSeq,\n stable: captured.stable,\n degradeReason: captured.stable ? undefined : (captured.degradeReason ?? 'unknown'),\n forcedMacrotask: schedule.forcedMacrotask,\n scheduleReason: schedule.reason,\n microtaskChainDepth: schedule.microtaskChainDepth,\n deferredWorkCount: deferredDrain ? 
deferredDrain.modules.size + deferredDrain.dirtyTopics.size : 0,\n })\n } catch {\n // best-effort: never let user telemetry break the tick\n }\n }\n coalescedModules = 0\n coalescedTopics = 0\n\n return { stable: captured.stable, degradeReason: captured.degradeReason }\n })\n\n const flushNow: TickScheduler['flushNow'] = Effect.gen(function* () {\n const beforeTickSeq = tickSeq\n yield* flushTick({ startedAs: 'unknown' })\n\n if (tickSeq > beforeTickSeq) {\n return\n }\n\n if (!queue.hasPending() && !scheduled) {\n return\n }\n\n yield* Effect.yieldNow\n if (tickSeq > beforeTickSeq) {\n return\n }\n\n if (queue.hasPending()) {\n yield* flushTick({ startedAs: 'unknown' })\n }\n }).pipe(Effect.asVoid)\n\n const storeTopicToModuleInstanceKey = (topicKey: string): ModuleInstanceKey | undefined => {\n const cached = topicKeyToModuleInstanceKeyCache.get(topicKey)\n if (cached !== undefined) {\n return cached === null ? undefined : cached\n }\n\n const idx = topicKey.indexOf('::rq:')\n if (idx > 0) {\n return rememberTopicKeyResolution(topicKey, topicKey.slice(0, idx) as ModuleInstanceKey)\n }\n if (topicKey.includes('::')) {\n return rememberTopicKeyResolution(topicKey, topicKey as ModuleInstanceKey)\n }\n return rememberTopicKeyResolution(topicKey, undefined)\n }\n\n const onSelectorChanged: TickScheduler['onSelectorChanged'] = ({ moduleInstanceKey, selectorId, priority }) => {\n const coalesced = queue.markTopicDirty(makeReadQueryTopicKey(moduleInstanceKey, selectorId), priority)\n if (coalesced) coalescedTopics += 1\n }\n\n const onModuleCommit: TickScheduler['onModuleCommit'] = (commit) =>\n Effect.gen(function* () {\n const coalescedCommit = queue.enqueueModuleCommit(commit)\n if (coalescedCommit) coalescedModules += 1\n const coalescedTopic = queue.markTopicDirty(commit.moduleInstanceKey, commit.meta.priority)\n if (coalescedTopic) coalescedTopics += 1\n yield* scheduleTick()\n })\n\n return {\n getTickSeq: () => tickSeq,\n onModuleCommit,\n onSelectorChanged,\n 
flushNow,\n }\n}\n","import { Cause, Effect, ServiceMap, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport type { AnyModuleShape, LogicEffect } from './module.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\nimport type { StateTxnOrigin } from './StateTransaction.js'\nimport * as ModeRunner from './ModeRunner.js'\n\n/**\n * Prevents calling run*Task inside a \"synchronous transaction execution fiber\" (it would deadlock the txnQueue).\n *\n * - ModuleRuntime locally marks it as true while executing each transaction (dispatch/source-refresh/devtools/...).\n * - run*Task checks the flag on start: when true, it emits diagnostics only in dev/test and then no-ops.\n */\nexport const inSyncTransactionFiber = ServiceMap.Reference<boolean>('@logixjs/core/TaskRunner.inSyncTransactionFiber', {\n defaultValue: () => false,\n})\n\n/**\n * Force source.refresh:\n * - Default: when snapshot keyHash is unchanged and a non-idle snapshot already exists, refresh SHOULD be a no-op\n * (avoid redundant IO/writeback).\n * - Exception: explicit refresh (manual refresh) / invalidation-driven refresh needs to \"re-fetch even with the same keyHash\".\n *\n * Note: use a FiberRef to locally pass \"whether this refresh is forced\", avoiding expanding the source refresh handler signature.\n */\nexport const forceSourceRefresh = ServiceMap.Reference<boolean>('@logixjs/core/TaskRunner.forceSourceRefresh', {\n defaultValue: () => false,\n})\n\nlet inSyncTransactionShadowDepth = 0\n\nexport const enterSyncTransactionShadow = (): void => {\n inSyncTransactionShadowDepth += 1\n}\n\nexport const exitSyncTransactionShadow = (): void => {\n inSyncTransactionShadowDepth = Math.max(0, inSyncTransactionShadowDepth - 1)\n}\n\nexport const isInSyncTransactionShadow = (): boolean => inSyncTransactionShadowDepth > 0\n\nexport const enterSyncTransaction = 
(): void => {\n enterSyncTransactionShadow()\n}\n\nexport const exitSyncTransaction = (): void => {\n exitSyncTransactionShadow()\n}\n\nexport const isInSyncTransaction = (): boolean => isInSyncTransactionShadow()\n\nexport type TaskRunnerMode = ModeRunner.ModeRunnerMode\n\nexport type TaskStatus = 'idle' | 'pending' | 'running' | 'success' | 'failure' | 'interrupted'\n\nexport interface TaskExecution {\n readonly taskId: number\n readonly status: TaskStatus\n readonly acceptedAt: number\n readonly startedAt?: number\n readonly endedAt?: number\n}\n\nexport interface TaskRunnerOrigins {\n readonly pending?: StateTxnOrigin\n readonly success?: StateTxnOrigin\n readonly failure?: StateTxnOrigin\n}\n\ntype TaskHandler<Payload, Sh extends AnyModuleShape, R> =\n | LogicEffect<Sh, R, void, never>\n | ((payload: Payload) => LogicEffect<Sh, R, void, never>)\n\ntype TaskEffect<Payload, Sh extends AnyModuleShape, R, A, E> =\n | LogicEffect<Sh, R, A, E>\n | ((payload: Payload) => LogicEffect<Sh, R, A, E>)\n\nexport interface TaskRunnerConfig<Payload, Sh extends AnyModuleShape, R, A = void, E = never> {\n /**\n * Optional: trigger source name (e.g. 
actionTag / fieldPath), used as the default pending origin.name.\n * - BoundApiRuntime may fill this in for onAction(\"xxx\") / traits.source.refresh(\"field\"), etc.\n * - Other callers are not required to provide it.\n */\n readonly triggerName?: string\n\n /**\n * pending: synchronous state writes (loading=true / clearing errors, etc.), always a separate transaction entry.\n * - Only executed for tasks that are accepted and actually started (ignored triggers in runExhaustTask do not run pending).\n */\n readonly pending?: TaskHandler<Payload, Sh, R>\n\n /**\n * effect: real IO / async work (must run outside the transaction window).\n */\n readonly effect: TaskEffect<Payload, Sh, R, A, E>\n\n /**\n * success: success writeback (separate transaction entry).\n */\n readonly success?: (result: A, payload: Payload) => LogicEffect<Sh, R, void, never>\n\n /**\n * failure: failure writeback (separate transaction entry).\n *\n * Note: takes a Cause to preserve defect/interrupt semantics; interrupts do not trigger failure writeback by default.\n */\n readonly failure?: (cause: Cause.Cause<E>, payload: Payload) => LogicEffect<Sh, R, void, never>\n\n /**\n * origin: optional override for the three transaction origins.\n * - Default: pending.kind=\"task:pending\"; success/failure.kind=\"service-callback\".\n */\n readonly origin?: TaskRunnerOrigins\n\n /**\n * priority: reserved for future debugging/sorting; does not change transaction boundaries or concurrency semantics.\n */\n readonly priority?: number\n}\n\nexport interface TaskRunnerRuntime {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runWithStateTransaction: (\n origin: StateTxnOrigin,\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>\n readonly resolveConcurrencyPolicy?: () => Effect.Effect<RuntimeInternalsResolvedConcurrencyPolicy, never, any>\n}\n\nconst resolve = <Payload, Sh extends AnyModuleShape, R, A, E>(\n eff: TaskEffect<Payload, Sh, R, A, E> | 
TaskHandler<Payload, Sh, R>,\n payload: Payload,\n): any => (typeof eff === 'function' ? (eff as any)(payload) : eff)\n\nconst defaultOrigins = (triggerName: string | undefined): Required<TaskRunnerOrigins> => ({\n pending: {\n kind: 'task:pending',\n name: triggerName,\n },\n success: {\n kind: 'service-callback',\n name: 'task:success',\n },\n failure: {\n kind: 'service-callback',\n name: 'task:failure',\n },\n})\n\nexport const shouldNoopInSyncTransactionFiber = (options: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n}): Effect.Effect<boolean> =>\n Effect.gen(function* () {\n const inTxn = yield* Effect.service(inSyncTransactionFiber)\n if (!inTxn) {\n return false\n }\n // Always no-op regardless of env (otherwise we may deadlock); diagnostics are emitted only in dev/test.\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: options.moduleId,\n instanceId: options.instanceId,\n code: options.code,\n severity: options.severity,\n message: options.message,\n hint: options.hint,\n actionTag: options.actionTag,\n kind: options.kind,\n })\n }\n return true\n })\n\nconst resolveConcurrencyLimit = (runtime: TaskRunnerRuntime): Effect.Effect<number | 'unbounded', never, any> =>\n runtime.resolveConcurrencyPolicy\n ? 
runtime.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\nconst runTaskLifecycle = <Payload, Sh extends AnyModuleShape, R, A, E>(\n payload: Payload,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n getCanWriteBack?: Effect.Effect<boolean>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> =>\n Effect.gen(function* () {\n const noop = yield* shouldNoopInSyncTransactionFiber({\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_usage',\n severity: 'error',\n message: 'run*Task is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n 'Call run*Task from the run section of a watcher (e.g. $.onAction/$.onState/$.on); ' +\n 'do not call it directly inside a reducer / trait.run / synchronous transaction body. For long-lived flows, use a multi-entry pattern (pending → IO → writeback).',\n kind: 'run_task_in_transaction',\n })\n if (noop) {\n return\n }\n\n const defaults = defaultOrigins(config.triggerName)\n const origins: Required<TaskRunnerOrigins> = {\n pending: config.origin?.pending ?? defaults.pending,\n success: config.origin?.success ?? defaults.success,\n failure: config.origin?.failure ?? 
defaults.failure,\n }\n\n // 1) pending: separate transaction entry; once started it should not be interrupted by runLatest.\n const pending = config.pending\n if (pending) {\n yield* Effect.uninterruptible(\n runtime.runWithStateTransaction(origins.pending, () => Effect.asVoid(resolve(pending, payload))),\n )\n }\n\n // 2) IO: runs outside the transaction window.\n const io = resolve(config.effect, payload) as Effect.Effect<A, E, Logic.Env<Sh, R>>\n const exit = yield* Effect.exit(io)\n\n // 3) writeback: use the guard to confirm it's still the current task (runLatestTask).\n if (getCanWriteBack) {\n const ok = yield* getCanWriteBack\n if (!ok) {\n return\n }\n }\n\n if (exit._tag === 'Success') {\n const success = config.success\n if (success) {\n yield* runtime.runWithStateTransaction(origins.success, () => Effect.asVoid(success(exit.value, payload)))\n }\n return\n }\n\n // Failure: interruptions do not trigger failure writeback (e.g. runLatestTask cancellation, Scope ending).\n const cause = exit.cause as Cause.Cause<E>\n if (Cause.hasInterruptsOnly(cause)) {\n return\n }\n\n const failure = config.failure\n if (failure) {\n yield* runtime.runWithStateTransaction(origins.failure, () => Effect.asVoid(failure(cause, payload)))\n }\n }).pipe(\n // Watchers must not crash as a whole due to a single task failure: swallow errors, but keep them diagnosable.\n Effect.catchCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'task_runner::unhandled_failure',\n severity: 'error',\n message: 'TaskRunner encountered an unhandled failure (pending/IO/writeback).',\n hint: 'Add a failure writeback for this task or handle errors explicitly upstream; avoid fire-and-forget swallowing errors.',\n actionTag: config.triggerName,\n kind: 'task_runner_unhandled_failure',\n trigger: {\n kind: 'task',\n name: config.triggerName,\n },\n }).pipe(Effect.flatMap(() => Effect.logError('TaskRunner error', 
cause)))),\n ) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n\n/**\n * runTask:\n * - Uses shared ModeRunner semantics (task/parallel/latest/exhaust).\n * - Splits a single trigger into: pending (separate txn) → IO → success/failure (separate txn).\n */\nexport interface RunTaskConfig<Payload, Sh extends AnyModuleShape, R, A = void, E = never> {\n readonly stream: Stream.Stream<Payload>\n readonly mode?: TaskRunnerMode\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>\n}\n\nexport const runTask = <Payload, Sh extends AnyModuleShape, R, A = void, E = never>(\n args: RunTaskConfig<Payload, Sh, R, A, E>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> => {\n const mode = args.mode ?? 'task'\n const runtime = args.runtime\n const config = args.config\n\n return ModeRunner.runByMode<Payload, never, Logic.Env<Sh, R>>({\n stream: args.stream,\n mode,\n run: (payload) => runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config),\n runLatest: (payload, context) =>\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config, context.isCurrent),\n resolveConcurrencyLimit: resolveConcurrencyLimit(runtime),\n latest: {\n strategy: 'fiber-slot',\n // Keep TaskRunner behavior: triggers are acknowledged once started; no need to await final IO on stream completion.\n awaitLatestOnEnd: false,\n },\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n}\n\n/**\n * makeTaskRunner:\n * - Backward-compatible thin alias for runTask.\n * - Signature stays stable for existing Bound API callsites.\n */\nexport const makeTaskRunner = <Payload, Sh extends AnyModuleShape, R, A = void, E = never>(\n stream: Stream.Stream<Payload>,\n mode: TaskRunnerMode,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> =>\n runTask<Payload, Sh, R, A, E>({\n stream,\n mode,\n runtime,\n config,\n })\n","import { Effect, Fiber, Ref, Stream } from 'effect'\nimport * as LatestFiberSlot from 
'./LatestFiberSlot.js'\n\nconst EXHAUST_ACQUIRE_BUSY = [true, true] as const\nconst EXHAUST_REJECT_BUSY = [false, true] as const\n\nexport type ModeRunnerMode =\n | 'task' // sequential\n | 'parallel'\n | 'latest'\n | 'exhaust'\n\nexport type ModeRunnerLatestStrategy = 'switch' | 'fiber-slot'\n\nexport interface ModeRunnerLatestContext {\n readonly runId: number\n readonly isCurrent: Effect.Effect<boolean>\n}\n\nexport interface ModeRunnerConfig<Payload, E, R> {\n readonly stream: Stream.Stream<Payload>\n readonly mode: ModeRunnerMode\n readonly run: (payload: Payload) => Effect.Effect<void, E, R>\n readonly runLatest?: (payload: Payload, context: ModeRunnerLatestContext) => Effect.Effect<void, E, R>\n readonly resolveConcurrencyLimit: Effect.Effect<number | 'unbounded', never, any>\n readonly latest?: {\n readonly strategy?: ModeRunnerLatestStrategy\n readonly awaitLatestOnEnd?: boolean\n }\n}\n\ntype SwitchLatestState = {\n runningId: number\n nextId: number\n}\n\nconst beginSwitchLatestRun = (stateRef: Ref.Ref<SwitchLatestState>) =>\n Ref.modify(stateRef, (state) => {\n const runId = state.nextId + 1\n state.nextId = runId\n state.runningId = runId\n return [runId, state] as const\n })\n\nconst clearSwitchLatestIfCurrent = (stateRef: Ref.Ref<SwitchLatestState>, runId: number) =>\n Ref.update(stateRef, (state) => {\n if (state.runningId === runId) {\n state.runningId = 0\n }\n return state\n })\n\nconst runLatestSwitch = <Payload, E, R>(\n stream: Stream.Stream<Payload>,\n runLatest: (payload: Payload, context: ModeRunnerLatestContext) => Effect.Effect<void, E, R>,\n): Effect.Effect<void, E, R> =>\n Effect.gen(function* () {\n const stateRef = yield* Ref.make<SwitchLatestState>({\n runningId: 0,\n nextId: 0,\n })\n\n const makeEffect = (payload: Payload) =>\n Effect.gen(function* () {\n const runId = yield* beginSwitchLatestRun(stateRef)\n const isCurrent = Ref.get(stateRef).pipe(Effect.map((state) => state.runningId === runId))\n yield* runLatest(payload, { 
runId, isCurrent }).pipe(Effect.ensuring(clearSwitchLatestIfCurrent(stateRef, runId)))\n })\n\n return yield* Stream.runDrain(\n Stream.map(stream, makeEffect).pipe(Stream.switchMap((effect) => Stream.fromEffect(effect))),\n )\n })\n\nconst runLatestFiberSlot = <Payload, E, R>(\n stream: Stream.Stream<Payload>,\n runLatest: (payload: Payload, context: ModeRunnerLatestContext) => Effect.Effect<void, E, R>,\n awaitLatestOnEnd: boolean,\n): Effect.Effect<void, E, R> =>\n Effect.gen(function* () {\n const stateRef = yield* LatestFiberSlot.make<E>()\n\n const start = (payload: Payload) =>\n Effect.gen(function* () {\n const [prevFiber, prevRunningId, runId] = yield* LatestFiberSlot.beginRun(stateRef)\n\n if (prevFiber && prevRunningId !== 0) {\n // Do not wait for full shutdown of old work; stale writes are guarded by runId.\n yield* Fiber.interrupt(prevFiber)\n }\n\n const isCurrent = Ref.get(stateRef).pipe(Effect.map((state) => state.runningId === runId))\n const fiber = yield* Effect.forkChild(\n runLatest(payload, { runId, isCurrent }).pipe(Effect.ensuring(LatestFiberSlot.clearIfCurrent(stateRef, runId))),\n )\n yield* LatestFiberSlot.setFiberIfCurrent(stateRef, runId, fiber)\n })\n\n yield* Stream.runForEach(stream, start)\n\n if (!awaitLatestOnEnd) {\n return\n }\n\n const finalState = yield* Ref.get(stateRef)\n const finalFiber = finalState.runningId !== 0 ? finalState.fiber : undefined\n if (finalFiber) {\n yield* Fiber.join(finalFiber)\n }\n })\n\nconst runExhaust = <Payload, E, R>(\n stream: Stream.Stream<Payload>,\n run: (payload: Payload) => Effect.Effect<void, E, R>,\n resolveConcurrencyLimit: Effect.Effect<number | 'unbounded', never, any>,\n): Effect.Effect<void, E, R> =>\n Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit\n const busyRef = yield* Ref.make(false)\n\n const mapper = (payload: Payload) =>\n Effect.gen(function* () {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
EXHAUST_REJECT_BUSY : EXHAUST_ACQUIRE_BUSY,\n )\n if (!acquired) {\n return\n }\n try {\n yield* run(payload)\n } finally {\n yield* Ref.set(busyRef, false)\n }\n })\n\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(mapper, { concurrency })))\n })\n\nconst runParallel = <Payload, E, R>(\n stream: Stream.Stream<Payload>,\n run: (payload: Payload) => Effect.Effect<void, E, R>,\n resolveConcurrencyLimit: Effect.Effect<number | 'unbounded', never, any>,\n): Effect.Effect<void, E, R> =>\n Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(run, { concurrency })))\n })\n\nexport const runByMode = <Payload, E, R>(config: ModeRunnerConfig<Payload, E, R>): Effect.Effect<void, E, R> => {\n const runLatest = config.runLatest ?? ((payload: Payload) => config.run(payload))\n if (config.mode === 'latest') {\n const strategy = config.latest?.strategy ?? 'switch'\n if (strategy === 'fiber-slot') {\n return runLatestFiberSlot(config.stream, runLatest, config.latest?.awaitLatestOnEnd ?? 
false)\n }\n return runLatestSwitch(config.stream, runLatest)\n }\n\n if (config.mode === 'exhaust') {\n return runExhaust(config.stream, config.run, config.resolveConcurrencyLimit)\n }\n\n if (config.mode === 'parallel') {\n return runParallel(config.stream, config.run, config.resolveConcurrencyLimit)\n }\n\n return Stream.runForEach(config.stream, config.run)\n}\n","import { Fiber, Ref } from 'effect'\n\nexport type LatestFiberSlotState<E = never> = {\n fiber: Fiber.Fiber<void, E> | undefined\n runningId: number\n nextId: number\n}\n\nexport const make = <E = never>() =>\n Ref.make<LatestFiberSlotState<E>>({\n fiber: undefined,\n runningId: 0,\n nextId: 0,\n })\n\nexport const beginRun = <E>(slotRef: Ref.Ref<LatestFiberSlotState<E>>) =>\n Ref.modify(slotRef, (state) => {\n const runId = state.nextId + 1\n const prevFiber = state.fiber\n const prevRunningId = state.runningId\n state.nextId = runId\n state.runningId = runId\n return [[prevFiber, prevRunningId, runId] as const, state] as const\n })\n\nexport const setFiberIfCurrent = <E>(\n slotRef: Ref.Ref<LatestFiberSlotState<E>>,\n runId: number,\n fiber: Fiber.Fiber<void, E>,\n) =>\n Ref.update(slotRef, (state) => {\n if (state.runningId === runId) {\n state.fiber = fiber\n }\n return state\n })\n\nexport const clearIfCurrent = <E>(slotRef: Ref.Ref<LatestFiberSlotState<E>>, runId: number) =>\n Ref.update(slotRef, (state) => {\n if (state.runningId === runId) {\n state.runningId = 0\n state.fiber = undefined\n }\n return state\n })\n","import { Effect } from 'effect'\nimport type { ReadQueryCompiled } from './ReadQuery.js'\nimport type { DeclarativeLinkIR, DeclarativeLinkNodeId } from './DeclarativeLinkIR.js'\nimport type { ModuleInstanceKey, RuntimeStoreModuleCommit } from './RuntimeStore.js'\n\nexport interface ModuleAsSourceLink {\n readonly id: string\n readonly sourceModuleInstanceKey: ModuleInstanceKey\n readonly readQuery: ReadQueryCompiled<any, any>\n readonly computeValue: (snapshot: unknown) => 
unknown\n readonly equalsValue: (a: unknown, b: unknown) => boolean\n readonly applyValue: (next: unknown) => Effect.Effect<void, never, never>\n}\n\nexport interface DeclarativeLinkRegistration {\n readonly linkId: string\n readonly ir: DeclarativeLinkIR\n readonly readNodes: ReadonlyArray<{\n readonly nodeId: DeclarativeLinkNodeId\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly readQuery: ReadQueryCompiled<any, any>\n }>\n readonly dispatchNodes: ReadonlyArray<{\n readonly nodeId: DeclarativeLinkNodeId\n readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never>\n }>\n}\n\nexport interface DeclarativeLinkRuntime {\n readonly registerModuleAsSourceLink: (link: ModuleAsSourceLink) => () => void\n readonly registerDeclarativeLink: (link: DeclarativeLinkRegistration) => () => void\n readonly applyForSources: (args: {\n readonly tickSeq: number\n readonly acceptedModules: ReadonlyMap<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly changedModuleInstanceKeys: ReadonlyArray<ModuleInstanceKey>\n }) => Effect.Effect<{ readonly scheduled: boolean }, never, never>\n}\n\ntype StoredModuleAsSourceLink = ModuleAsSourceLink & {\n hasValue: boolean\n lastValue: unknown\n}\n\ntype StoredDeclarativeLink = DeclarativeLinkRegistration & {\n readonly readNodeById: ReadonlyMap<string, { readonly nodeId: DeclarativeLinkNodeId; readonly moduleInstanceKey: ModuleInstanceKey; readonly readQuery: ReadQueryCompiled<any, any> }>\n readonly dispatchNodeById: ReadonlyMap<string, { readonly nodeId: DeclarativeLinkNodeId; readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never> }>\n readonly dispatchTargetsByReadNode: ReadonlyMap<string, ReadonlyArray<DeclarativeLinkNodeId>>\n readonly readNodeState: Map<string, { hasValue: boolean; lastValue: unknown }>\n}\n\nexport const makeDeclarativeLinkRuntime = (): DeclarativeLinkRuntime => {\n const moduleAsSourceById = new Map<string, StoredModuleAsSourceLink>()\n const moduleAsSourceIdsBySource = new 
Map<ModuleInstanceKey, Set<string>>()\n\n const declarativeById = new Map<string, StoredDeclarativeLink>()\n const declarativeReadNodesBySource = new Map<ModuleInstanceKey, Array<{ readonly linkId: string; readonly nodeId: DeclarativeLinkNodeId }>>()\n\n const registerModuleAsSourceLink: DeclarativeLinkRuntime['registerModuleAsSourceLink'] = (link) => {\n const stored: StoredModuleAsSourceLink = {\n ...link,\n hasValue: false,\n lastValue: undefined,\n }\n\n moduleAsSourceById.set(link.id, stored)\n const set = moduleAsSourceIdsBySource.get(link.sourceModuleInstanceKey) ?? new Set<string>()\n set.add(link.id)\n moduleAsSourceIdsBySource.set(link.sourceModuleInstanceKey, set)\n\n return () => {\n moduleAsSourceById.delete(link.id)\n const current = moduleAsSourceIdsBySource.get(link.sourceModuleInstanceKey)\n if (!current) return\n current.delete(link.id)\n if (current.size === 0) {\n moduleAsSourceIdsBySource.delete(link.sourceModuleInstanceKey)\n }\n }\n }\n\n const registerDeclarativeLink: DeclarativeLinkRuntime['registerDeclarativeLink'] = (link) => {\n const readNodeById = new Map<string, { readonly nodeId: DeclarativeLinkNodeId; readonly moduleInstanceKey: ModuleInstanceKey; readonly readQuery: ReadQueryCompiled<any, any> }>()\n for (const n of link.readNodes) {\n readNodeById.set(n.nodeId, n)\n }\n\n const dispatchNodeById = new Map<\n string,\n { readonly nodeId: DeclarativeLinkNodeId; readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never> }\n >()\n for (const n of link.dispatchNodes) {\n dispatchNodeById.set(n.nodeId, n)\n }\n\n // V1 constraint: dispatch must have at most one incoming edge, interpreted as \"payload flow\".\n const incomingByDispatch = new Map<string, number>()\n for (const e of link.ir.edges) {\n const to = e.to\n const isDispatch = dispatchNodeById.has(to)\n if (!isDispatch) continue\n incomingByDispatch.set(to, (incomingByDispatch.get(to) ?? 0) + 1)\n const count = incomingByDispatch.get(to) ?? 
0\n if (count > 1) {\n throw new Error(\n `[DeclarativeLinkRuntime] Invalid DeclarativeLinkIR: dispatch node has multiple incoming edges (linkId=${link.linkId}, nodeId=${to}).`,\n )\n }\n }\n\n const dispatchTargetsByReadNode = new Map<string, Array<DeclarativeLinkNodeId>>()\n for (const e of link.ir.edges) {\n const from = e.from\n const to = e.to\n if (!readNodeById.has(from)) continue\n if (!dispatchNodeById.has(to)) continue\n const list = dispatchTargetsByReadNode.get(from) ?? []\n list.push(to)\n dispatchTargetsByReadNode.set(from, list)\n }\n\n const stored: StoredDeclarativeLink = {\n ...link,\n readNodeById,\n dispatchNodeById,\n dispatchTargetsByReadNode,\n readNodeState: new Map(),\n }\n\n declarativeById.set(link.linkId, stored)\n\n for (const n of link.readNodes) {\n const list = declarativeReadNodesBySource.get(n.moduleInstanceKey) ?? []\n list.push({ linkId: link.linkId, nodeId: n.nodeId })\n declarativeReadNodesBySource.set(n.moduleInstanceKey, list)\n }\n\n return () => {\n declarativeById.delete(link.linkId)\n for (const n of link.readNodes) {\n const list = declarativeReadNodesBySource.get(n.moduleInstanceKey)\n if (!list) continue\n const next = list.filter((x) => !(x.linkId === link.linkId && x.nodeId === n.nodeId))\n if (next.length === 0) {\n declarativeReadNodesBySource.delete(n.moduleInstanceKey)\n } else {\n declarativeReadNodesBySource.set(n.moduleInstanceKey, next)\n }\n }\n }\n }\n\n const applyForSources: DeclarativeLinkRuntime['applyForSources'] = (args) =>\n Effect.gen(function* () {\n let scheduled = false\n\n // ---- Module-as-Source edges (module readQuery -> externalStore trait writeback) ----\n for (const sourceKey of args.changedModuleInstanceKeys) {\n const ids = moduleAsSourceIdsBySource.get(sourceKey)\n if (!ids || ids.size === 0) continue\n const commit = args.acceptedModules.get(sourceKey)\n if (!commit) continue\n\n for (const id of ids) {\n const link = moduleAsSourceById.get(id)\n if (!link) continue\n\n let selected: 
unknown\n try {\n selected = link.readQuery.select(commit.state as any)\n } catch {\n continue\n }\n\n const nextValue = link.computeValue(selected)\n if (link.hasValue && link.equalsValue(link.lastValue, nextValue)) {\n continue\n }\n\n link.hasValue = true\n link.lastValue = nextValue\n scheduled = true\n yield* link.applyValue(nextValue)\n }\n }\n\n // ---- DeclarativeLinkIR edges (module readQuery -> dispatch) ----\n for (const sourceKey of args.changedModuleInstanceKeys) {\n const refs = declarativeReadNodesBySource.get(sourceKey)\n if (!refs || refs.length === 0) continue\n const commit = args.acceptedModules.get(sourceKey)\n if (!commit) continue\n\n for (const ref of refs) {\n const link = declarativeById.get(ref.linkId)\n if (!link) continue\n const readNode = link.readNodeById.get(ref.nodeId)\n if (!readNode) continue\n\n let value: unknown\n try {\n value = readNode.readQuery.select(commit.state as any)\n } catch {\n continue\n }\n\n const state = link.readNodeState.get(ref.nodeId) ?? { hasValue: false, lastValue: undefined }\n const changed = !state.hasValue || !Object.is(state.lastValue, value)\n if (!changed) continue\n\n state.hasValue = true\n state.lastValue = value\n link.readNodeState.set(ref.nodeId, state)\n\n const targets = link.dispatchTargetsByReadNode.get(ref.nodeId) ?? 
[]\n for (const dispatchNodeId of targets) {\n const node = link.dispatchNodeById.get(dispatchNodeId)\n if (!node) continue\n scheduled = true\n yield* node.dispatch(value)\n }\n }\n }\n\n return { scheduled } as const\n })\n\n return {\n registerModuleAsSourceLink,\n registerDeclarativeLink,\n applyForSources,\n }\n}\n","import { Effect, Option, ServiceMap } from 'effect'\nimport { isDevEnv } from './env.js'\n\n/**\n * Stable identifier for a kernel variant (requested kernel family).\n *\n * - Recommended: `[a-z0-9-]+` (lower-kebab).\n * - Recommended reserved names: `core` (builtin semantics), `core-ng` (history/comparison).\n */\nexport type KernelId = 'core' | 'core-ng' | (string & {})\n\nconst isKernelId = (value: unknown): value is KernelId =>\n typeof value === 'string' && value.length > 0 && /^[a-z0-9-]+$/.test(value)\n\nexport interface KernelImplementationRef {\n /**\n * Requested kernel family id (not a version number).\n * Actual activation / fallback must be interpreted via RuntimeServicesEvidence.\n */\n readonly kernelId: KernelId\n /** The npm package that provides the kernel implementation. */\n readonly packageName: string\n /** Optional semver for explainability (not used as a semantic anchor). */\n readonly packageVersion?: string\n /** Optional build hash/id for evidence diff explainability. */\n readonly buildId?: string\n /** Explainability-only labels; must not become semantic switches. 
*/\n readonly capabilities?: ReadonlyArray<string>\n}\n\nexport const defaultKernelImplementationRef = {\n kernelId: 'core',\n packageName: '@logixjs/core',\n} as const satisfies KernelImplementationRef\n\nexport const isKernelImplementationRef = (value: unknown): value is KernelImplementationRef => {\n if (typeof value !== 'object' || value === null || Array.isArray(value)) return false\n\n const keys = Object.keys(value)\n for (const k of keys) {\n if (k !== 'kernelId' && k !== 'packageName' && k !== 'packageVersion' && k !== 'buildId' && k !== 'capabilities') {\n return false\n }\n }\n\n const v: any = value\n if (!isKernelId(v.kernelId)) return false\n if (typeof v.packageName !== 'string' || v.packageName.length === 0) return false\n\n if (v.packageVersion !== undefined && (typeof v.packageVersion !== 'string' || v.packageVersion.length === 0)) {\n return false\n }\n if (v.buildId !== undefined && (typeof v.buildId !== 'string' || v.buildId.length === 0)) {\n return false\n }\n if (v.capabilities !== undefined) {\n if (!Array.isArray(v.capabilities)) return false\n if (!v.capabilities.every((c: unknown) => typeof c === 'string')) return false\n }\n\n return true\n}\n\nexport const normalizeKernelImplementationRef = (\n value: unknown,\n fallback: KernelImplementationRef = defaultKernelImplementationRef,\n): KernelImplementationRef => {\n if (typeof value !== 'object' || value === null || Array.isArray(value)) return fallback\n\n const v: any = value\n if (!isKernelId(v.kernelId)) return fallback\n if (typeof v.packageName !== 'string' || v.packageName.length === 0) return fallback\n\n return {\n kernelId: v.kernelId,\n packageName: v.packageName,\n ...(typeof v.packageVersion === 'string' && v.packageVersion.length > 0\n ? { packageVersion: v.packageVersion }\n : {}),\n ...(typeof v.buildId === 'string' && v.buildId.length > 0 ? { buildId: v.buildId } : {}),\n ...(Array.isArray(v.capabilities) && v.capabilities.every((c: unknown) => typeof c === 'string')\n ? 
{ capabilities: v.capabilities as ReadonlyArray<string> }\n : {}),\n }\n}\n\nclass KernelImplementationRefTagImpl extends ServiceMap.Service<\n KernelImplementationRefTagImpl,\n KernelImplementationRef\n>()('@logixjs/core/KernelImplementationRef') {}\n\nexport const KernelImplementationRefTag = KernelImplementationRefTagImpl\n\nexport const resolveKernelImplementationRef = (): Effect.Effect<KernelImplementationRef, never, any> =>\n Effect.gen(function* () {\n const opt = yield* Effect.serviceOption(KernelImplementationRefTag)\n return normalizeKernelImplementationRef(Option.isSome(opt) ? opt.value : undefined)\n })\n\nconst KERNEL_IMPLEMENTATION_REF = Symbol.for('@logixjs/core/kernelImplementationRef')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const setKernelImplementationRef = (runtime: object, ref: KernelImplementationRef): void => {\n defineHidden(runtime, KERNEL_IMPLEMENTATION_REF, ref)\n}\n\nexport const getKernelImplementationRef = (runtime: object): KernelImplementationRef => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const ref = (runtime as any)[KERNEL_IMPLEMENTATION_REF] as KernelImplementationRef | undefined\n if (!ref) {\n const msg = isDevEnv()\n ? 
[\n '[MissingKernelImplementationRef] KernelImplementationRef not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make installs KernelImplementationRef (045 kernel contract).',\n '- If you created a mock runtime for tests, attach KernelImplementationRef or avoid calling kernel-only APIs.',\n ].join('\\n')\n : 'KernelImplementationRef not installed'\n throw new Error(msg)\n }\n return ref\n}\n","import { Effect, ServiceMap } from 'effect'\n\nexport interface RuntimeServiceBuiltins {\n /**\n * Returns the make Effect of a builtin implementation (provided by ModuleRuntime during assembly, avoiding external code\n * capturing internal closures/state).\n * - For kernel implementors only (e.g. core-ng) to implement behavior-equivalent replacements or thin wrappers.\n * - Not an app-facing contract; must not be depended on from business Flow/Logic.\n */\n readonly getBuiltinMake: (serviceId: string) => Effect.Effect<unknown, never, any>\n}\n\nexport class RuntimeServiceBuiltinsTag extends ServiceMap.Service<\n RuntimeServiceBuiltinsTag,\n RuntimeServiceBuiltins\n>()('@logixjs/core/RuntimeServiceBuiltins') {}\n","import { Effect, Layer, ServiceMap } from 'effect'\nimport type { JsonValue } from './jsonValue.js'\nimport { projectJsonValue } from './jsonValue.js'\nimport type { EvidencePackage } from './evidence.js'\nimport { collectEvidenceExport, reExportEvidencePackage, summarizeEvidenceExport } from './evidenceExportPipeline.js'\nimport type { RunSession } from './runSession.js'\nimport { makeEvidenceSink } from './runSession.js'\nimport {\n currentDiagnosticsLevel,\n toRuntimeDebugEventRef,\n type Event as DebugEvent,\n type Sink as DebugSink,\n} from '../runtime/core/DebugSink.js'\n\nexport interface EvidenceCollector {\n readonly session: RunSession\n readonly debugSink: DebugSink\n readonly registerConvergeStaticIr: (staticIr: unknown) => void\n readonly 
setKernelImplementationRef: (ref: unknown) => void\n readonly setRuntimeServicesEvidence: (evidence: unknown) => void\n readonly exportEvidencePackage: (options?: { readonly maxEvents?: number }) => EvidencePackage\n readonly clear: () => void\n}\n\nclass EvidenceCollectorTagImpl extends ServiceMap.Service<\n EvidenceCollectorTagImpl,\n EvidenceCollector\n>()('@logixjs/core/EvidenceCollector') {}\n\nexport const EvidenceCollectorTag = EvidenceCollectorTagImpl\n\nexport const evidenceCollectorLayer = (\n collector: EvidenceCollector,\n): Layer.Layer<EvidenceCollectorTagImpl, never, never> =>\n Layer.succeed(EvidenceCollectorTag, collector) as Layer.Layer<EvidenceCollectorTagImpl, never, never>\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nexport const makeEvidenceCollector = (session: RunSession): EvidenceCollector => {\n const sink = makeEvidenceSink(session)\n\n const convergeStaticIrByDigest = new Map<string, JsonValue>()\n let kernelImplementationRef: JsonValue | undefined\n let runtimeServicesEvidence: JsonValue | undefined\n\n const exportBudget = {\n dropped: 0,\n oversized: 0,\n nonSerializable: 0,\n }\n\n const debugSink: DebugSink = {\n record: (event: DebugEvent) =>\n Effect.gen(function* () {\n const level = yield* Effect.service(currentDiagnosticsLevel)\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n const eventSeq = level === 'off' ? 
undefined : session.local.nextSeq('eventSeq', instanceId)\n const ref = toRuntimeDebugEventRef(event, {\n diagnosticsLevel: level,\n eventSeq,\n onMetaProjection: ({ stats }) => {\n exportBudget.dropped += stats.dropped\n exportBudget.oversized += stats.oversized\n },\n })\n if (!ref) return\n\n const projected = projectJsonValue(ref)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n\n sink.record('debug:event', projected.value, {\n timestamp: ref.timestamp,\n })\n }),\n }\n\n const registerConvergeStaticIr = (staticIr: unknown): void => {\n if (!isRecord(staticIr)) return\n const digest = staticIr.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return\n const projected = projectJsonValue(staticIr)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n convergeStaticIrByDigest.set(digest, projected.value)\n }\n\n const setKernelImplementationRef = (ref: unknown): void => {\n const projected = projectJsonValue(ref)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n kernelImplementationRef = projected.value\n }\n\n const setRuntimeServicesEvidence = (evidence: unknown): void => {\n const projected = projectJsonValue(evidence)\n exportBudget.dropped += projected.stats.dropped\n exportBudget.oversized += projected.stats.oversized\n exportBudget.nonSerializable += projected.stats.nonSerializable\n runtimeServicesEvidence = projected.value\n }\n\n const exportEvidencePackage = (options?: { readonly maxEvents?: number }): EvidencePackage => {\n const collection = collectEvidenceExport({\n convergeStaticIrByDigest,\n kernelImplementationRef,\n runtimeServicesEvidence,\n })\n const summary = 
summarizeEvidenceExport(collection)\n\n return reExportEvidencePackage({\n sink,\n maxEvents: options?.maxEvents,\n summary,\n })\n }\n\n const clear = (): void => {\n sink.clear()\n convergeStaticIrByDigest.clear()\n kernelImplementationRef = undefined\n runtimeServicesEvidence = undefined\n exportBudget.dropped = 0\n exportBudget.oversized = 0\n exportBudget.nonSerializable = 0\n }\n\n return {\n session,\n debugSink,\n registerConvergeStaticIr,\n setKernelImplementationRef,\n setRuntimeServicesEvidence,\n exportEvidencePackage,\n clear,\n }\n}\n","import { Layer, ServiceMap } from 'effect'\nimport type { JsonValue } from './jsonValue.js'\nimport type { EvidencePackage, EvidencePackageSource, ObservationEnvelope } from './evidence.js'\nimport { exportEvidencePackage, OBSERVABILITY_PROTOCOL_VERSION } from './evidence.js'\n\nexport type RunId = string\n\nexport interface RunSessionLocalState {\n /**\n * once: a de-dup key set for \"emit only once\" behavior (must be isolated per session to avoid cross-session pollution).\n * Returns true if it's the first occurrence, false if the key has been seen before.\n */\n readonly once: (key: string) => boolean\n\n /**\n * seq: allocate monotonic sequences by key (e.g. opSeq/eventSeq), must be isolated per session.\n */\n readonly nextSeq: (namespace: string, key: string) => number\n\n /** Tests/reset only: clear this session's local state. 
*/\n readonly clear: () => void\n}\n\nexport interface RunSession {\n readonly runId: RunId\n readonly source: EvidencePackageSource\n readonly startedAt: number\n readonly local: RunSessionLocalState\n}\n\nclass RunSessionTagImpl extends ServiceMap.Service<RunSessionTagImpl, RunSession>()('@logixjs/core/RunSession') {}\n\nexport const RunSessionTag = RunSessionTagImpl\n\nexport interface EvidenceSink {\n readonly record: (type: string, payload: JsonValue, options?: { readonly timestamp?: number }) => void\n readonly export: (options?: {\n readonly protocolVersion?: string\n readonly createdAt?: number\n readonly summary?: JsonValue\n readonly maxEvents?: number\n }) => EvidencePackage\n readonly clear: () => void\n}\n\nconst NEXT_RUN_SEQ_KEY = Symbol.for('@logixjs/core/runSession/nextRunSeq')\nlet fallbackNextRunSeq = 0\n\nconst nextRunSeq = (): number => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const g: any = globalThis as any\n const prev = typeof g[NEXT_RUN_SEQ_KEY] === 'number' ? (g[NEXT_RUN_SEQ_KEY] as number) : 0\n const next = prev + 1\n g[NEXT_RUN_SEQ_KEY] = next\n return next\n } catch {\n fallbackNextRunSeq += 1\n return fallbackNextRunSeq\n }\n}\n\nconst makeRunId = (startedAt: number): RunId => `run-${startedAt}.${nextRunSeq()}`\n\nexport const makeRunSessionLocalState = (): RunSessionLocalState => {\n const onceKeys = new Set<string>()\n const seqByNamespace = new Map<string, Map<string, number>>()\n\n return {\n once: (key) => {\n if (onceKeys.has(key)) return false\n onceKeys.add(key)\n return true\n },\n nextSeq: (namespace, key) => {\n const byKey = seqByNamespace.get(namespace) ?? new Map<string, number>()\n if (!seqByNamespace.has(namespace)) seqByNamespace.set(namespace, byKey)\n const prev = byKey.get(key) ?? 
0\n const next = prev + 1\n byKey.set(key, next)\n return next\n },\n clear: () => {\n onceKeys.clear()\n seqByNamespace.clear()\n },\n }\n}\n\nexport const makeRunSession = (options?: {\n readonly runId?: RunId\n readonly source?: EvidencePackageSource\n readonly startedAt?: number\n readonly local?: RunSessionLocalState\n}): RunSession => {\n const startedAt = options?.startedAt ?? Date.now()\n return {\n runId: options?.runId ?? makeRunId(startedAt),\n source: options?.source ?? { host: 'unknown' },\n startedAt,\n local: options?.local ?? makeRunSessionLocalState(),\n }\n}\n\nexport const makeEvidenceSink = (session: RunSession): EvidenceSink => {\n const events: ObservationEnvelope[] = []\n let nextSeq = 1\n\n return {\n record: (type, payload, options) => {\n events.push({\n protocolVersion: OBSERVABILITY_PROTOCOL_VERSION,\n runId: session.runId,\n seq: nextSeq++,\n timestamp: options?.timestamp ?? Date.now(),\n type,\n payload,\n })\n },\n export: (options) => {\n const protocolVersion = options?.protocolVersion ?? OBSERVABILITY_PROTOCOL_VERSION\n const maxEvents = options?.maxEvents\n\n const selected =\n typeof maxEvents === 'number' && Number.isFinite(maxEvents) && maxEvents > 0\n ? events.slice(Math.max(0, events.length - Math.floor(maxEvents)))\n : events.slice()\n\n return exportEvidencePackage({\n protocolVersion,\n runId: session.runId,\n source: session.source,\n createdAt: options?.createdAt,\n events: selected,\n summary: options?.summary,\n })\n },\n clear: () => {\n events.length = 0\n nextSeq = 1\n },\n }\n}\n\nexport const runSessionLayer = (session?: RunSession): Layer.Layer<RunSessionTagImpl, never, never> =>\n Layer.succeed(RunSessionTag, session ?? 
makeRunSession()) as Layer.Layer<RunSessionTagImpl, never, never>\n","import { Effect, Option } from 'effect'\nimport type { StateTxnContext } from './StateTransaction.js'\nimport * as Debug from './DebugSink.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport * as EffectOp from '../../effect-op.js'\nimport type { RunSession } from '../../observability/runSession.js'\nimport { RunSessionTag } from '../../observability/runSession.js'\nimport { EffectOpMiddlewareTag } from './EffectOpCore.js'\n\nexport interface OperationRuntimeServices {\n readonly middlewareStack: EffectOp.MiddlewareStack\n readonly runSession: RunSession | undefined\n}\n\nconst readMiddlewareEnv = (): Effect.Effect<Option.Option<EffectOpCore.EffectOpMiddlewareEnv>, never, any> =>\n Effect.serviceOption(EffectOpMiddlewareTag as any).pipe(\n Effect.map((option) => option as Option.Option<EffectOpCore.EffectOpMiddlewareEnv>),\n )\n\nconst readRunSession = (): Effect.Effect<Option.Option<RunSession>, never, any> =>\n Effect.serviceOption(RunSessionTag as any).pipe(Effect.map((option) => option as Option.Option<RunSession>))\n\nexport const resolveOperationRuntimeServices = (): Effect.Effect<OperationRuntimeServices, never, any> =>\n Effect.gen(function* () {\n const middlewareOpt = yield* readMiddlewareEnv()\n const runSessionOpt = yield* readRunSession()\n return {\n middlewareStack: Option.isSome(middlewareOpt) ? middlewareOpt.value.stack : [],\n runSession: Option.isSome(runSessionOpt) ? 
runSessionOpt.value : undefined,\n }\n })\n\nexport const getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack, never, any> =>\n resolveOperationRuntimeServices().pipe(Effect.map((runtimeServices) => runtimeServices.middlewareStack))\n\nexport const assignOperationOpSeq = (\n meta: EffectOp.EffectOp['meta'] | undefined,\n runSession: RunSession | undefined,\n): number | undefined => {\n const metaAny = meta as any\n if (typeof metaAny?.opSeq === 'number' && Number.isFinite(metaAny.opSeq)) {\n return Math.floor(metaAny.opSeq)\n }\n\n if (!runSession || !metaAny) {\n return undefined\n }\n\n const key = metaAny.instanceId ?? 'global'\n const opSeq = runSession.local.nextSeq('opSeq', key)\n metaAny.opSeq = opSeq\n return opSeq\n}\n\nexport type RunOperation = <A, E, R>(\n kind: EffectOp.EffectOp['kind'],\n name: string,\n params: {\n readonly payload?: unknown\n readonly meta?: EffectOp.EffectOp['meta']\n },\n eff: Effect.Effect<A, E, R>,\n) => Effect.Effect<A, E, R>\n\nexport const makeRunOperation = (args: {\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly runtimeLabel: string | undefined\n readonly txnContext: StateTxnContext<any>\n}): RunOperation => {\n const { optionsModuleId, instanceId, runtimeLabel, txnContext } = args\n\n const runOperation: RunOperation = <A2, E2, R2>(\n kind: EffectOp.EffectOp['kind'],\n name: string,\n params: {\n readonly payload?: unknown\n readonly meta?: EffectOp.EffectOp['meta']\n },\n eff: Effect.Effect<A2, E2, R2>,\n ): Effect.Effect<A2, E2, R2> =>\n Effect.gen(function* () {\n const { middlewareStack, runSession } = yield* resolveOperationRuntimeServices()\n const existingLinkId = yield* Effect.service(EffectOpCore.currentLinkId).pipe(Effect.orDie)\n\n const currentTxnId = txnContext.current?.txnId\n\n // NOTE: linkId is generated/propagated by the Runtime:\n // - Boundary entrypoints create a new linkId.\n // - Nested operations reuse the current FiberRef.linkId.\n // - Never 
default to randomness/time to avoid non-replayable implicit identifiers.\n const { linkId: _ignoredLinkId, ...restMeta } = (params.meta ?? {}) as any\n\n const baseMeta: EffectOp.EffectOp['meta'] = {\n ...restMeta,\n // Filled by the runtime.\n moduleId: (params.meta as any)?.moduleId ?? optionsModuleId,\n instanceId: (params.meta as any)?.instanceId ?? instanceId,\n runtimeLabel: (params.meta as any)?.runtimeLabel ?? runtimeLabel,\n txnSeq: (params.meta as any)?.txnSeq ?? txnContext.current?.txnSeq,\n txnId: (params.meta as any)?.txnId ?? currentTxnId,\n }\n\n const opSeq = assignOperationOpSeq(baseMeta, runSession)\n\n const op = EffectOp.make<A2, E2, R2>({\n kind,\n name,\n payload: params.payload,\n effect: eff,\n meta: baseMeta,\n })\n\n const linkId = existingLinkId ?? op.id\n const program = middlewareStack.length ? EffectOp.run(op, middlewareStack) : op.effect\n\n // linkId: created at the boundary, reused for nested ops (shared across modules via a FiberRef).\n return yield* Effect.provideService(Effect.provideService(program, Debug.currentOpSeq, opSeq), EffectOpCore.currentLinkId, linkId)\n })\n\n return runOperation\n}\n","// Internal EffectOp API (for internal implementation code).\n//\n// Goal:\n// - Internal modules must never import root public submodules (e.g. 
`../EffectOp`).\n// - This file hosts the shared implementation; public `src/EffectOp.ts` delegates to it.\n\nimport { Effect, Option } from 'effect'\nimport * as Core from './runtime/core/EffectOpCore.js'\nimport { RunSessionTag } from './observability/runSession.js'\n\nexport type EffectOp<Out = unknown, Err = unknown, Env = unknown> = Core.EffectOp<Out, Err, Env>\n\nexport type OperationPolicy = Core.OperationPolicy\n\nexport type OperationRejected = Core.OperationRejected\n\nexport type OperationError<E> = Core.OperationError<E>\n\nexport type Middleware = Core.Middleware\n\nexport type MiddlewareStack = Core.MiddlewareStack\n\nexport const composeMiddleware = Core.composeMiddleware\n\nexport const makeOperationRejected = Core.makeOperationRejected\n\n/**\n * Generate a stable id for identifying an EffectOp.\n * - Uses a monotonic sequence by default to avoid non-replayability from randomness/time.\n * - If meta.instanceId is available, prefer deriving `${instanceId}::o${opSeq}`.\n */\nlet nextGlobalOpSeq = 0\n\nconst nextOpSeq = (): number => {\n nextGlobalOpSeq += 1\n return nextGlobalOpSeq\n}\n\nconst makeId = (instanceId: string | undefined, opSeq: number): string =>\n instanceId ? `${instanceId}::o${opSeq}` : `o${opSeq}`\n\n/**\n * EffectOp.make:\n * - Create an EffectOp with basic meta.\n * - Generates a stable id by default (based on `instanceId` + monotonic `opSeq`); callers may override externally.\n */\nexport const make = <A, E, R>(params: {\n readonly kind: EffectOp['kind']\n readonly name: string\n readonly effect: Effect.Effect<A, E, R>\n readonly payload?: unknown\n readonly meta?: EffectOp['meta']\n readonly id?: string\n}): EffectOp<A, E, R> => ({\n ...(params.id\n ? { id: params.id, meta: params.meta }\n : (() => {\n const meta: any = params.meta ?? {}\n const instanceId: string | undefined = meta.instanceId\n const opSeq: number =\n typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq) ? 
Math.floor(meta.opSeq) : nextOpSeq()\n return {\n id: makeId(instanceId, opSeq),\n meta: meta.opSeq === opSeq ? meta : { ...meta, opSeq },\n }\n })()),\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n effect: params.effect,\n})\n\n/**\n * EffectOp.makeInRunSession:\n * - Allocate a stable `opSeq` within a RunSession scope (per-session + per-instance).\n * - If RunSession is missing from Env, fall back to a process-wide monotonic sequence (no process-wide Map).\n */\nexport const makeInRunSession = <A, E, R>(params: {\n readonly kind: EffectOp['kind']\n readonly name: string\n readonly effect: Effect.Effect<A, E, R>\n readonly payload?: unknown\n readonly meta?: EffectOp['meta']\n readonly id?: string\n}): Effect.Effect<EffectOp<A, E, R>, never, any> =>\n Effect.gen(function* () {\n if (params.id) {\n return {\n id: params.id,\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n meta: params.meta,\n effect: params.effect,\n } satisfies EffectOp<A, E, R>\n }\n\n const meta: any = params.meta ?? {}\n const instanceId: string | undefined = meta.instanceId\n\n let opSeq: number\n if (typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq)) {\n opSeq = Math.floor(meta.opSeq)\n } else {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = instanceId ?? 'global'\n opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n } else {\n opSeq = nextOpSeq()\n }\n }\n\n return {\n id: makeId(instanceId, opSeq),\n kind: params.kind,\n name: params.name,\n payload: params.payload,\n meta: meta.opSeq === opSeq ? 
meta : { ...meta, opSeq },\n effect: params.effect,\n } satisfies EffectOp<A, E, R>\n })\n\n/**\n * EffectOp.withMeta:\n * - Append or override meta fields on an existing EffectOp.\n * - Does not change the effect itself.\n */\nexport const withMeta = <A, E, R>(\n op: EffectOp<A, E, R>,\n meta: Partial<NonNullable<EffectOp['meta']>>,\n): EffectOp<A, E, R> => ({\n ...op,\n meta: { ...(op.meta ?? {}), ...meta },\n})\n\n/**\n * EffectOp.run:\n * - Execute an EffectOp using the given MiddlewareStack.\n * - If the stack is empty, return op.effect directly.\n */\nexport const run = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> =>\n Core.runWithMiddleware(op, stack)\n","import { Effect, PubSub } from 'effect'\nimport type { StateChangeWithMeta, StateCommitMeta, StateCommitMode, StateCommitPriority } from './module.js'\nimport * as Debug from './DebugSink.js'\nimport type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport * as ReducerDiagnostics from './ReducerDiagnostics.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport { mutateWithPatchPaths } from './mutativePatches.js'\nimport type { TxnOriginOverride } from './TxnOriginOverride.js'\nimport type { RunOperation } from './ModuleRuntime.operation.js'\nimport type { RunWithStateTransaction, SetStateInternal } from './ModuleRuntime.transaction.js'\nimport type { EnqueueTransaction } from './ModuleRuntime.txnQueue.js'\nimport type { ResolvedConcurrencyPolicy } from './ModuleRuntime.concurrencyPolicy.js'\n\ntype DispatchEntryPoint = 'dispatch' | 'dispatchBatch' | 'dispatchLowPriority'\n\nconst readClockMs = (): number => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n}\n\ntype ActionAnalysis = {\n readonly actionTag: string | undefined\n readonly actionTagNormalized: string\n readonly topicTagPrimary: string | undefined\n readonly topicTagSecondary: string | 
undefined\n readonly originOp: 'remove' | 'insert' | 'unset' | 'set'\n}\n\ntype ActionPropagationTopicTarget<A> = {\n readonly topicTag: string\n readonly hub: PubSub.PubSub<A>\n}\n\ntype ActionPropagationEntry<A> = {\n readonly action: A\n readonly analysis: ActionAnalysis\n readonly topicTargets: ReadonlyArray<ActionPropagationTopicTarget<A>>\n readonly fanoutCount: number\n}\n\ntype ActionStateWritebackHandler<S, A> =\n | { readonly kind: 'update'; readonly run: (state: S, action: A) => S }\n | { readonly kind: 'mutate'; readonly run: (draft: S, action: A) => void }\n | { readonly kind: 'effect'; readonly run: (action: A) => Effect.Effect<void, never, any> }\n\ntype ActionTopicBatch<A> = {\n readonly topicTag: string\n readonly hub: PubSub.PubSub<A>\n readonly actions: ReadonlyArray<A>\n readonly actionTag: string | undefined\n readonly fanoutCount: number\n}\n\ntype ActionPressureSource = {\n readonly dispatchEntry: DispatchEntryPoint\n readonly channel: 'main' | 'topic'\n readonly topicTag?: string\n readonly actionTag?: string\n readonly batchSize?: number\n readonly fanoutCount?: number\n}\n\nexport const makeDispatchOps = <S, A>(args: {\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly declaredActionTags?: ReadonlySet<string>\n readonly initialReducers?: Readonly<\n Record<string, (state: S, action: A, sink?: (path: StateTransaction.StatePatchPath) => void) => S>\n >\n readonly txnContext: StateTransaction.StateTxnContext<S>\n readonly readState: Effect.Effect<S>\n readonly setStateInternal: SetStateInternal<S>\n readonly recordStatePatch: (\n path: StateTransaction.StatePatchPath | undefined,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n readonly actionHub: PubSub.PubSub<A>\n readonly actionTagHubsByTag?: ReadonlyMap<string, PubSub.PubSub<A>>\n readonly actionCommitHub: PubSub.PubSub<StateChangeWithMeta<A>>\n readonly diagnostics: 
ConcurrencyDiagnostics\n readonly enqueueTransaction: EnqueueTransaction\n readonly resolveConcurrencyPolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>\n readonly runOperation: RunOperation\n readonly runWithStateTransaction: RunWithStateTransaction\n readonly isDevEnv: () => boolean\n}): {\n readonly registerReducer: (tag: string, fn: (state: S, action: A) => S) => void\n readonly registerActionStateWriteback: (tag: string, handler: ActionStateWritebackHandler<S, A>) => void\n readonly dispatchWithOriginOverride: (action: A, override?: TxnOriginOverride) => Effect.Effect<void, never, any>\n readonly dispatchBatchWithOriginOverride: (\n actions: ReadonlyArray<A>,\n override?: TxnOriginOverride,\n ) => Effect.Effect<void, never, any>\n readonly dispatchLowPriorityWithOriginOverride: (\n action: A,\n override?: TxnOriginOverride,\n ) => Effect.Effect<void, never, any>\n readonly dispatch: (action: A) => Effect.Effect<void, never, any>\n readonly dispatchBatch: (actions: ReadonlyArray<A>) => Effect.Effect<void, never, any>\n readonly dispatchLowPriority: (action: A) => Effect.Effect<void, never, any>\n} => {\n const {\n optionsModuleId,\n instanceId,\n declaredActionTags,\n initialReducers,\n txnContext,\n readState,\n setStateInternal,\n recordStatePatch,\n actionHub,\n actionTagHubsByTag,\n actionCommitHub,\n diagnostics,\n enqueueTransaction,\n resolveConcurrencyPolicy,\n runOperation,\n runWithStateTransaction,\n isDevEnv,\n } = args\n\n const resolveActionOriginOp = (tag: string): ActionAnalysis['originOp'] => {\n if (tag.includes('Remove') || tag.includes('remove')) return 'remove'\n if (\n tag.includes('Append') ||\n tag.includes('Prepend') ||\n tag.includes('Insert') ||\n tag.includes('Swap') ||\n tag.includes('Move') ||\n tag.includes('append') ||\n tag.includes('prepend') ||\n tag.includes('insert') ||\n tag.includes('swap') ||\n tag.includes('move')\n ) {\n return 'insert'\n }\n if (tag.includes('Unset') || tag.includes('unset')) return 'unset'\n 
return 'set'\n }\n\n const analyzeAction = (action: A): ActionAnalysis => {\n const tag = (action as any)?._tag\n const type = (action as any)?.type\n\n const actionTag =\n typeof tag === 'string' && tag.length > 0\n ? tag\n : typeof type === 'string' && type.length > 0\n ? type\n : tag != null\n ? String(tag)\n : type != null\n ? String(type)\n : undefined\n\n let topicTagPrimary: string | undefined\n if (typeof tag === 'string' && tag.length > 0) {\n topicTagPrimary = tag\n }\n\n let topicTagSecondary: string | undefined\n if (typeof type === 'string' && type.length > 0) {\n if (topicTagPrimary == null) {\n topicTagPrimary = type\n } else if (type !== topicTagPrimary) {\n topicTagSecondary = type\n }\n }\n\n if (topicTagPrimary == null && actionTag) {\n topicTagPrimary = actionTag\n }\n\n return {\n actionTag,\n actionTagNormalized: typeof actionTag === 'string' && actionTag.length > 0 ? actionTag : 'unknown',\n topicTagPrimary,\n topicTagSecondary,\n originOp: resolveActionOriginOp(actionTag ?? 
''),\n }\n }\n\n // Primary reducer map: initial values come from options.reducers and can be extended at runtime via internal hooks (for $.reducer sugar).\n const reducerMap = new Map<string, (state: S, action: A) => S>()\n if (initialReducers) {\n for (const [key, fn] of Object.entries(initialReducers)) {\n reducerMap.set(key, fn as (state: S, action: A) => S)\n }\n }\n\n // Track whether an Action tag has been dispatched, for diagnosing config issues like late reducer registration.\n const dispatchedTags = new Set<string>()\n const actionStateWritebacks = new Map<string, Array<ActionStateWritebackHandler<S, A>>>()\n\n const registerReducer = (tag: string, fn: (state: S, action: A) => S): void => {\n if (reducerMap.has(tag)) {\n // Duplicate registration: throw a config error with extra context; catchAllCause emits diagnostics.\n throw ReducerDiagnostics.makeReducerError('ReducerDuplicateError', tag, optionsModuleId)\n }\n if (dispatchedTags.has(tag)) {\n // Registering after the tag has already been dispatched is a risky config; surfaced via a custom error type for diagnostics.\n throw ReducerDiagnostics.makeReducerError('ReducerLateRegistrationError', tag, optionsModuleId)\n }\n reducerMap.set(tag, fn)\n }\n\n const registerActionStateWriteback = (tag: string, handler: ActionStateWritebackHandler<S, A>): void => {\n if (dispatchedTags.has(tag)) {\n throw ReducerDiagnostics.makeReducerError('ReducerLateRegistrationError', tag, optionsModuleId)\n }\n const existing = actionStateWritebacks.get(tag)\n if (existing) {\n existing.push(handler)\n return\n }\n actionStateWritebacks.set(tag, [handler])\n }\n\n const applyPrimaryReducer = (action: A, analysis: ActionAnalysis) => {\n const tag = analysis.actionTag\n if (tag == null || reducerMap.size === 0) {\n return Effect.void\n }\n const tagKey = tag.length > 0 ? 
tag : 'unknown'\n dispatchedTags.add(tagKey)\n const reducer = reducerMap.get(tagKey)\n if (!reducer) {\n return Effect.void\n }\n\n return readState.pipe(\n Effect.flatMap((prev) =>\n Effect.gen(function* () {\n const patchPaths: Array<StateTransaction.StatePatchPath> = []\n const sink = (path: StateTransaction.StatePatchPath): void => {\n if (typeof path === 'string') {\n if (path.length > 0) patchPaths.push(path)\n return\n }\n if (typeof path === 'number') {\n if (Number.isFinite(path)) patchPaths.push(Math.floor(path))\n return\n }\n if (path.length > 0) patchPaths.push(path)\n }\n\n const next = (reducer as any)(prev, action, sink) as S\n\n // No-op reducer: avoid dirty evidence to prevent redundant converge/validate full paths.\n if (Object.is(next, prev)) {\n return\n }\n\n // Prefer the traceable in-transaction path:\n // - If the reducer provides patchPaths (e.g. generated by Logix.Module.Reducer.mutate), record field-level patches.\n // - Otherwise record a whole-state replace marker (path=\"*\") and infer best-effort dirty evidence at commit time.\n if (txnContext.current) {\n if (patchPaths.length > 0) {\n StateTransaction.updateDraft(txnContext, next)\n for (const path of patchPaths) {\n recordStatePatch(path, 'reducer')\n }\n return\n }\n\n StateTransaction.updateDraft(txnContext, next)\n recordStatePatch('*', 'reducer', undefined, next)\n\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: txnContext.current?.txnSeq,\n txnId: txnContext.current?.txnId,\n trigger: txnContext.current?.origin,\n code: 'state_transaction::dirty_evidence_inferred',\n severity: 'warning',\n message:\n 'Reducer writeback did not provide field-level patchPaths; using commit-time best-effort inference for dirty evidence.',\n hint: 'Prefer Logix.Module.Reducer.mutate(...) or $.state.mutate(...) 
to produce exact patchPaths; inference may degrade incremental scheduling under complex mutations.',\n kind: 'dirty_evidence_inferred:reducer',\n })\n }\n\n return\n }\n\n yield* setStateInternal(next, '*', 'reducer', undefined, next)\n }),\n ),\n )\n }\n\n\t\n const applyActionStateWritebacks = (action: A, analysis: ActionAnalysis): Effect.Effect<void, never, any> => {\n const tag = analysis.actionTag\n if (tag == null || actionStateWritebacks.size === 0) {\n return Effect.void\n }\n const handlers = actionStateWritebacks.get(tag.length > 0 ? tag : 'unknown')\n if (!handlers || handlers.length === 0) {\n return Effect.void\n }\n\n return Effect.gen(function* () {\n let currentState: S | undefined\n let pendingState: S | undefined\n let pendingWholeStateWrite = false\n let pendingChanged = false\n const pendingPatchPaths: Array<StateTransaction.StatePatchPath> = []\n\n const clearPending = (): void => {\n pendingState = undefined\n pendingWholeStateWrite = false\n pendingChanged = false\n pendingPatchPaths.length = 0\n }\n\n const flushPending = (): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (!pendingChanged || pendingState === undefined) {\n clearPending()\n return\n }\n\n if (pendingWholeStateWrite) {\n yield* setStateInternal(pendingState, '*', 'unknown', undefined, pendingState)\n } else {\n for (const path of pendingPatchPaths) {\n recordStatePatch(path, 'unknown')\n }\n StateTransaction.updateDraft(txnContext, pendingState)\n }\n\n currentState = pendingState\n clearPending()\n })\n\n const getCurrentState = (): Effect.Effect<S, never, any> =>\n currentState === undefined\n ? readState.pipe(\n Effect.tap((state) =>\n Effect.sync(() => {\n currentState = state\n }),\n ),\n )\n : Effect.succeed(currentState)\n\n for (const handler of handlers) {\n if (handler.kind === 'effect') {\n yield* flushPending()\n yield* handler.run(action)\n currentState = undefined\n continue\n }\n\n const prev = pendingState ?? 
(yield* getCurrentState())\n\n if (handler.kind === 'update') {\n const next = handler.run(prev, action)\n if (Object.is(next, prev)) {\n continue\n }\n pendingState = next\n pendingChanged = true\n pendingWholeStateWrite = true\n continue\n }\n\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as S, (draft) => handler.run(draft as S, action))\n if (Object.is(nextState, prev)) {\n continue\n }\n\n pendingState = nextState\n pendingChanged = true\n\n if (!pendingWholeStateWrite) {\n for (const path of patchPaths) {\n pendingPatchPaths.push(path)\n }\n }\n }\n\n yield* flushPending()\n })\n }\n\n const makeActionOrigin = (\n originName: string,\n action: A,\n analysis: ActionAnalysis,\n override?: TxnOriginOverride,\n ): StateTransaction.StateTxnOrigin => ({\n kind: override?.kind ?? 'action',\n name: override?.name ?? originName,\n details: {\n _tag: analysis.actionTagNormalized,\n path: typeof (action as any)?.payload?.path === 'string' ? ((action as any).payload.path as string) : undefined,\n op: analysis.originOp,\n },\n })\n\n const dispatchInTransaction = (action: A, analysis: ActionAnalysis): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n // Apply the primary reducer first (may be a no-op).\n yield* applyPrimaryReducer(action, analysis)\n yield* applyActionStateWritebacks(action, analysis)\n\n const unknownAction = declaredActionTags ? !declaredActionTags.has(analysis.actionTagNormalized) : false\n const current: any = txnContext.current\n const phaseTimingEnabled = current?.dispatchPhaseTimingEnabled === true\n\n // Record action dispatch (for Devtools/diagnostics).\n const actionRecordStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n yield* Debug.record({\n type: 'action:dispatch',\n moduleId: optionsModuleId,\n action,\n actionTag: analysis.actionTagNormalized,\n ...(unknownAction ? 
{ unknownAction: true } : {}),\n instanceId,\n txnSeq: txnContext.current?.txnSeq,\n txnId: txnContext.current?.txnId,\n })\n if (phaseTimingEnabled) {\n current.dispatchActionRecordMs =\n (typeof current.dispatchActionRecordMs === 'number' ? current.dispatchActionRecordMs : 0) +\n Math.max(0, readClockMs() - actionRecordStartedAtMs)\n }\n\n // actionsWithMeta$: provides stable txnSeq/txnId anchors for higher-level subscriptions (e.g. Process).\n if (current) {\n const meta: StateCommitMeta = {\n txnSeq: current.txnSeq,\n txnId: current.txnId,\n commitMode: ((current as any).commitMode ?? 'normal') as StateCommitMode,\n priority: ((current as any).priority ?? 'normal') as StateCommitPriority,\n originKind: current.origin.kind,\n originName: current.origin.name,\n }\n const actionCommitStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n yield* PubSub.publish(actionCommitHub, { value: action, meta })\n if (phaseTimingEnabled) {\n current.dispatchActionCommitHubMs =\n (typeof current.dispatchActionCommitHubMs === 'number' ? current.dispatchActionCommitHubMs : 0) +\n Math.max(0, readClockMs() - actionCommitStartedAtMs)\n current.dispatchActionCount =\n (typeof current.dispatchActionCount === 'number' ? 
current.dispatchActionCount : 0) + 1\n }\n }\n })\n\n const runDispatch = (action: A, analysis: ActionAnalysis, override?: TxnOriginOverride): Effect.Effect<void> =>\n runOperation(\n 'action',\n 'action:dispatch',\n {\n payload: action,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction(makeActionOrigin('dispatch', action, analysis, override), () =>\n dispatchInTransaction(action, analysis) as Effect.Effect<void, never, never>,\n ),\n ).pipe(Effect.asVoid)\n\n const runDispatchLowPriority = (action: A, analysis: ActionAnalysis, override?: TxnOriginOverride): Effect.Effect<void> =>\n runOperation(\n 'action',\n 'action:dispatchLowPriority',\n {\n payload: action,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction(makeActionOrigin('dispatchLowPriority', action, analysis, override), () =>\n Effect.gen(function* () {\n if (txnContext.current) {\n ;(txnContext.current as any).commitMode = 'lowPriority' as StateCommitMode\n ;(txnContext.current as any).priority = 'low' as StateCommitPriority\n }\n yield* (dispatchInTransaction(action, analysis) as Effect.Effect<void, never, never>)\n }),\n ),\n ).pipe(Effect.asVoid)\n\n const runDispatchBatch = (\n actions: ReadonlyArray<A>,\n analyses: ReadonlyArray<ActionAnalysis>,\n override?: TxnOriginOverride,\n ): Effect.Effect<void> => {\n if (actions.length === 0) return Effect.void\n\n return runOperation(\n 'action',\n 'action:dispatchBatch',\n {\n payload: actions,\n meta: { moduleId: optionsModuleId, instanceId },\n },\n runWithStateTransaction(\n { kind: override?.kind ?? 'action', name: override?.name ?? 
'dispatchBatch', details: { count: actions.length } } as any,\n () =>\n Effect.gen(function* () {\n if (txnContext.current) {\n ;(txnContext.current as any).commitMode = 'batch' as StateCommitMode\n ;(txnContext.current as any).priority = 'normal' as StateCommitPriority\n }\n for (let index = 0; index < actions.length; index += 1) {\n const action = actions[index] as A\n const analysis = analyses[index] as ActionAnalysis\n yield* (dispatchInTransaction(action, analysis) as Effect.Effect<void, never, never>)\n }\n }),\n ),\n ).pipe(Effect.asVoid)\n }\n\n const publishWithPressureDiagnostics = (\n publish: Effect.Effect<unknown>,\n trigger: () => Debug.TriggerRef,\n resolvePolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>,\n ): Effect.Effect<void> =>\n Effect.gen(function* () {\n const startedAt = Date.now()\n yield* publish\n const elapsedMs = Date.now() - startedAt\n\n // fast-path: treat 0ms as \"no backpressure wait observed\" to avoid parsing policy per dispatch.\n if (elapsedMs <= 0) {\n return\n }\n\n const policy = yield* resolvePolicy()\n yield* diagnostics.emitPressureIfNeeded({\n policy,\n trigger: trigger(),\n saturatedDurationMs: elapsedMs,\n })\n })\n\n const publishActionWithPressureDiagnostics = (\n hub: PubSub.PubSub<A>,\n action: A,\n trigger: () => Debug.TriggerRef,\n resolvePolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>,\n ): Effect.Effect<void> => publishWithPressureDiagnostics(PubSub.publish(hub, action), trigger, resolvePolicy)\n\n const publishActionBatchWithPressureDiagnostics = (\n hub: PubSub.PubSub<A>,\n actions: ReadonlyArray<A>,\n trigger: () => Debug.TriggerRef,\n resolvePolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>,\n ): Effect.Effect<void> => {\n if (actions.length === 0) {\n return Effect.void\n }\n if (actions.length === 1) {\n return publishActionWithPressureDiagnostics(hub, actions[0] as A, trigger, resolvePolicy)\n }\n return publishWithPressureDiagnostics(PubSub.publishAll(hub, actions), trigger, 
resolvePolicy)\n }\n\n const makeLazyPolicyResolver = (): (() => Effect.Effect<ResolvedConcurrencyPolicy>) => {\n let cached: ResolvedConcurrencyPolicy | undefined\n return () =>\n cached\n ? Effect.succeed(cached)\n : resolveConcurrencyPolicy().pipe(\n Effect.tap((policy) =>\n Effect.sync(() => {\n cached = policy\n }),\n ),\n )\n }\n\n const makeActionPressureSource = (args: ActionPressureSource): Record<string, unknown> => ({\n dispatchEntry: args.dispatchEntry,\n channel: args.channel,\n ...(typeof args.topicTag === 'string' ? { topicTag: args.topicTag } : {}),\n ...(typeof args.actionTag === 'string' ? { actionTag: args.actionTag } : {}),\n ...(typeof args.batchSize === 'number' ? { batchSize: args.batchSize } : {}),\n ...(typeof args.fanoutCount === 'number' ? { fanoutCount: args.fanoutCount } : {}),\n })\n\n const makeActionPropagationEntry = (action: A, analysis: ActionAnalysis): ActionPropagationEntry<A> => {\n const topicTargets: Array<ActionPropagationTopicTarget<A>> = []\n const primaryTopicTag = analysis.topicTagPrimary\n const primaryTopicHub = primaryTopicTag ? actionTagHubsByTag?.get(primaryTopicTag) : undefined\n if (primaryTopicHub && primaryTopicTag) {\n topicTargets.push({ topicTag: primaryTopicTag, hub: primaryTopicHub })\n }\n\n const secondaryTopicTag = analysis.topicTagSecondary\n const secondaryTopicHub = secondaryTopicTag ? 
actionTagHubsByTag?.get(secondaryTopicTag) : undefined\n if (secondaryTopicHub && secondaryTopicTag) {\n topicTargets.push({ topicTag: secondaryTopicTag, hub: secondaryTopicHub })\n }\n\n return {\n action,\n analysis,\n topicTargets,\n fanoutCount: topicTargets.length,\n }\n }\n\n const resolveSharedActionTag = (entries: ReadonlyArray<ActionPropagationEntry<A>>): string | undefined => {\n if (entries.length === 0) {\n return undefined\n }\n const first = entries[0]!.analysis.actionTagNormalized\n for (let index = 1; index < entries.length; index += 1) {\n if (entries[index]!.analysis.actionTagNormalized !== first) {\n return undefined\n }\n }\n return first\n }\n\n const groupTopicBatches = (entries: ReadonlyArray<ActionPropagationEntry<A>>): ReadonlyArray<ActionTopicBatch<A>> => {\n const grouped = new Map<\n string,\n {\n topicTag: string\n hub: PubSub.PubSub<A>\n actions: Array<A>\n actionTag: string | undefined\n fanoutCount: number\n }\n >()\n\n for (let index = 0; index < entries.length; index += 1) {\n const entry = entries[index]!\n for (let topicIndex = 0; topicIndex < entry.topicTargets.length; topicIndex += 1) {\n const topicTarget = entry.topicTargets[topicIndex]!\n let topicBatch = grouped.get(topicTarget.topicTag)\n if (!topicBatch) {\n topicBatch = {\n topicTag: topicTarget.topicTag,\n hub: topicTarget.hub,\n actions: [],\n actionTag: entry.analysis.actionTagNormalized,\n fanoutCount: 0,\n }\n grouped.set(topicTarget.topicTag, topicBatch)\n }\n\n topicBatch.actions.push(entry.action)\n topicBatch.fanoutCount += entry.fanoutCount\n if (topicBatch.actionTag && topicBatch.actionTag !== entry.analysis.actionTagNormalized) {\n topicBatch.actionTag = undefined\n }\n }\n }\n\n return Array.from(grouped.values())\n }\n\n const publishActionPropagationBus = (\n entries: ReadonlyArray<ActionPropagationEntry<A>>,\n dispatchEntry: DispatchEntryPoint,\n resolvePolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>,\n ): Effect.Effect<void> =>\n 
Effect.gen(function* () {\n if (entries.length === 0) {\n return\n }\n\n const batchSize = entries.length\n let batchFanoutCount = 0\n for (let index = 0; index < entries.length; index += 1) {\n batchFanoutCount += entries[index]!.fanoutCount\n }\n const batchActionTag = batchSize === 1 ? entries[0]!.analysis.actionTagNormalized : resolveSharedActionTag(entries)\n if (batchSize === 1) {\n yield* publishActionWithPressureDiagnostics(\n actionHub,\n entries[0]!.action,\n () => ({\n kind: 'actionHub',\n name: 'publish',\n details: makeActionPressureSource({\n dispatchEntry,\n channel: 'main',\n actionTag: batchActionTag,\n batchSize,\n fanoutCount: batchFanoutCount,\n }),\n }),\n resolvePolicy,\n )\n } else {\n const batchActions = new Array<A>(batchSize)\n for (let index = 0; index < batchSize; index += 1) {\n batchActions[index] = entries[index]!.action\n }\n yield* publishActionBatchWithPressureDiagnostics(\n actionHub,\n batchActions,\n () => ({\n kind: 'actionHub',\n name: 'publish',\n details: makeActionPressureSource({\n dispatchEntry,\n channel: 'main',\n actionTag: batchActionTag,\n batchSize,\n fanoutCount: batchFanoutCount,\n }),\n }),\n resolvePolicy,\n )\n }\n\n // Keep original order per topic stream while using publishAll for batch fan-out.\n const topicBatches = groupTopicBatches(entries)\n for (let topicIndex = 0; topicIndex < topicBatches.length; topicIndex += 1) {\n const topicBatch = topicBatches[topicIndex]!\n yield* publishActionBatchWithPressureDiagnostics(\n topicBatch.hub,\n topicBatch.actions,\n () => ({\n kind: 'actionTopicHub',\n name: 'publish',\n details: makeActionPressureSource({\n dispatchEntry,\n channel: 'topic',\n topicTag: topicBatch.topicTag,\n actionTag: topicBatch.actionTag,\n batchSize: topicBatch.actions.length,\n fanoutCount: topicBatch.fanoutCount,\n }),\n }),\n resolvePolicy,\n )\n }\n })\n\n return {\n registerReducer,\n registerActionStateWriteback,\n dispatchWithOriginOverride: (action, override) => {\n const analysis = 
analyzeAction(action)\n const propagationEntry = makeActionPropagationEntry(action, analysis)\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatch(action, analysis, override)).pipe(\n Effect.flatMap(() => publishActionPropagationBus([propagationEntry], 'dispatch', resolvePolicy)),\n )\n },\n dispatchBatchWithOriginOverride: (actions, override) => {\n const analyses = new Array<ActionAnalysis>(actions.length)\n for (let index = 0; index < actions.length; index += 1) {\n analyses[index] = analyzeAction(actions[index] as A)\n }\n const propagationEntries = new Array<ActionPropagationEntry<A>>(actions.length)\n for (let index = 0; index < actions.length; index += 1) {\n propagationEntries[index] = makeActionPropagationEntry(actions[index] as A, analyses[index] as ActionAnalysis)\n }\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatchBatch(actions, analyses, override)).pipe(\n Effect.flatMap(() => publishActionPropagationBus(propagationEntries, 'dispatchBatch', resolvePolicy)),\n )\n },\n dispatchLowPriorityWithOriginOverride: (action, override) => {\n const analysis = analyzeAction(action)\n const propagationEntry = makeActionPropagationEntry(action, analysis)\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatchLowPriority(action, analysis, override)).pipe(\n Effect.flatMap(() => publishActionPropagationBus([propagationEntry], 'dispatchLowPriority', resolvePolicy)),\n )\n },\n // Note: publish is a lossless/backpressure channel and may wait.\n // Must run outside the transaction window (FR-012) and must not block the txnQueue consumer fiber (avoid deadlock).\n dispatch: (action) => {\n const analysis = analyzeAction(action)\n const propagationEntry = makeActionPropagationEntry(action, analysis)\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatch(action, analysis)).pipe(\n Effect.flatMap(() => 
publishActionPropagationBus([propagationEntry], 'dispatch', resolvePolicy)),\n )\n },\n dispatchBatch: (actions) => {\n const analyses = new Array<ActionAnalysis>(actions.length)\n for (let index = 0; index < actions.length; index += 1) {\n analyses[index] = analyzeAction(actions[index] as A)\n }\n const propagationEntries = new Array<ActionPropagationEntry<A>>(actions.length)\n for (let index = 0; index < actions.length; index += 1) {\n propagationEntries[index] = makeActionPropagationEntry(actions[index] as A, analyses[index] as ActionAnalysis)\n }\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatchBatch(actions, analyses)).pipe(\n Effect.flatMap(() => publishActionPropagationBus(propagationEntries, 'dispatchBatch', resolvePolicy)),\n )\n },\n dispatchLowPriority: (action) => {\n const analysis = analyzeAction(action)\n const propagationEntry = makeActionPropagationEntry(action, analysis)\n const resolvePolicy = makeLazyPolicyResolver()\n return enqueueTransaction(runDispatchLowPriority(action, analysis)).pipe(\n Effect.flatMap(() => publishActionPropagationBus([propagationEntry], 'dispatchLowPriority', resolvePolicy)),\n )\n },\n }\n}\n","import { Cause, Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\n\n/**\n * Reducer diagnostic error types:\n * - ReducerDuplicateError: multiple primary reducers registered for the same tag.\n * - ReducerLateRegistrationError: reducer registered after actions with this tag have already been dispatched.\n *\n * These errors are internal to Runtime and are converted into Debug diagnostic events in the catch phase.\n */\nexport interface ReducerDiagnosticError extends Error {\n readonly _tag: 'ReducerDuplicateError' | 'ReducerLateRegistrationError'\n readonly tag: string\n readonly moduleId?: string\n}\n\nexport const makeReducerError = (\n _tag: ReducerDiagnosticError['_tag'],\n tag: string,\n moduleId?: string,\n): ReducerDiagnosticError =>\n Object.assign(\n new Error(\n _tag 
=== 'ReducerDuplicateError'\n ? `[ModuleRuntime] Duplicate primary reducer for tag \"${tag}\". Each action tag must have at most one primary reducer.`\n : `[ModuleRuntime] Late primary reducer registration for tag \"${tag}\". Reducers must be registered before the first dispatch of this tag.`,\n ),\n {\n _tag,\n tag,\n moduleId,\n },\n ) as ReducerDiagnosticError\n\n/**\n * Extracts Reducer diagnostic errors from a Logic-forked Cause and emits them as Debug events.\n *\n * Notes:\n * - Emits diagnostic events only when ReducerDiagnosticError is present.\n * - moduleId prefers the error object's moduleId, falling back to the caller-provided moduleId.\n */\nexport const emitDiagnosticsFromCause = (\n cause: Cause.Cause<unknown>,\n moduleIdFromContext?: string,\n): Effect.Effect<void, never, any> =>\n Effect.sync(() => {\n const defects = cause.reasons.filter(Cause.isDieReason).map((reason) => reason.defect)\n\n let duplicate: ReducerDiagnosticError | undefined\n let late: ReducerDiagnosticError | undefined\n\n for (const defect of defects) {\n if (!defect || typeof defect !== 'object') continue\n const error = defect as any\n if (error._tag === 'ReducerDuplicateError') {\n duplicate = error as ReducerDiagnosticError\n } else if (error._tag === 'ReducerLateRegistrationError') {\n late = error as ReducerDiagnosticError\n }\n }\n\n const effects: Array<Effect.Effect<void>> = []\n\n if (duplicate) {\n effects.push(\n Debug.record({\n type: 'diagnostic',\n moduleId: duplicate.moduleId ?? moduleIdFromContext,\n code: 'reducer::duplicate',\n severity: 'error',\n message: `Primary reducer for tag \"${duplicate.tag}\" is already registered and cannot be redefined.`,\n hint: 'Ensure each Action tag defines a single primary reducer. 
If it is defined in both Module.reducers and $.reducer, keep the Module.reducers version or merge into one definition.',\n actionTag: duplicate.tag,\n }),\n )\n }\n\n if (late) {\n effects.push(\n Debug.record({\n type: 'diagnostic',\n moduleId: late.moduleId ?? moduleIdFromContext,\n code: 'reducer::late_registration',\n severity: 'error',\n message: `Primary reducer for tag \"${late.tag}\" was registered after actions with this tag had already been dispatched.`,\n hint: 'Move this reducer to Module.make({ reducers }), or ensure $.reducer(\"tag\", ...) runs before the first dispatch.',\n actionTag: late.tag,\n }),\n )\n }\n\n if (effects.length === 0) {\n return Effect.void\n }\n\n let combined: Effect.Effect<void> = Effect.void\n for (const eff of effects) {\n combined = combined.pipe(Effect.flatMap(() => eff))\n }\n return combined\n }).pipe(Effect.flatten)\n","import { create, type Patches } from 'mutative'\nimport type { FieldPath } from '../../field-path.js'\nimport { isFieldPathSegment, toKey } from '../../field-path.js'\n\nexport type PatchPath = FieldPath\n\nexport const mutateWithoutPatches = <S>(base: S, mutator: (draft: S) => void): S => {\n return create(base, mutator as any) as unknown as S\n}\n\n// Patch path evidence (from mutative) is allowed to include list indices (number / digit strings).\n// Field-level dirty ids are still derived by filtering out index segments via normalizeFieldPath.\nconst isNonNegativeIntString = (text: string): boolean => {\n if (!text) return false\n for (let i = 0; i < text.length; i++) {\n const c = text.charCodeAt(i)\n if (c < 48 /* '0' */ || c > 57 /* '9' */) return false\n }\n return true\n}\n\nconst toPatchFieldPath = (path: unknown): PatchPath | '*' | undefined => {\n if (typeof path === 'string') {\n const trimmed = path.trim()\n return trimmed.length > 0 ? 
'*' : undefined\n }\n\n if (!Array.isArray(path)) return undefined\n\n // Fast path: patch path is already a pure string path evidence array.\n let allValidString = true\n for (let i = 0; i < path.length; i++) {\n const seg = path[i]\n if (typeof seg !== 'string' || !(isFieldPathSegment(seg) || isNonNegativeIntString(seg))) {\n allValidString = false\n break\n }\n }\n if (allValidString) return path as PatchPath\n\n // Structural path:\n // - keep valid field segments (non-numeric), and\n // - keep list indices (numbers / digit strings) for listIndexEvidence (C-1 / D-1).\n const parts: Array<string> = []\n for (let i = 0; i < path.length; i++) {\n const seg = path[i]\n if (typeof seg === 'string') {\n if (isFieldPathSegment(seg) || isNonNegativeIntString(seg)) {\n parts.push(seg)\n }\n continue\n }\n\n if (typeof seg === 'number' && Number.isFinite(seg)) {\n const n = Math.floor(seg)\n if (n >= 0 && n <= 2_147_483_647) {\n parts.push(String(n))\n }\n }\n }\n\n return parts.length === 0 ? '*' : parts\n}\n\nexport const mutateWithPatchPaths = <S>(\n base: S,\n mutator: (draft: S) => void,\n): { readonly nextState: S; readonly patchPaths: ReadonlyArray<PatchPath | '*'> } => {\n const out = create(base, mutator as any, {\n enablePatches: {\n pathAsArray: true,\n arrayLengthAssignment: false,\n },\n }) as unknown\n\n if (!Array.isArray(out)) {\n return { nextState: out as S, patchPaths: [] }\n }\n\n const nextState = out[0] as S\n const patches = (out[1] ?? []) as Patches<{ pathAsArray: true; arrayLengthAssignment: false }>\n\n // Large patch bursts (e.g. reducers mutating hundreds/thousands of flat fields) are typically prefix-free and\n // already unique. 
In this case, string-key dedup becomes pure overhead and can cause GC jitter in perf workloads.\n // We keep only the '*' guard and let downstream dirty-set tracking handle any rare duplicates.\n if (patches.length > 256) {\n let sawStar = false\n const patchPaths: Array<PatchPath | '*'> = []\n for (let i = 0; i < patches.length; i++) {\n const patch = patches[i]\n const p = toPatchFieldPath((patch as any)?.path)\n if (!p) continue\n if (p === '*') {\n if (sawStar) continue\n sawStar = true\n }\n patchPaths.push(p)\n }\n return {\n nextState,\n patchPaths,\n }\n }\n\n // Perf note:\n // - Avoid JSON.stringify-based dedup keys (alloc-heavy, can cause GC spikes in perf workloads).\n // - Use segment keys for single-segment paths; fall back to a stable toKey() digest for multi-segment paths.\n let sawStar = false\n const singleSeg = new Set<string>()\n const multiSeg = new Set<string>()\n const patchPaths: Array<PatchPath | '*'> = []\n\n for (let i = 0; i < patches.length; i++) {\n const patch = patches[i]\n const p = toPatchFieldPath((patch as any)?.path)\n if (!p) continue\n\n if (p === '*') {\n if (sawStar) continue\n sawStar = true\n patchPaths.push(p)\n continue\n }\n\n if (p.length === 1) {\n const seg = p[0]!\n if (singleSeg.has(seg)) continue\n singleSeg.add(seg)\n patchPaths.push(p)\n continue\n }\n\n const key = toKey(p)\n if (multiSeg.has(key)) continue\n multiSeg.add(key)\n patchPaths.push(p)\n }\n\n return {\n nextState,\n patchPaths,\n }\n}\n","import { Effect, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { toSerializableErrorSummary } from './errorSummary.js'\n\ntype Phase = 'setup' | 'run'\n\ntype AnyEffectHandler = (payload: unknown) => Effect.Effect<void, any, any>\n\ntype HandlerEntry = {\n readonly actionTag: string\n readonly sourceKey: string\n readonly handler: AnyEffectHandler\n readonly phase: Phase\n readonly logicUnitId: string\n readonly logicUnitLabel: string\n readonly logicUnitPath?: string\n}\n\ntype 
LogicUnitState = {\n nextHandlerSeq: number\n handlerIds: WeakMap<AnyEffectHandler, string>\n}\n\ntype ActionTagState = {\n readonly handlers: Map<string, HandlerEntry>\n handlerSnapshot: ReadonlyArray<HandlerEntry>\n}\n\nconst resolveActionTag = (action: unknown): string | undefined => {\n const tag = (action as any)?._tag\n if (typeof tag === 'string' && tag.length > 0) return tag\n const type = (action as any)?.type\n if (typeof type === 'string' && type.length > 0) return type\n if (tag != null) return String(tag)\n if (type != null) return String(type)\n return undefined\n}\n\nconst getOrCreateLogicUnitState = (states: Map<string, LogicUnitState>, logicUnitId: string): LogicUnitState => {\n const existing = states.get(logicUnitId)\n if (existing) return existing\n const next: LogicUnitState = { nextHandlerSeq: 0, handlerIds: new WeakMap() }\n states.set(logicUnitId, next)\n return next\n}\n\nconst getOrAssignHandlerId = (state: LogicUnitState, handler: AnyEffectHandler): string => {\n const existing = state.handlerIds.get(handler)\n if (existing) return existing\n state.nextHandlerSeq += 1\n const id = `h${state.nextHandlerSeq}`\n state.handlerIds.set(handler, id)\n return id\n}\n\nexport type RegisterEffectArgs = {\n readonly actionTag: string\n readonly handler: AnyEffectHandler\n readonly phase: Phase\n readonly logicUnit?: {\n readonly logicUnitId: string\n readonly logicUnitLabel: string\n readonly path?: string\n }\n}\n\nexport const makeEffectsRegistry = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n readonly actions$: Stream.Stream<unknown>\n}): {\n readonly registerEffect: (\n params: RegisterEffectArgs,\n ) => Effect.Effect<{ readonly sourceKey: string; readonly duplicate: boolean }, never, any>\n} => {\n const { moduleId, instanceId, actions$ } = args\n\n const logicUnitStates = new Map<string, LogicUnitState>()\n const tagStates = new Map<string, ActionTagState>()\n let watcherStarted = false\n\n const 
getOrCreateTagState = (actionTag: string): ActionTagState => {\n const existing = tagStates.get(actionTag)\n if (existing) return existing\n const next: ActionTagState = { handlers: new Map(), handlerSnapshot: [] }\n tagStates.set(actionTag, next)\n return next\n }\n\n const refreshHandlerSnapshot = (state: ActionTagState): void => {\n state.handlerSnapshot = Array.from(state.handlers.values())\n }\n\n const startWatcherIfNeeded = (): Effect.Effect<void, never, any> => {\n if (watcherStarted) return Effect.void\n watcherStarted = true\n\n const program = Stream.runForEach(actions$, (action) =>\n Effect.gen(function* () {\n const actionTag = resolveActionTag(action)\n if (!actionTag) return\n\n const state = tagStates.get(actionTag)\n if (!state) return\n\n const entries = state.handlerSnapshot\n if (entries.length === 0) return\n\n const payload = (action as any)?.payload\n\n const dispatchEntry = (entry: HandlerEntry) =>\n Effect.forkScoped(\n Effect.gen(function* () {\n const exit = yield* Effect.exit(entry.handler(payload))\n if (exit._tag === 'Success') return\n\n const { errorSummary, downgrade } = toSerializableErrorSummary(exit.cause)\n const downgradeHint = downgrade ? ` (downgrade=${downgrade})` : ''\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::handler_failure',\n severity: 'error',\n message: `Effect handler failed for actionTag=\"${entry.actionTag}\" sourceKey=\"${entry.sourceKey}\".${downgradeHint}`,\n hint: `${errorSummary.name ? 
`${errorSummary.name}: ` : ''}${errorSummary.message}`,\n actionTag: entry.actionTag,\n kind: 'effect_handler_failure',\n trigger: {\n kind: 'effect',\n name: 'handler',\n details: {\n actionTag: entry.actionTag,\n sourceKey: entry.sourceKey,\n logicUnitId: entry.logicUnitId,\n },\n },\n })\n }),\n )\n\n if (entries.length === 1) {\n yield* dispatchEntry(entries[0]!)\n return\n }\n\n yield* Effect.forEach(entries, dispatchEntry, { discard: true })\n }),\n ).pipe(\n Effect.catchCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::watcher_crashed',\n severity: 'error',\n message: 'Effect watcher crashed while routing action handlers.',\n hint: toSerializableErrorSummary(cause).errorSummary.message,\n actionTag: '*',\n kind: 'effect_watcher_crashed',\n })),\n )\n\n return Effect.forkScoped(program).pipe(Effect.asVoid)\n }\n\n const registerEffect = (params: RegisterEffectArgs) =>\n Effect.gen(function* () {\n const actionTag = params.actionTag\n const handler = params.handler\n\n const logicUnitId = params.logicUnit?.logicUnitId ?? 'unknown'\n const logicUnitLabel = params.logicUnit?.logicUnitLabel ?? `logicUnit:${logicUnitId}`\n const logicUnitPath = params.logicUnit?.path\n\n const unitState = getOrCreateLogicUnitState(logicUnitStates, logicUnitId)\n const handlerId = getOrAssignHandlerId(unitState, handler)\n const sourceKey = `${logicUnitId}::${handlerId}`\n\n const tagState = getOrCreateTagState(actionTag)\n\n const duplicate = tagState.handlers.has(sourceKey)\n if (duplicate) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::duplicate_registration',\n severity: 'warning',\n message: `Duplicate effect registration ignored for actionTag=\"${actionTag}\" sourceKey=\"${sourceKey}\".`,\n hint:\n 'The runtime de-duplicates effect handlers by (actionTag, sourceKey). 
' +\n 'If you see this unexpectedly, check repeated setup registration or accidental double-mounting.',\n actionTag,\n kind: 'effect_duplicate_registration',\n trigger: {\n kind: 'effect',\n name: 'register',\n details: {\n actionTag,\n sourceKey,\n phase: params.phase,\n logicUnitId,\n logicUnitLabel,\n logicUnitPath,\n },\n },\n })\n return { sourceKey, duplicate: true } as const\n }\n\n tagState.handlers.set(sourceKey, {\n actionTag,\n sourceKey,\n handler,\n phase: params.phase,\n logicUnitId,\n logicUnitLabel,\n logicUnitPath,\n })\n refreshHandlerSnapshot(tagState)\n\n if (params.phase === 'run') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code: 'effects::dynamic_registration',\n severity: 'warning',\n message: `Effect registered in run phase for actionTag=\"${actionTag}\" sourceKey=\"${sourceKey}\".`,\n hint: 'Run-phase registration only affects future actions; prefer registering effects during setup for deterministic behavior.',\n actionTag,\n kind: 'effect_dynamic_registration',\n trigger: {\n kind: 'effect',\n name: 'register:run',\n details: { actionTag, sourceKey, logicUnitId, logicUnitLabel, logicUnitPath },\n },\n })\n }\n\n yield* startWatcherIfNeeded()\n return { sourceKey, duplicate: false } as const\n })\n\n return { registerEffect }\n}\n","import { Cause, Effect, Exit, Fiber, Option, PubSub, Queue, SubscriptionRef } from 'effect'\nimport type { StateChangeWithMeta, StateCommitMeta, StateCommitMode, StateCommitPriority } from './module.js'\nimport type {\n StateTraitProgram,\n TraitConvergeGenerationEvidence,\n TraitConvergePlanCacheEvidence,\n} from '../../state-trait/model.js'\nimport type { DirtyAllReason, DirtySet, FieldPathIdRegistry } from '../../field-path.js'\nimport * as Debug from './DebugSink.js'\nimport * as StateTransaction from './StateTransaction.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport * as StateTraitConverge from '../../state-trait/converge.js'\nimport * as StateTraitValidate from 
'../../state-trait/validate.js'\nimport * as StateTraitSource from '../../state-trait/source.js'\nimport * as RowId from '../../state-trait/rowid.js'\nimport type { RunOperation } from './ModuleRuntime.operation.js'\nimport type { ResolvedTraitConvergeConfig } from './ModuleRuntime.traitConvergeConfig.js'\nimport {\n currentTxnQueuePhaseTiming,\n type CapturedTxnRuntimeScope,\n type EnqueueTransaction,\n type TxnQueuePhaseTiming,\n} from './ModuleRuntime.txnQueue.js'\nimport { StateTransactionOverridesTag, type StateTransactionOverrides } from './env.js'\nimport { runSyncExitWithServices } from './runner/SyncEffectRunner.js'\n\nconst DIRTY_ALL_SET_STATE_HINT = Symbol.for('@logixjs/core/dirtyAllSetStateHint')\nconst ASYNC_ESCAPE_DIAGNOSTIC_CODE = 'state_transaction::async_escape'\nconst ASYNC_ESCAPE_MESSAGE = 'Synchronous StateTransaction body escaped the transaction window (async/await detected).'\nconst ASYNC_ESCAPE_HINT =\n 'No IO/await/sleep/promises inside the transaction window; use run*Task (pending → IO → writeback) or move async logic outside the transaction.'\nconst ASYNC_ESCAPE_KIND = 'async_in_transaction'\n\nconst makeAsyncEscapeError = (): Error =>\n Object.assign(new Error(ASYNC_ESCAPE_MESSAGE), {\n code: ASYNC_ESCAPE_DIAGNOSTIC_CODE,\n hint: ASYNC_ESCAPE_HINT,\n kind: ASYNC_ESCAPE_KIND,\n })\n\nconst findAsyncEscapeFiber = (cause: unknown): Fiber.Fiber<unknown, unknown> | undefined => {\n const defect = Cause.findDefect(cause as any) as\n | { readonly _tag: 'Success'; readonly success: Fiber.Fiber<unknown, unknown> }\n | { readonly _tag: 'Failure' }\n\n if (defect?._tag !== 'Success') return undefined\n const value: any = defect.success\n if (typeof value !== 'object' || value == null) return undefined\n return typeof value.id === 'number' && typeof value._yielded === 'function' ? 
(value as Fiber.Fiber<unknown, unknown>) : undefined\n}\n\nconst readDeferredFlushSlice = (details: unknown): { readonly start: number; readonly end: number } | undefined => {\n if (!details || typeof details !== 'object') return undefined\n const raw = details as any\n const start = raw.sliceStart\n const end = raw.sliceEnd\n if (typeof start !== 'number' || typeof end !== 'number') return undefined\n if (!Number.isFinite(start) || !Number.isFinite(end)) return undefined\n const s = Math.floor(start)\n const e = Math.floor(end)\n if (s < 0 || e <= s) return undefined\n return { start: s, end: e }\n}\n\ntype TxnPostCommitPhaseTiming = {\n readonly totalMs: number\n readonly rowIdSyncMs: number\n readonly publishCommitMs: number\n readonly stateUpdateDebugRecordMs: number\n readonly onCommitBeforeStateUpdateMs: number\n readonly onCommitAfterStateUpdateMs: number\n}\n\ntype TxnPhaseTraceData = {\n readonly kind: 'txn-phase'\n readonly originKind: string\n readonly originName?: string\n readonly commitMode: StateCommitMode\n readonly priority: StateCommitPriority\n readonly txnPreludeMs: number\n readonly queue?: TxnQueuePhaseTiming\n readonly dispatchActionRecordMs: number\n readonly dispatchActionCommitHubMs: number\n readonly dispatchActionCount: number\n readonly bodyShellMs: number\n readonly asyncEscapeGuardMs: number\n readonly traitConvergeMs: number\n readonly scopedValidateMs: number\n readonly sourceSyncMs: number\n readonly commit: TxnPostCommitPhaseTiming\n}\n\nconst readClockMs = (): number => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === \"function\") {\n return perf.now()\n }\n return Date.now()\n}\n\nexport type RunWithStateTransaction = <E>(\n origin: StateTransaction.StateTxnOrigin,\n body: () => Effect.Effect<void, E, never>,\n) => Effect.Effect<void, E, never>\n\nexport type SetStateInternal<S> = (\n next: S,\n path: StateTransaction.StatePatchPath,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: 
unknown,\n traitNodeId?: string,\n stepId?: number,\n) => Effect.Effect<void>\n\nexport type TraitRuntimeAccess = {\n readonly getProgram: () => StateTraitProgram<any> | undefined\n readonly getConvergeStaticIrDigest: () => string | undefined\n readonly getConvergePlanCache: () => StateTraitConverge.ConvergePlanCache | undefined\n readonly getConvergeGeneration: () => TraitConvergeGenerationEvidence\n readonly getPendingCacheMissReason: () => TraitConvergePlanCacheEvidence['missReason'] | undefined\n readonly getPendingCacheMissReasonCount: () => number\n readonly setPendingCacheMissReason: (next: TraitConvergePlanCacheEvidence['missReason'] | undefined) => void\n readonly rowIdStore: RowId.RowIdStore\n readonly getListConfigs: () => ReadonlyArray<RowId.ListConfig>\n}\n\nexport type TraitConvergeTimeSlicingState = {\n readonly signal: Queue.Queue<void>\n readonly backlogDirtyPaths: Set<StateTransaction.StatePatchPath>\n readonly ensureWorkerStarted: () => Effect.Effect<void, never, never>\n backlogDirtyAllReason?: DirtyAllReason\n firstPendingAtMs: number | undefined\n lastTouchedAtMs: number | undefined\n latestConvergeConfig: ResolvedTraitConvergeConfig | undefined\n capturedContext: CapturedTxnRuntimeScope | undefined\n}\n\nexport const makeTransactionOps = <S>(args: {\n readonly moduleId: string\n readonly optionsModuleId: string | undefined\n readonly instanceId: string\n readonly stateRef: SubscriptionRef.SubscriptionRef<S>\n readonly commitHub: PubSub.PubSub<StateChangeWithMeta<S>>\n readonly shouldPublishCommitHub?: () => boolean\n readonly recordStatePatch: (\n path: StateTransaction.StatePatchPath | undefined,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n readonly onCommit?: (args: {\n readonly state: S\n readonly meta: StateCommitMeta\n readonly transaction: StateTransaction.StateTransaction<S>\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n }) => 
Effect.Effect<void>\n readonly enqueueTransaction: EnqueueTransaction\n readonly runOperation: RunOperation\n readonly txnContext: StateTransaction.StateTxnContext<S>\n readonly traitConvergeTimeSlicing: TraitConvergeTimeSlicingState\n readonly traitRuntime: TraitRuntimeAccess\n readonly resolveTraitConvergeConfig: () => Effect.Effect<ResolvedTraitConvergeConfig, never, never>\n readonly isDevEnv: () => boolean\n readonly txnHistory: {\n readonly buffer: Array<StateTransaction.StateTransaction<S> | undefined>\n start: number\n size: number\n readonly capacity: number\n }\n readonly txnById: Map<string, StateTransaction.StateTransaction<S>>\n}): {\n readonly readState: Effect.Effect<S>\n readonly setStateInternal: SetStateInternal<S>\n readonly runWithStateTransaction: RunWithStateTransaction\n readonly __logixGetExecVmAssemblyEvidence?: () => unknown\n} => {\n const {\n moduleId,\n optionsModuleId,\n instanceId,\n stateRef,\n commitHub,\n shouldPublishCommitHub,\n recordStatePatch,\n onCommit,\n enqueueTransaction,\n runOperation,\n txnContext,\n traitConvergeTimeSlicing,\n traitRuntime,\n resolveTraitConvergeConfig,\n isDevEnv,\n txnHistory,\n txnById,\n } = args\n\n /**\n * Read current state:\n * - If a transaction is active, return the transaction draft.\n * - Otherwise, fall back to the underlying SubscriptionRef snapshot.\n */\n const readState: Effect.Effect<S> = Effect.gen(function* () {\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n const current = txnContext.current\n if (inTxn && current) return current.draft\n return yield* SubscriptionRef.get(stateRef)\n })\n\n const runPostCommitPhases = (args: {\n readonly txn: StateTransaction.StateTransaction<S>\n readonly nextState: S\n readonly replayEvent: unknown\n readonly commitMode: StateCommitMode\n readonly priority: StateCommitPriority\n readonly fieldPathIdRegistry: FieldPathIdRegistry | undefined\n readonly dirtyAllSetStateHint: boolean\n readonly 
traitSummary: unknown\n readonly phaseTimingEnabled: boolean\n }): Effect.Effect<TxnPostCommitPhaseTiming | undefined> =>\n Effect.gen(function* () {\n const {\n txn,\n nextState,\n replayEvent,\n commitMode,\n priority,\n fieldPathIdRegistry,\n dirtyAllSetStateHint,\n traitSummary,\n phaseTimingEnabled,\n } = args\n const phaseStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n let rowIdSyncMs = 0\n let publishCommitMs = 0\n let stateUpdateDebugRecordMs = 0\n let onCommitBeforeStateUpdateMs = 0\n let onCommitAfterStateUpdateMs = 0\n const shouldWarnDirtyAllSetState =\n dirtyAllSetStateHint || (txn.origin.kind === 'state' && txn.origin.name === 'setState')\n\n if (shouldWarnDirtyAllSetState && isDevEnv() && txn.dirty.dirtyAll === true) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n trigger: txn.origin,\n code: 'state_transaction::dirty_all_fallback',\n severity: 'warning',\n message:\n 'setState/state.update did not provide field-level dirty-set evidence; falling back to dirtyAll scheduling.',\n hint: 'Prefer $.state.mutate(...) or Logix.Module.Reducer.mutate(...) 
to produce field-level patchPaths; otherwise converge/validate degrades to full-path scheduling.',\n kind: 'dirty_all_fallback:set_state',\n })\n }\n\n // Record txn history: only for dev/test or explicit full instrumentation (devtools/debugging).\n // In production (default light), keep zero retention to avoid turning \"txn history\" into an implicit memory tax.\n if (isDevEnv() || txnContext.config.instrumentation === 'full') {\n txnById.set(txn.txnId, txn)\n const cap = txnHistory.capacity\n if (cap > 0) {\n const buf = txnHistory.buffer\n if (txnHistory.size < cap) {\n buf[(txnHistory.start + txnHistory.size) % cap] = txn\n txnHistory.size += 1\n } else {\n const evicted = buf[txnHistory.start]\n buf[txnHistory.start] = txn\n txnHistory.start = (txnHistory.start + 1) % cap\n if (evicted) {\n txnById.delete(evicted.txnId)\n }\n }\n }\n }\n\n // RowID virtual identity layer: align mappings after each observable commit\n // so in-flight gates and cache reuse remain stable under insert/remove/reorder.\n const listConfigs = traitRuntime.getListConfigs()\n if (listConfigs.length > 0) {\n const rowIdSyncStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const shouldSyncRowIds = RowId.shouldReconcileListConfigsByDirtyEvidence({\n dirty: txn.dirty,\n listConfigs,\n fieldPathIdRegistry,\n })\n if (shouldSyncRowIds) {\n traitRuntime.rowIdStore.updateAll(nextState as any, listConfigs)\n }\n if (phaseTimingEnabled) {\n rowIdSyncMs = Math.max(0, readClockMs() - rowIdSyncStartedAtMs)\n }\n }\n\n const meta: StateCommitMeta = {\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n commitMode,\n priority,\n originKind: txn.origin.kind,\n originName: txn.origin.name,\n }\n\n // Always publish:\n // - PubSub already optimizes for 0 subscribers.\n // - Skipping here can drop the first commit due to subscription start races (strictGate/process triggers/tests).\n const publishCommitStartedAtMs = phaseTimingEnabled ? 
readClockMs() : 0\n yield* PubSub.publish(commitHub, {\n value: nextState,\n meta,\n })\n if (phaseTimingEnabled) {\n publishCommitMs = Math.max(0, readClockMs() - publishCommitStartedAtMs)\n }\n\n // Perf-sensitive ordering:\n // - In diagnostics=off mode (default for production/perf runs), allow selectorGraph notifications to be published\n // before state:update debug recording so React external store subscribers can start flushing earlier.\n // - When traceMode=off (production default), treat it as a perf mode even under diagnostics=light/full:\n // publish onCommit before state:update so TickScheduler can schedule the tick flush earlier (yieldMicrotask),\n // reducing end-to-end latency and full/off variance on externalStore ingest workloads.\n // - When traceMode=on, keep the original ordering so any selector eval trace stays after state:update\n // (preserves a more intuitive txn → selector → render causal chain in devtools).\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n let shouldCommitBeforeStateUpdate = false\n if (onCommit) {\n if (diagnosticsLevel === 'off') {\n shouldCommitBeforeStateUpdate = true\n } else {\n const traceMode = yield* Effect.service(Debug.currentTraceMode).pipe(Effect.orDie)\n shouldCommitBeforeStateUpdate = traceMode === 'off'\n }\n }\n\n if (onCommit && shouldCommitBeforeStateUpdate) {\n const onCommitStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n yield* onCommit({\n state: nextState,\n meta,\n transaction: txn,\n diagnosticsLevel,\n })\n if (phaseTimingEnabled) {\n onCommitBeforeStateUpdateMs = Math.max(0, readClockMs() - onCommitStartedAtMs)\n }\n }\n\n const debugSinks = yield* Effect.service(Debug.currentDebugSinks).pipe(Effect.orDie)\n const shouldRecordStateUpdate = debugSinks.length > 0 && !Debug.isErrorOnlyOnlySinks(debugSinks)\n\n if (shouldRecordStateUpdate) {\n const stateUpdateDebugRecordStartedAtMs = phaseTimingEnabled ? 
readClockMs() : 0\n const shouldComputeEvidence = diagnosticsLevel !== 'off'\n\n const staticIrDigest = shouldComputeEvidence ? traitRuntime.getConvergeStaticIrDigest() : undefined\n\n const dirtySetEvidence = shouldComputeEvidence\n ? (() => {\n const pathIdsTopK = diagnosticsLevel === 'full' ? 32 : 3\n\n if (txn.dirty.dirtyAll) {\n return {\n dirtyAll: true,\n reason: txn.dirty.dirtyAllReason ?? 'unknownWrite',\n pathIds: [],\n pathCount: 0,\n keySize: 0,\n keyHash: 0,\n pathIdsTruncated: false,\n }\n }\n\n const fullPathIds = txn.dirty.dirtyPathIds\n const topK = fullPathIds.slice(0, pathIdsTopK)\n return {\n dirtyAll: false,\n // Keep diff anchors (count/hash/size) for the full set; only truncate the pathIds payload.\n pathIds: topK,\n pathCount: fullPathIds.length,\n keySize: txn.dirty.dirtyPathsKeySize,\n keyHash: txn.dirty.dirtyPathsKeyHash,\n pathIdsTruncated: fullPathIds.length > pathIdsTopK,\n }\n })()\n : undefined\n\n yield* Debug.record({\n type: 'state:update',\n moduleId: optionsModuleId,\n state: nextState,\n instanceId,\n txnSeq: txn.txnSeq,\n txnId: txn.txnId,\n staticIrDigest,\n dirtySet: dirtySetEvidence,\n patchCount: txn.patchCount,\n patchesTruncated: txn.patchesTruncated,\n ...(txn.patchesTruncated ? { patchesTruncatedReason: txn.patchesTruncatedReason } : null),\n commitMode,\n priority,\n originKind: txn.origin.kind,\n originName: txn.origin.name,\n traitSummary,\n replayEvent: replayEvent as any,\n })\n if (phaseTimingEnabled) {\n stateUpdateDebugRecordMs = Math.max(0, readClockMs() - stateUpdateDebugRecordStartedAtMs)\n }\n }\n\n if (onCommit && !shouldCommitBeforeStateUpdate) {\n const onCommitStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n yield* onCommit({\n state: nextState,\n meta,\n transaction: txn,\n diagnosticsLevel,\n })\n if (phaseTimingEnabled) {\n onCommitAfterStateUpdateMs = Math.max(0, readClockMs() - onCommitStartedAtMs)\n }\n }\n\n return phaseTimingEnabled\n ? 
{\n totalMs: Math.max(0, readClockMs() - phaseStartedAtMs),\n rowIdSyncMs,\n publishCommitMs,\n stateUpdateDebugRecordMs,\n onCommitBeforeStateUpdateMs,\n onCommitAfterStateUpdateMs,\n }\n : undefined\n })\n\n /**\n * runWithStateTransaction:\n * - Open a transaction for a single logic entrypoint (dispatch / source-refresh / future extensions).\n * - Aggregate all state writes within body; at the end commit once and emit a state:update debug event.\n * - The caller must ensure body does not cross long IO boundaries (see the spec constraints on the transaction window).\n */\n const runWithStateTransaction: RunWithStateTransaction = <E2>(\n origin: StateTransaction.StateTxnOrigin,\n body: () => Effect.Effect<void, E2, never>,\n ): Effect.Effect<void, E2, never> =>\n (Effect.provideService(Effect.gen(function* () {\n const txnPreludeStartedAtMs = readClockMs()\n const phaseDiagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const phaseTimingEnabled = phaseDiagnosticsLevel !== 'off'\n const queuePhaseTiming = yield* Effect.service(currentTxnQueuePhaseTiming).pipe(Effect.orDie)\n const baseState = yield* SubscriptionRef.get(stateRef)\n\n StateTransaction.beginTransaction(txnContext, origin, baseState)\n const txnCurrent: any = txnContext.current\n txnCurrent.stateTraitValidateRequests = []\n txnCurrent.commitMode = 'normal' as StateCommitMode\n txnCurrent.priority = 'normal' as StateCommitPriority\n txnCurrent.dispatchPhaseTimingEnabled = phaseTimingEnabled\n txnCurrent.dispatchActionRecordMs = 0\n txnCurrent.dispatchActionCommitHubMs = 0\n txnCurrent.dispatchActionCount = 0\n const txnPreludeMs = phaseTimingEnabled ? 
Math.max(0, readClockMs() - txnPreludeStartedAtMs) : 0\n\n const stateCommitPriority = (origin as any)?.details?.stateCommit?.priority\n if (stateCommitPriority === 'low' || stateCommitPriority === 'normal') {\n txnCurrent.priority = stateCommitPriority as StateCommitPriority\n }\n \n const txnId = txnContext.current?.txnId\n const txnSeq = txnContext.current?.txnSeq\n \n TaskRunner.enterSyncTransactionShadow()\n let exit: Exit.Exit<void, E2> | undefined\n\n try {\n exit = yield* Effect.exit(\n Effect.provideService(\n Effect.gen(function* () {\n // Trait summary inside the transaction window (for devtools/diagnostics).\n let traitSummary: unknown | undefined\n \n // Execute logic inside the transaction window (reducer / watcher writeback / traits, etc.).\n // Contract: no IO/await/sleep/promises inside the transaction window.\n //\n // Fail-fast when async escapes the window (even in production), without blocking on cleanup.\n // - Sync bodies typically finish before/within the first few polls.\n // - Async bodies (sleep/await/IO) suspend and will not complete within the budget.\n // Use daemon fiber to avoid supervision-induced blocking:\n // - An uninterruptible async escape must not block abort/next transaction.\n const bodyShellStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const asyncEscapeGuardStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const currentServices = yield* Effect.services<any>()\n const bodyExit = yield* Effect.sync(\n () => runSyncExitWithServices(body() as Effect.Effect<void, E2, any>, currentServices) as Exit.Exit<void, E2>,\n )\n const asyncEscapeGuardMs = phaseTimingEnabled\n ? 
Math.max(0, readClockMs() - asyncEscapeGuardStartedAtMs)\n : 0\n\n if (Exit.isFailure(bodyExit)) {\n const asyncEscapeFiber = findAsyncEscapeFiber(bodyExit.cause)\n\n if (asyncEscapeFiber) {\n if (phaseDiagnosticsLevel !== 'off') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n trigger: origin,\n code: ASYNC_ESCAPE_DIAGNOSTIC_CODE,\n severity: 'error',\n message: ASYNC_ESCAPE_MESSAGE,\n hint: ASYNC_ESCAPE_HINT,\n kind: ASYNC_ESCAPE_KIND,\n })\n }\n\n yield* Fiber.interrupt(asyncEscapeFiber).pipe(Effect.asVoid, Effect.forkDetach({ startImmediately: true }))\n return yield* Effect.die(makeAsyncEscapeError())\n }\n\n return yield* Effect.failCause(bodyExit.cause)\n }\n const bodyShellMs = phaseTimingEnabled ? Math.max(0, readClockMs() - bodyShellStartedAtMs) : 0\n\n const stateTraitProgram = traitRuntime.getProgram()\n let traitConvergeMs = 0\n let scopedValidateMs = 0\n let sourceSyncMs = 0\n \n // StateTrait: converge derived fields (computed/link, etc.) before commit to ensure 0/1 commit per window.\n if (stateTraitProgram && txnContext.current) {\n const convergeConfig = yield* resolveTraitConvergeConfig()\n traitConvergeTimeSlicing.latestConvergeConfig = convergeConfig\n const timeSlicingConfig = convergeConfig.traitConvergeTimeSlicing\n const isDeferredFlushTxn = origin.kind === 'trait:deferred_flush'\n const hasDeferredSteps =\n stateTraitProgram.convergeExecIr != null &&\n stateTraitProgram.convergeExecIr.topoOrderDeferredInt32.length > 0\n const canTimeSlice = timeSlicingConfig.enabled === true && hasDeferredSteps\n const schedulingScope: StateTraitConverge.ConvergeContext<any>['schedulingScope'] = isDeferredFlushTxn\n ? 'deferred'\n : canTimeSlice\n ? 'immediate'\n : 'all'\n \n const deferredSlice = isDeferredFlushTxn ? readDeferredFlushSlice(origin.details) : undefined\n const deferredScopeStepIds =\n deferredSlice && stateTraitProgram.convergeExecIr\n ? 
stateTraitProgram.convergeExecIr.topoOrderDeferredInt32.subarray(\n deferredSlice.start,\n deferredSlice.end,\n )\n : undefined\n \n const traitConvergeStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const convergeExit = yield* Effect.exit(\n StateTraitConverge.convergeInTransaction(\n stateTraitProgram as any,\n {\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n configScope: convergeConfig.configScope,\n now: txnContext.config.now,\n budgetMs: convergeConfig.traitConvergeBudgetMs,\n decisionBudgetMs: convergeConfig.traitConvergeDecisionBudgetMs,\n requestedMode: isDeferredFlushTxn ? 'full' : convergeConfig.traitConvergeMode,\n schedulingScope,\n ...(deferredScopeStepIds ? { schedulingScopeStepIds: deferredScopeStepIds } : {}),\n dirtyAllReason: (txnContext.current as any)?.dirtyAllReason,\n dirtyPaths: txnContext.current?.dirtyPathIds,\n dirtyPathsKeyHash: (txnContext.current as any)?.dirtyPathIdsKeyHash,\n dirtyPathsKeySize: (txnContext.current as any)?.dirtyPathIdsKeySize,\n allowInPlaceDraft:\n txnContext.current != null &&\n !Object.is(txnContext.current.draft, txnContext.current.baseState),\n \t planCache: traitRuntime.getConvergePlanCache(),\n \t generation: traitRuntime.getConvergeGeneration(),\n \t cacheMissReasonHint: traitRuntime.getPendingCacheMissReason(),\n \t cacheMissReasonHintCount: traitRuntime.getPendingCacheMissReasonCount(),\n \t getDraft: () => txnContext.current!.draft as any,\n \t setDraft: (next) => {\n \t StateTransaction.updateDraft(txnContext, next as any)\n \t },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitConverge.ConvergeContext<any>,\n ),\n )\n if (phaseTimingEnabled) {\n traitConvergeMs = Math.max(0, readClockMs() - traitConvergeStartedAtMs)\n }\n \n if (traitRuntime.getPendingCacheMissReason() === 'generation_bumped') {\n traitRuntime.setPendingCacheMissReason(undefined)\n }\n \n if (convergeExit._tag === 
'Failure') {\n const errors = convergeExit.cause.reasons\n .filter((reason) => Cause.isFailReason(reason) || Cause.isDieReason(reason))\n .map((reason) => (Cause.isFailReason(reason) ? reason.error : reason.defect))\n const configError = errors.find(\n (err): err is StateTraitConverge.StateTraitConfigError =>\n err instanceof StateTraitConverge.StateTraitConfigError,\n )\n \n if (configError) {\n const fields = configError.fields ?? []\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n txnId,\n trigger: origin,\n code: 'state_trait::config_error',\n severity: 'error',\n message: configError.message,\n hint:\n configError.code === 'CYCLE_DETECTED'\n ? `computed/link graph has a cycle: ${fields.join(', ')}`\n : `multiple writers detected for the same field: ${fields.join(', ')}`,\n kind: `state_trait_config_error:${configError.code}`,\n })\n }\n \n return yield* Effect.failCause(convergeExit.cause)\n }\n \n const outcome = convergeExit.value\n \n const dirtyAllReasonForDeferred: DirtyAllReason | undefined = (txnContext.current as any)?.dirtyAllReason\n const dirtyPathIdsForDeferred: ReadonlySet<StateTransaction.StatePatchPath> | undefined =\n canTimeSlice && !isDeferredFlushTxn && !dirtyAllReasonForDeferred\n ? txnContext.current.dirtyPathIds\n : undefined\n \n if (\n canTimeSlice &&\n !isDeferredFlushTxn &&\n outcome._tag !== 'Degraded' &&\n (dirtyAllReasonForDeferred != null ||\n (dirtyPathIdsForDeferred != null && dirtyPathIdsForDeferred.size > 0))\n ) {\n const nowMs = Date.now()\n traitConvergeTimeSlicing.firstPendingAtMs = traitConvergeTimeSlicing.firstPendingAtMs ?? 
nowMs\n traitConvergeTimeSlicing.lastTouchedAtMs = nowMs\n \n if (dirtyAllReasonForDeferred != null) {\n traitConvergeTimeSlicing.backlogDirtyAllReason = dirtyAllReasonForDeferred\n traitConvergeTimeSlicing.backlogDirtyPaths.clear()\n } else if (!traitConvergeTimeSlicing.backlogDirtyAllReason && dirtyPathIdsForDeferred) {\n for (const p of dirtyPathIdsForDeferred) {\n traitConvergeTimeSlicing.backlogDirtyPaths.add(p)\n }\n }\n \n const runtimeLabel = yield* Effect.service(Debug.currentRuntimeLabel).pipe(Effect.orDie)\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const debugSinks = yield* Effect.service(Debug.currentDebugSinks).pipe(Effect.orDie)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n const overrides = Option.isSome(overridesOpt) ? overridesOpt.value : undefined\n \n traitConvergeTimeSlicing.capturedContext = {\n runtimeLabel,\n diagnosticsLevel,\n debugSinks,\n overrides,\n }\n \n yield* traitConvergeTimeSlicing.ensureWorkerStarted()\n }\n \n traitSummary = outcome.decision ? { converge: outcome.decision } : undefined\n \n if (outcome._tag === 'Degraded') {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq,\n code: outcome.reason === 'budget_exceeded' ? 'trait::budget_exceeded' : 'trait::runtime_error',\n severity: 'warning',\n message:\n outcome.reason === 'budget_exceeded'\n ? 'Trait converge exceeded budget; derived fields are frozen for this operation window.'\n : 'Trait converge failed at runtime; derived fields are frozen for this operation window.',\n hint:\n outcome.reason === 'budget_exceeded'\n ? 
'Check whether computed/check contains heavy computation; move it to source/task or split into cacheable derived pieces.'\n : 'Check computed/link/check for invalid inputs or impure logic; add equals or guards if needed.',\n kind: 'trait_degraded',\n })\n }\n }\n \n // TraitLifecycle scoped validate: flush after converge so validation reads the latest derived state.\n if (stateTraitProgram && txnContext.current) {\n const dedupeScopedValidateRequests = (\n requests: ReadonlyArray<StateTraitValidate.ScopedValidateRequest>,\n ): ReadonlyArray<StateTraitValidate.ScopedValidateRequest> => {\n if (requests.length <= 1) return requests\n \n const priorities: Record<StateTraitValidate.ValidateMode, number> = {\n submit: 4,\n blur: 3,\n valueChange: 2,\n manual: 1,\n }\n \n let bestMode: StateTraitValidate.ValidateMode = 'manual'\n let bestP = priorities[bestMode]\n let hasRoot = false\n \n for (const r of requests) {\n const p = priorities[r.mode]\n if (p > bestP) {\n bestP = p\n bestMode = r.mode\n }\n if (r.target.kind === 'root') {\n hasRoot = true\n }\n }\n \n if (hasRoot) {\n return [{ mode: bestMode, target: { kind: 'root' } }]\n }\n \n const makeKey = (target: StateTraitValidate.ValidateTarget): string => {\n switch (target.kind) {\n case 'field':\n return `field:${target.path}`\n case 'list':\n return `list:${target.path}`\n case 'item':\n return `item:${target.path}:${target.index}:${target.field ?? 
''}`\n case 'root':\n return 'root'\n }\n }\n \n const order: Array<string> = []\n const byKey = new Map<string, StateTraitValidate.ScopedValidateRequest>()\n \n for (const req of requests) {\n const key = makeKey(req.target)\n const existing = byKey.get(key)\n if (!existing) {\n byKey.set(key, req)\n order.push(key)\n continue\n }\n if (priorities[req.mode] > priorities[existing.mode]) {\n byKey.set(key, { ...existing, mode: req.mode })\n }\n }\n \n return order.map((k) => byKey.get(k)!).filter(Boolean)\n }\n \n const pending = (txnContext.current as any).stateTraitValidateRequests as\n | ReadonlyArray<StateTraitValidate.ScopedValidateRequest>\n | undefined\n \n if (pending && pending.length > 0) {\n const scopedValidateStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const deduped = dedupeScopedValidateRequests(pending)\n yield* StateTraitValidate.validateInTransaction(\n \t stateTraitProgram as any,\n \t {\n \t moduleId: optionsModuleId,\n \t instanceId,\n \t txnSeq: txnContext.current!.txnSeq,\n \t txnId: txnContext.current!.txnId,\n \t origin: txnContext.current!.origin,\n \t rowIdStore: traitRuntime.rowIdStore,\n \t listConfigs: traitRuntime.getListConfigs(),\n \t txnDirtyEvidence: StateTransaction.readDirtyEvidence(txnContext),\n \t getDraft: () => txnContext.current!.draft as any,\n \t setDraft: (next) => {\n \t StateTransaction.updateDraft(txnContext, next as any)\n \t },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitValidate.ValidateContext<any>,\n deduped,\n )\n if (phaseTimingEnabled) {\n scopedValidateMs = Math.max(0, readClockMs() - scopedValidateStartedAtMs)\n }\n }\n }\n\n // If a source key becomes empty, synchronously recycle it back to idle (avoid tearing / ghost data).\n if (stateTraitProgram && txnContext.current) {\n const sourceSyncStartedAtMs = phaseTimingEnabled ? 
readClockMs() : 0\n yield* StateTraitSource.syncIdleInTransaction(\n stateTraitProgram as any,\n {\n moduleId: optionsModuleId,\n instanceId,\n getDraft: () => txnContext.current!.draft as any,\n setDraft: (next) => {\n StateTransaction.updateDraft(txnContext, next as any)\n },\n recordPatch: (path, reason, from, to, traitNodeId, stepId) =>\n recordStatePatch(path, reason, from, to, traitNodeId, stepId),\n } as StateTraitSource.SourceSyncContext<any>,\n )\n if (phaseTimingEnabled) {\n sourceSyncMs = Math.max(0, readClockMs() - sourceSyncStartedAtMs)\n }\n }\n \n // Commit the transaction: write to the underlying state once, and emit a single aggregated state:update event.\n yield* runOperation(\n 'state',\n 'state:update',\n { meta: { moduleId: optionsModuleId, instanceId } },\n Effect.gen(function* () {\n const replayEvent = (txnContext.current as any)?.lastReplayEvent as unknown\n const commitMode = ((txnContext.current as any)?.commitMode ?? 'normal') as StateCommitMode\n const priority = ((txnContext.current as any)?.priority ?? 'normal') as StateCommitPriority\n const fieldPathIdRegistry = txnContext.current?.fieldPathIdRegistry\n const dispatchPhaseTiming =\n phaseTimingEnabled && txnContext.current\n ? {\n dispatchActionRecordMs:\n typeof (txnContext.current as any).dispatchActionRecordMs === 'number'\n ? (txnContext.current as any).dispatchActionRecordMs\n : 0,\n dispatchActionCommitHubMs:\n typeof (txnContext.current as any).dispatchActionCommitHubMs === 'number'\n ? (txnContext.current as any).dispatchActionCommitHubMs\n : 0,\n dispatchActionCount:\n typeof (txnContext.current as any).dispatchActionCount === 'number'\n ? 
(txnContext.current as any).dispatchActionCount\n : 0,\n }\n : undefined\n const dirtyAllSetStateHint =\n txnContext.current != null && (txnContext.current as any)[DIRTY_ALL_SET_STATE_HINT] === true\n const commitResult = yield* StateTransaction.commitWithState(txnContext, stateRef)\n\n if (commitResult) {\n const commitPhaseTiming = yield* runPostCommitPhases({\n txn: commitResult.transaction,\n nextState: commitResult.finalState,\n replayEvent,\n commitMode,\n priority,\n fieldPathIdRegistry,\n dirtyAllSetStateHint,\n traitSummary,\n phaseTimingEnabled,\n })\n\n if (phaseTimingEnabled && commitPhaseTiming) {\n const trace: TxnPhaseTraceData = {\n kind: 'txn-phase',\n originKind: commitResult.transaction.origin.kind,\n originName: commitResult.transaction.origin.name,\n commitMode,\n priority,\n txnPreludeMs,\n ...(queuePhaseTiming ? { queue: queuePhaseTiming } : null),\n dispatchActionRecordMs: dispatchPhaseTiming?.dispatchActionRecordMs ?? 0,\n dispatchActionCommitHubMs: dispatchPhaseTiming?.dispatchActionCommitHubMs ?? 0,\n dispatchActionCount: dispatchPhaseTiming?.dispatchActionCount ?? 
0,\n bodyShellMs,\n asyncEscapeGuardMs,\n traitConvergeMs,\n scopedValidateMs,\n sourceSyncMs,\n commit: commitPhaseTiming,\n }\n yield* Debug.record({\n type: 'trace:txn-phase',\n moduleId: optionsModuleId,\n instanceId,\n txnSeq: commitResult.transaction.txnSeq,\n txnId: commitResult.transaction.txnId,\n data: trace,\n })\n }\n }\n }),\n )\n }),\n Debug.currentTxnId,\n txnId,\n ),\n )\n } finally {\n TaskRunner.exitSyncTransactionShadow()\n }\n \n if (exit!._tag === 'Failure') {\n // Always clear the transaction context on failure to avoid leaking into subsequent entrypoints.\n StateTransaction.abort(txnContext)\n return yield* Effect.failCause(exit!.cause)\n }\n }), TaskRunner.inSyncTransactionFiber, true) as Effect.Effect<void, E2, never>)\n\n /**\n * setStateInternal:\n * - Inside an active transaction: only update the draft and record patches (whole-State granularity), without writing to the underlying Ref.\n * - Outside a transaction: keep legacy behavior, write to SubscriptionRef directly and emit a state:update Debug event.\n *\n * Notes:\n * - When path=\"*\" and field-level evidence is missing, the transaction attempts a best-effort commit-time inference\n * (diff baseState -> finalState) to produce field-level dirty evidence. 
If inference is not possible, it degrades to dirtyAll.\n * - Prefer `$.state.mutate(...)` / `Logix.Module.Reducer.mutate(...)` to produce exact field-level patchPaths.\n * - Perf harness can still force dirtyAll via `recordStatePatch('*', 'perf')` (explicit contract).\n */\n const setStateInternal: SetStateInternal<S> = (\n next: S,\n path: StateTransaction.StatePatchPath,\n reason: StateTransaction.PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ): Effect.Effect<void> =>\n Effect.gen(function* () {\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n if (inTxn && txnContext.current) {\n const current: any = txnContext.current\n\n StateTransaction.updateDraft(txnContext, next)\n // Soft dirtyAll hint for `runtime.setState(...)` inside an active transaction:\n // - `setState` is a whole-state write and does not carry field-level dirty evidence by itself.\n // - Advanced callers (perf harness / internal integrators) may provide precise dirty evidence via\n // `InternalContracts.recordStatePatch(...)` after `setState`.\n // - We must not permanently degrade the txn to dirtyAll before that evidence arrives.\n if (path === '*' && reason === 'unknown') {\n current[DIRTY_ALL_SET_STATE_HINT] = true\n current.inferReplaceEvidence = true\n return\n }\n\n recordStatePatch(path, reason, from, to, traitNodeId, stepId)\n\n if (path === '*') {\n current[DIRTY_ALL_SET_STATE_HINT] = true\n }\n return\n }\n\n // Writes from non-transaction fibers must be queued to avoid bypassing txnQueue with concurrent updates.\n yield* enqueueTransaction(\n runOperation(\n 'state',\n 'state:update',\n {\n payload: next,\n meta: { moduleId, instanceId },\n },\n runWithStateTransaction(\n {\n kind: 'state',\n name: 'setState',\n },\n () =>\n Effect.sync(() => {\n // baseState is injected by runWithStateTransaction at txn start; we only need to update the draft here.\n StateTransaction.updateDraft(txnContext, 
next)\n recordStatePatch(path, reason, from, to, traitNodeId, stepId)\n }),\n ),\n ),\n )\n })\n\n const getExecVmAssemblyEvidence = (): unknown => {\n const digest = traitRuntime.getConvergeStaticIrDigest()\n if (!digest) return undefined\n return {\n convergeStaticIrDigest: digest,\n convergeGeneration: traitRuntime.getConvergeGeneration().generation,\n }\n }\n\n return {\n readState,\n setStateInternal,\n runWithStateTransaction,\n __logixGetExecVmAssemblyEvidence: getExecVmAssemblyEvidence,\n }\n}\n","import { Effect } from 'effect'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport {\n toSerializableErrorSummary,\n} from '../runtime/core/errorSummary.js'\nimport { dirtyPathsToRootIds, type DirtyAllReason, type FieldPath } from '../field-path.js'\nimport { getConvergeStaticIrDigest } from './converge-ir.js'\nimport { CowDraft, ShallowInPlaceDraft } from './converge-draft.js'\nimport { emitSchemaMismatch } from './converge-diagnostics.js'\nimport { currentExecVmMode } from './exec-vm-mode.js'\nimport { makeConvergeExecIr, type ConvergeExecIr } from './converge-exec-ir.js'\nimport { getMiddlewareStack, runWriterStep, runWriterStepOffFast } from './converge-step.js'\nimport {\n StateTraitConfigError,\n type ConvergeContext,\n type ConvergeOutcome,\n type ConvergeStepSummary,\n type ConvergeSummary,\n type ConvergeMode,\n} from './converge.types.js'\nimport type {\n StateTraitEntry,\n StateTraitProgram,\n TraitConvergeConfigScope,\n TraitConvergeDecisionSummary,\n TraitConvergeDiagnosticsSamplingSummary,\n TraitConvergeDirtySummary,\n TraitConvergeGenerationEvidence,\n TraitConvergeHotspot,\n TraitConvergeOutcome as TraitConvergeOutcomeTag,\n TraitConvergePlanCacheEvidence,\n TraitConvergeReason,\n TraitConvergeRequestedMode,\n TraitConvergeStaticIrEvidence,\n TraitConvergeStepStats,\n} from './model.js'\n\nconst EMPTY_INT32 = new Int32Array(0)\n\n// Inline dirty plan computation for off-fast-path:\n// - Must remain allocation-free in steady-state (no 
objects/closures per invocation).\n// - Writes plan ids into execIr.scratch.planStepIds[0..planLen).\n// - Returns:\n// -1 = invalid/unknown dirty path ids (fallback)\n// -2 = near-full (run full instead)\n// >=0 = plan length\nconst computeInlineDirtyPlanLenFromDirtyPathIdsSet = (\n execIr: ConvergeExecIr,\n dirtyPathIds: ReadonlySet<unknown>,\n scopeStepIds: Int32Array,\n scopeStepCount: number,\n schedulingScope: 'all' | 'immediate' | 'deferred',\n nearFullPlanRatioThreshold: number,\n): number => {\n const prefixFieldPathIdsByPathId = execIr.prefixFieldPathIdsByPathId\n const prefixOffsetsByPathId = execIr.prefixOffsetsByPathId\n const triggerStepIdsByFieldPathId = execIr.triggerStepIdsByFieldPathId\n const triggerStepOffsetsByFieldPathId = execIr.triggerStepOffsetsByFieldPathId\n const stepOutFieldPathIdByStepId = execIr.stepOutFieldPathIdByStepId\n const stepSchedulingByStepId = execIr.stepSchedulingByStepId\n\n const dirtyPrefixBitSet = execIr.scratch.dirtyPrefixBitSet\n const reachableStepBitSet = execIr.scratch.reachableStepBitSet\n const dirtyPrefixQueue = execIr.scratch.dirtyPrefixQueue\n const planScratch = execIr.scratch.planStepIds\n\n dirtyPrefixBitSet.clear()\n reachableStepBitSet.clear()\n\n const fieldPathCount = execIr.fieldPathsById.length\n\n let queueLen = 0\n for (const raw of dirtyPathIds) {\n if (typeof raw !== 'number' || !Number.isFinite(raw)) {\n return -1\n }\n const id = Math.floor(raw)\n if (id < 0 || id >= fieldPathCount) {\n return -1\n }\n\n const start = prefixOffsetsByPathId[id]\n const end = prefixOffsetsByPathId[id + 1]\n if (start == null || end == null) continue\n\n for (let i = start; i < end; i++) {\n const prefixId = prefixFieldPathIdsByPathId[i]!\n if (dirtyPrefixBitSet.has(prefixId)) continue\n dirtyPrefixBitSet.add(prefixId)\n dirtyPrefixQueue[queueLen] = prefixId\n queueLen += 1\n }\n }\n\n const nearFullThreshold = Math.ceil(scopeStepCount * nearFullPlanRatioThreshold)\n\n let cursor = 0\n let reachableCount = 0\n 
while (cursor < queueLen) {\n const prefixId = dirtyPrefixQueue[cursor]!\n cursor += 1\n\n const start = triggerStepOffsetsByFieldPathId[prefixId]\n const end = triggerStepOffsetsByFieldPathId[prefixId + 1]\n if (start == null || end == null) continue\n\n for (let i = start; i < end; i++) {\n const stepId = triggerStepIdsByFieldPathId[i]!\n if (schedulingScope !== 'all') {\n const flag = stepSchedulingByStepId[stepId]\n if (schedulingScope === 'immediate') {\n if (flag !== 0) continue\n } else {\n if (flag !== 1) continue\n }\n }\n if (reachableStepBitSet.has(stepId)) continue\n reachableStepBitSet.add(stepId)\n reachableCount += 1\n\n // If we're going to run almost everything anyway, bail out early and just run full.\n if (reachableCount >= nearFullThreshold) {\n return -2\n }\n\n const outId = stepOutFieldPathIdByStepId[stepId]!\n const start2 = prefixOffsetsByPathId[outId]\n const end2 = prefixOffsetsByPathId[outId + 1]\n if (start2 == null || end2 == null) continue\n\n for (let j = start2; j < end2; j++) {\n const prefixId2 = prefixFieldPathIdsByPathId[j]!\n if (dirtyPrefixBitSet.has(prefixId2)) continue\n dirtyPrefixBitSet.add(prefixId2)\n dirtyPrefixQueue[queueLen] = prefixId2\n queueLen += 1\n }\n }\n }\n\n let planLen = 0\n for (let i = 0; i < scopeStepIds.length; i++) {\n const stepId = scopeStepIds[i]!\n if (!reachableStepBitSet.has(stepId)) continue\n planScratch[planLen] = stepId\n planLen += 1\n }\n\n // NOTE: plan ids are written into execIr.scratch.planStepIds[0..planLen).\n // Safe because inline-dirty uses the plan only for the current converge pass.\n return planLen\n}\n\nconst pickTop3Steps = (steps: ReadonlyArray<ConvergeStepSummary>): ReadonlyArray<ConvergeStepSummary> => {\n let first: ConvergeStepSummary | undefined\n let second: ConvergeStepSummary | undefined\n let third: ConvergeStepSummary | undefined\n\n for (const step of steps) {\n const d = step.durationMs\n if (!first || d > first.durationMs) {\n third = second\n second = first\n 
first = step\n continue\n }\n if (!second || d > second.durationMs) {\n third = second\n second = step\n continue\n }\n if (!third || d > third.durationMs) {\n third = step\n }\n }\n\n if (!first) return []\n if (!second) return [first]\n if (!third) return [first, second]\n return [first, second, third]\n}\n\nconst normalizePositiveInt = (value: unknown): number | undefined => {\n if (typeof value !== 'number' || !Number.isFinite(value)) return undefined\n const n = Math.floor(value)\n return n > 0 ? n : undefined\n}\n\nconst insertTopKHotspot = (args: {\n readonly hotspots: Array<TraitConvergeHotspot>\n readonly next: TraitConvergeHotspot\n readonly topK: number\n}): void => {\n const { hotspots, next, topK } = args\n if (topK <= 0) return\n\n const idx = (() => {\n for (let i = 0; i < hotspots.length; i++) {\n if (next.durationMs > hotspots[i]!.durationMs) return i\n }\n return hotspots.length\n })()\n\n if (idx >= topK) return\n hotspots.splice(idx, 0, next)\n if (hotspots.length > topK) {\n hotspots.length = topK\n }\n}\n\n/**\n * convergeInTransaction:\n * - Execute one derived converge pass within an already-started StateTransaction context.\n * - Currently covers computed/link only (check/source will be added in later phases).\n */\nexport const convergeInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: ConvergeContext<S>,\n): Effect.Effect<ConvergeOutcome> =>\n Effect.gen(function* () {\n yield* emitSchemaMismatch(program, ctx)\n\n const decisionStartedAt = ctx.now()\n let decisionDurationMs: number | undefined\n let executionStartedAt = decisionStartedAt\n const base = ctx.getDraft()\n const requestedMode: TraitConvergeRequestedMode = ctx.requestedMode ?? 'auto'\n const reasons: Array<TraitConvergeReason> = []\n let mode: ConvergeMode = requestedMode === 'dirty' ? 'dirty' : requestedMode === 'full' ? 
'full' : 'full'\n\n const ir = program.convergeIr\n if (!ir) {\n return { _tag: 'Noop' } as const\n }\n\n // 049: Exec IR must be tied to the generation lifecycle and should not be rebuilt on every txn window.\n let execIr = program.convergeExecIr\n if (!execIr || execIr.generation !== ir.generation) {\n // Carry over off-fast-path perf hints across generation bumps when step count is unchanged.\n // Motivation: avoid repeated warmup / misclassification after graph-change invalidation,\n // while still allowing the EWMA to adapt when the actual cost shifts.\n const prev = execIr\n const next = makeConvergeExecIr(ir)\n\n if (prev && prev.topoOrderInt32.length === next.topoOrderInt32.length) {\n next.perf.fullCommitEwmaOffMs = prev.perf.fullCommitEwmaOffMs\n next.perf.fullCommitSampleCountOff = prev.perf.fullCommitSampleCountOff\n next.perf.fullCommitLastTxnSeqOff = prev.perf.fullCommitLastTxnSeqOff\n // NOTE: do not carry over fullCommitMinOffMs; it never increases and can become stale across rebuilds.\n }\n\n execIr = next\n ;(program as any).convergeExecIr = execIr\n }\n\n if (ir.configError) {\n throw new StateTraitConfigError(ir.configError.code, ir.configError.message, ir.configError.fields)\n }\n\n const stepsInTopoOrder = (ir.stepsById ?? 
[]) as ReadonlyArray<StateTraitEntry<any, string>>\n const totalSteps = stepsInTopoOrder.length\n\n if (totalSteps === 0) {\n return { _tag: 'Noop' } as const\n }\n\n const stack = yield* getMiddlewareStack()\n const diagnosticsLevel: Debug.DiagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const debugSinks = yield* Effect.service(Debug.currentDebugSinks).pipe(Effect.orDie)\n // Decision / TraitSummary gate is based on \"will it be consumed\" (sinks), not diagnosticsLevel.\n // diagnosticsLevel only controls exportable/heavy details (trace payload, hotspots, static IR export, etc.).\n const shouldCollectDecision = debugSinks.length > 0 && !Debug.isErrorOnlyOnlySinks(debugSinks)\n const shouldCollectDecisionDetails = shouldCollectDecision && diagnosticsLevel !== 'off'\n const shouldCollectDecisionHeavyDetails = shouldCollectDecision && diagnosticsLevel !== 'off'\n const execVmMode = yield* currentExecVmMode\n\n // 044: deterministic sampling for sampled mode (uses txnSeq as a stable anchor by default).\n let diagnosticsSampling: TraitConvergeDiagnosticsSamplingSummary | undefined\n if (diagnosticsLevel === 'sampled') {\n const cfg = yield* Effect.service(Debug.currentTraitConvergeDiagnosticsSampling).pipe(Effect.orDie)\n const sampleEveryN = normalizePositiveInt(cfg.sampleEveryN) ?? 32\n const topK = normalizePositiveInt(cfg.topK) ?? 3\n const txnSeq = ctx.txnSeq\n const sampled =\n typeof txnSeq === 'number' && Number.isFinite(txnSeq) && txnSeq > 0\n ? (Math.floor(txnSeq) - 1) % sampleEveryN === 0\n : false\n diagnosticsSampling = {\n strategy: 'txnSeq_interval',\n sampleEveryN,\n topK,\n sampled,\n }\n }\n\n const shouldTimeStepsForHotspots =\n shouldCollectDecision && (diagnosticsLevel === 'full' || diagnosticsSampling?.sampled === true)\n const hotspotsTopK = diagnosticsLevel === 'full' ? 3 : (diagnosticsSampling?.topK ?? 3)\n const hotspots: Array<TraitConvergeHotspot> | undefined = shouldTimeStepsForHotspots ? 
[] : undefined\n const schedulingScope = ctx.schedulingScope ?? 'all'\n const scopeStepIds =\n ctx.schedulingScopeStepIds ??\n (schedulingScope === 'immediate'\n ? execIr.topoOrderImmediateInt32\n : schedulingScope === 'deferred'\n ? execIr.topoOrderDeferredInt32\n : execIr.topoOrderInt32)\n const scopeStepCount = scopeStepIds.length\n const immediateStepCount = execIr.topoOrderImmediateInt32.length\n const deferredStepCount = execIr.topoOrderDeferredInt32.length\n const timeSlicingSummary =\n deferredStepCount > 0\n ? {\n scope: schedulingScope,\n immediateStepCount,\n deferredStepCount,\n }\n : undefined\n\n if (deferredStepCount > 0) {\n if (schedulingScope === 'immediate' && !reasons.includes('time_slicing_immediate')) {\n reasons.push('time_slicing_immediate')\n } else if (schedulingScope === 'deferred' && !reasons.includes('time_slicing_deferred')) {\n reasons.push('time_slicing_deferred')\n }\n }\n\n const emitTraitConvergeTraceEvent = (decision: TraitConvergeDecisionSummary): Effect.Effect<void> =>\n !shouldCollectDecision\n ? Effect.void\n : Debug.record({\n type: 'trace:trait:converge',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data: decision as any,\n })\n\n const registry = ir.fieldPathIdRegistry\n const dirtyPaths = ctx.dirtyPaths == null ? [] : Array.isArray(ctx.dirtyPaths) ? ctx.dirtyPaths : ctx.dirtyPaths\n const dirtyPathCountHint = Array.isArray(dirtyPaths)\n ? dirtyPaths.length\n : typeof (dirtyPaths as any)?.size === 'number'\n ? 
((dirtyPaths as any).size as number)\n : undefined\n\n type DirtyRootIds = {\n readonly dirtyAll: boolean\n readonly reason?: DirtyAllReason\n readonly rootIds: Int32Array\n readonly rootCount: number\n readonly keySize: number\n readonly keyHash: number\n }\n\n const makeDirtyAll = (reason: DirtyAllReason): DirtyRootIds => ({\n dirtyAll: true,\n reason,\n rootIds: EMPTY_INT32,\n rootCount: 0,\n keySize: 0,\n keyHash: 0,\n })\n\n const hashFieldPathIdsInt32 = (ids: Int32Array): number => {\n // FNV-1a (32-bit)\n let hash = 2166136261 >>> 0\n for (let i = 0; i < ids.length; i++) {\n hash ^= ids[i]! >>> 0\n hash = Math.imul(hash, 16777619)\n }\n return hash >>> 0\n }\n\n let dirtyRootIds: DirtyRootIds | undefined\n\n const DIRTY_ROOT_IDS_TOP_K = 3\n const AUTO_FLOOR_RATIO = 1.05\n const AUTO_FAST_FULL_EWMA_THRESHOLD_MS = 0.6\n const AUTO_FAST_FULL_WARMUP_FULL_SAMPLES_OFF = 2\n const AUTO_TINY_GRAPH_FULL_STEP_THRESHOLD = 2\n const MAX_CACHEABLE_ROOT_IDS = 128\n const MAX_CACHEABLE_ROOT_RATIO = 0.5\n const NO_CACHE_NEAR_FULL_STEP_THRESHOLD = 512\n\n const configScope: TraitConvergeConfigScope = ctx.configScope ?? 'builtin'\n const generationEvidence: TraitConvergeGenerationEvidence = ctx.generation ?? {\n generation: ir.generation,\n }\n const generation = generationEvidence.generation\n const staticIrDigest = !shouldCollectDecisionHeavyDetails ? '' : getConvergeStaticIrDigest(ir)\n const decisionBudgetMs = requestedMode === 'auto' ? 
ctx.decisionBudgetMs : undefined\n const cacheMissReasonHint = ctx.cacheMissReasonHint\n\n if (cacheMissReasonHint === 'generation_bumped' && !reasons.includes('generation_bumped')) {\n reasons.push('generation_bumped')\n }\n\n const isDecisionBudgetExceeded = (): boolean =>\n typeof decisionBudgetMs === 'number' &&\n Number.isFinite(decisionBudgetMs) &&\n decisionBudgetMs > 0 &&\n ctx.now() - decisionStartedAt > decisionBudgetMs\n\n const markDecisionBudgetCutoff = (): void => {\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n }\n\n const prefixFieldPathIdsByPathId = execIr.prefixFieldPathIdsByPathId\n const prefixOffsetsByPathId = execIr.prefixOffsetsByPathId\n const topoOrderInt32 = execIr.topoOrderInt32\n const topoIndexByStepId = execIr.topoIndexByStepId\n\n const dirtyPrefixBitSet = execIr.scratch.dirtyPrefixBitSet\n const reachableStepBitSet = execIr.scratch.reachableStepBitSet\n const dirtyPrefixQueue = execIr.scratch.dirtyPrefixQueue\n const dirtyRootIdsScratch = execIr.scratch.dirtyRootIds\n const planScratch = execIr.scratch.planStepIds\n const triggerStepIdsByFieldPathId = execIr.triggerStepIdsByFieldPathId\n const triggerStepOffsetsByFieldPathId = execIr.triggerStepOffsetsByFieldPathId\n\n const addPathPrefixes = (pathId: number): void => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return\n for (let i = start; i < end; i++) {\n dirtyPrefixBitSet.add(prefixFieldPathIdsByPathId[i]!)\n }\n }\n\n const hasAnyDirtyPrefix = (pathId: number): boolean => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return false\n for (let i = start; i < end; i++) {\n if (dirtyPrefixBitSet.has(prefixFieldPathIdsByPathId[i]!)) return true\n }\n return false\n }\n\n const shouldRunStepById = (stepId: number): boolean => {\n const outId = execIr.stepOutFieldPathIdByStepId[stepId]\n if 
(typeof outId === 'number' && hasAnyDirtyPrefix(outId)) {\n return true\n }\n const depsStart = execIr.stepDepsOffsetsByStepId[stepId]\n const depsEnd = execIr.stepDepsOffsetsByStepId[stepId + 1]\n if (depsStart == null || depsEnd == null) return false\n for (let i = depsStart; i < depsEnd; i++) {\n if (hasAnyDirtyPrefix(execIr.stepDepsFieldPathIds[i]!)) return true\n }\n return false\n }\n\n const computePlanStepIds = (\n rootIds: Int32Array,\n options?: { readonly stopOnDecisionBudget?: boolean },\n ): { readonly plan?: Int32Array; readonly budgetCutoff?: true } => {\n // Small graphs and custom step slices are cheap to scan; keep the simpler logic.\n if (totalSteps < 32 || ctx.schedulingScopeStepIds != null) {\n dirtyPrefixBitSet.clear()\n for (let i = 0; i < rootIds.length; i++) {\n addPathPrefixes(rootIds[i]!)\n }\n\n let planLen = 0\n let checks = 0\n for (let i = 0; i < scopeStepIds.length; i++) {\n const stepId = scopeStepIds[i]!\n if (options?.stopOnDecisionBudget) {\n checks += 1\n if (checks >= 32) {\n checks = 0\n if (isDecisionBudgetExceeded()) {\n dirtyPrefixBitSet.clear()\n return { budgetCutoff: true } as const\n }\n }\n }\n\n if (!shouldRunStepById(stepId)) {\n continue\n }\n\n planScratch[planLen] = stepId\n planLen += 1\n addPathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n\n const plan = execVmMode ? 
planScratch.subarray(0, planLen) : new Int32Array(planLen)\n if (!execVmMode && planLen > 0) {\n plan.set(planScratch.subarray(0, planLen))\n }\n dirtyPrefixBitSet.clear()\n return { plan } as const\n }\n\n // 059: Typed reachability (prefixId -> stepIds) + queue + bitset.\n dirtyPrefixBitSet.clear()\n reachableStepBitSet.clear()\n\n let queueLen = 0\n const enqueuePathPrefixes = (pathId: number): void => {\n const start = prefixOffsetsByPathId[pathId]\n const end = prefixOffsetsByPathId[pathId + 1]\n if (start == null || end == null) return\n for (let i = start; i < end; i++) {\n const prefixId = prefixFieldPathIdsByPathId[i]!\n if (dirtyPrefixBitSet.has(prefixId)) continue\n dirtyPrefixBitSet.add(prefixId)\n dirtyPrefixQueue[queueLen] = prefixId\n queueLen += 1\n }\n }\n\n for (let i = 0; i < rootIds.length; i++) {\n enqueuePathPrefixes(rootIds[i]!)\n }\n\n const isStepInScope = (stepId: number): boolean => {\n if (schedulingScope === 'all') return true\n const flag = execIr.stepSchedulingByStepId[stepId]\n return schedulingScope === 'immediate' ? 
flag === 0 : flag === 1\n }\n\n let cursor = 0\n let checks = 0\n while (cursor < queueLen) {\n if (options?.stopOnDecisionBudget) {\n checks += 1\n if (checks >= 32) {\n checks = 0\n if (isDecisionBudgetExceeded()) {\n dirtyPrefixBitSet.clear()\n return { budgetCutoff: true } as const\n }\n }\n }\n\n const prefixId = dirtyPrefixQueue[cursor]!\n cursor += 1\n\n const start = triggerStepOffsetsByFieldPathId[prefixId]\n const end = triggerStepOffsetsByFieldPathId[prefixId + 1]\n if (start == null || end == null) continue\n for (let i = start; i < end; i++) {\n const stepId = triggerStepIdsByFieldPathId[i]!\n if (!isStepInScope(stepId)) continue\n if (reachableStepBitSet.has(stepId)) continue\n reachableStepBitSet.add(stepId)\n enqueuePathPrefixes(execIr.stepOutFieldPathIdByStepId[stepId]!)\n }\n }\n\n // Materialize plan in topo order by scanning the scope slice.\n // This avoids TypedArray.sort() tail latency on some platforms.\n let planLen = 0\n for (let i = 0; i < scopeStepIds.length; i++) {\n const stepId = scopeStepIds[i]!\n if (!reachableStepBitSet.has(stepId)) continue\n planScratch[planLen] = stepId\n planLen += 1\n }\n\n const plan = execVmMode ? planScratch.subarray(0, planLen) : new Int32Array(planLen)\n if (!execVmMode && planLen > 0) {\n for (let i = 0; i < planLen; i++) {\n plan[i] = planScratch[i]!\n }\n }\n dirtyPrefixBitSet.clear()\n return { plan } as const\n }\n\n const cache = ctx.planCache\n const cacheMissReasonHintCount = ctx.cacheMissReasonHintCount ?? 
0\n if (cacheMissReasonHint === 'generation_bumped' && cacheMissReasonHintCount >= 3 && cache && !cache.isDisabled()) {\n cache.disable('generation_thrash')\n }\n let canUseCache = false\n let planKeyHash = 0\n let rootIdsKey: Int32Array | undefined = undefined\n\n const ensureDirtyRootIds = (): DirtyRootIds => {\n if (dirtyRootIds) return dirtyRootIds\n\n if (ctx.dirtyAllReason) {\n dirtyRootIds = makeDirtyAll(ctx.dirtyAllReason)\n } else if (dirtyPaths instanceof Set) {\n dirtyPrefixBitSet.clear()\n\n let candidateLen = 0\n for (const raw of dirtyPaths as ReadonlySet<unknown>) {\n if (typeof raw !== 'number' || !Number.isFinite(raw)) {\n dirtyPrefixBitSet.clear()\n dirtyRootIds = makeDirtyAll('nonTrackablePatch')\n break\n }\n\n const id = Math.floor(raw)\n if (id < 0 || id >= execIr.fieldPathsById.length) {\n dirtyPrefixBitSet.clear()\n dirtyRootIds = makeDirtyAll('fallbackPolicy')\n break\n }\n\n dirtyPrefixBitSet.add(id)\n dirtyRootIdsScratch[candidateLen] = id\n candidateLen += 1\n }\n\n if (!dirtyRootIds) {\n if (candidateLen === 0) {\n dirtyPrefixBitSet.clear()\n dirtyRootIds = makeDirtyAll('unknownWrite')\n } else {\n let rootLen = 0\n for (let i = 0; i < candidateLen; i++) {\n const id = dirtyRootIdsScratch[i]!\n const start = prefixOffsetsByPathId[id]\n const end = prefixOffsetsByPathId[id + 1]\n if (start == null || end == null) continue\n\n // If any proper prefix is also directly dirty, skip this id.\n let coveredByDirtyPrefix = false\n for (let j = start; j < end - 1; j++) {\n const prefixId = prefixFieldPathIdsByPathId[j]!\n if (dirtyPrefixBitSet.has(prefixId)) {\n coveredByDirtyPrefix = true\n break\n }\n }\n if (coveredByDirtyPrefix) continue\n\n dirtyRootIdsScratch[rootLen] = id\n rootLen += 1\n }\n\n if (rootLen === 0) {\n dirtyPrefixBitSet.clear()\n dirtyRootIds = makeDirtyAll('unknownWrite')\n } else {\n const rootIds = dirtyRootIdsScratch.subarray(0, rootLen)\n rootIds.sort()\n const keyHash = hashFieldPathIdsInt32(rootIds)\n\n 
dirtyPrefixBitSet.clear()\n\n dirtyRootIds = {\n dirtyAll: false,\n rootIds,\n rootCount: rootIds.length,\n keySize: rootIds.length,\n keyHash,\n }\n }\n }\n }\n } else {\n const dirty = dirtyPathsToRootIds({\n dirtyPaths,\n registry,\n dirtyAllReason: ctx.dirtyAllReason,\n })\n\n dirtyRootIds = dirty.dirtyAll\n ? makeDirtyAll(dirty.reason ?? 'unknownWrite')\n : {\n dirtyAll: false,\n rootIds: Int32Array.from(dirty.rootIds),\n rootCount: dirty.rootCount,\n keySize: dirty.keySize,\n keyHash: dirty.keyHash,\n }\n }\n\n const rootRatioForCache =\n !dirtyRootIds.dirtyAll && scopeStepCount > 0 ? dirtyRootIds.rootCount / scopeStepCount : undefined\n const cacheableBySize =\n !dirtyRootIds.dirtyAll &&\n dirtyRootIds.rootIds.length > 0 &&\n dirtyRootIds.rootIds.length <= MAX_CACHEABLE_ROOT_IDS &&\n (rootRatioForCache == null || rootRatioForCache <= MAX_CACHEABLE_ROOT_RATIO)\n canUseCache =\n !!cache &&\n !cache.isDisabled() &&\n ctx.schedulingScopeStepIds == null &&\n cacheableBySize\n planKeyHash =\n dirtyRootIds.keyHash ^ (schedulingScope === 'all' ? 0 : schedulingScope === 'immediate' ? 1 : 2)\n rootIdsKey = canUseCache ? dirtyRootIds.rootIds : undefined\n return dirtyRootIds\n }\n\n let cacheEvidence: TraitConvergePlanCacheEvidence | undefined = shouldCollectDecisionHeavyDetails\n ? {\n capacity: 0,\n size: 0,\n hits: 0,\n misses: 0,\n evicts: 0,\n hit: false,\n }\n : undefined\n\n let affectedSteps: number | undefined\n let planStepIds: Int32Array | undefined\n let planStepCount: number | undefined\n\n const getOrComputePlan = (options?: {\n readonly missReason?: TraitConvergePlanCacheEvidence['missReason']\n readonly stopOnDecisionBudget?: boolean\n }): { readonly plan: Int32Array; readonly hit: boolean; readonly budgetCutoff?: true } => {\n const dirty = ensureDirtyRootIds()\n\t if (dirty.dirtyAll) {\n\t if (cacheEvidence && cache) {\n\t cacheEvidence = cache.evidence({\n\t hit: false,\n\t keySize: dirty.keySize,\n\t missReason: options?.missReason ?? 
'unknown',\n\t })\n\t }\n\t const fullPlan = scopeStepIds\n\t affectedSteps = fullPlan.length\n\t return { plan: fullPlan, hit: false }\n\t }\n\n\t // When we cannot reuse a plan (cache disabled / non-cacheable), doing expensive plan computation in auto mode\n\t // tends to be a negative optimization in off-fast-path workloads.\n\t if (requestedMode === 'auto' && diagnosticsLevel === 'off' && stack.length === 0 && (!canUseCache || !cache)) {\n\t if (cacheEvidence && cache) {\n\t cacheEvidence = cache.evidence({\n\t hit: false,\n\t keySize: dirty.keySize,\n\t missReason: options?.missReason ?? 'unknown',\n\t })\n\t }\n\t const fullPlan = scopeStepIds\n\t affectedSteps = fullPlan.length\n\t return { plan: fullPlan, hit: false }\n\t }\n\n\t if (canUseCache && cache && rootIdsKey) {\n\t const cached = cache.get(planKeyHash, rootIdsKey)\n\t if (cached) {\n\t if (cacheEvidence) {\n\t cacheEvidence = cache.evidence({\n\t hit: true,\n\t keySize: dirty.keySize,\n\t })\n\t }\n\t affectedSteps = cached.length\n\t return { plan: cached, hit: true }\n\t }\n\n\t // 2-hit admission for plan computation:\n\t // - On cache miss, do NOT compute a plan until we observe the same key again.\n\t // - Prevents high-cardinality dirty patterns from turning auto into a negative optimization.\n\t if (requestedMode === 'auto' && diagnosticsLevel === 'off' && stack.length === 0) {\n\t const h = planKeyHash\n\t const seen1 = execIr.perf.recentPlanMissHash1\n\t const seen2 = execIr.perf.recentPlanMissHash2\n\t if (h !== seen1 && h !== seen2) {\n\t execIr.perf.recentPlanMissHash2 = seen1\n\t execIr.perf.recentPlanMissHash1 = h\n\n\t if (cacheEvidence) {\n\t cacheEvidence = cache.evidence({\n\t hit: false,\n\t keySize: dirty.keySize,\n\t missReason: options?.missReason ?? 
'unknown',\n\t })\n\t }\n\n\t const fullPlan = scopeStepIds\n\t affectedSteps = fullPlan.length\n\t return { plan: fullPlan, hit: false }\n\t }\n\t }\n\t }\n\n // Decision budget is designed to cap worst-case plan computation cost.\n // For small graphs (<32 steps), the plan scan is bounded and the early cutoff\n // can introduce flakiness due to sub-ms clock jitter on some platforms.\n if (options?.stopOnDecisionBudget && totalSteps >= 32 && isDecisionBudgetExceeded()) {\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirty.keySize,\n missReason: options?.missReason ?? 'unknown',\n })\n }\n const fullPlan = scopeStepIds\n affectedSteps = fullPlan.length\n if (canUseCache && cache && rootIdsKey) {\n cache.set(planKeyHash, rootIdsKey.slice(), fullPlan)\n }\n return { plan: fullPlan, hit: false, budgetCutoff: true } as const\n }\n\n const computed = computePlanStepIds(dirty.rootIds, {\n stopOnDecisionBudget: options?.stopOnDecisionBudget,\n })\n if (computed.budgetCutoff) {\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirty.keySize,\n missReason: options?.missReason ?? 'unknown',\n })\n }\n const fullPlan = scopeStepIds\n affectedSteps = fullPlan.length\n if (canUseCache && cache && rootIdsKey) {\n cache.set(planKeyHash, rootIdsKey.slice(), fullPlan)\n }\n return { plan: fullPlan, hit: false, budgetCutoff: true } as const\n }\n\n const plan = computed.plan ?? new Int32Array(0)\n if (canUseCache && cache && rootIdsKey) {\n cache.set(planKeyHash, rootIdsKey.slice(), execVmMode ? plan.slice() : plan)\n }\n if (cacheEvidence && cache) {\n cacheEvidence = cache.evidence({\n hit: false,\n keySize: dirty.keySize,\n missReason: options?.missReason ?? 
'not_cached',\n })\n }\n affectedSteps = plan.length\n return { plan, hit: false }\n }\n\n const getNearFullRootRatioThreshold = (stepCount: number): number => {\n // Heuristic:\n // - For large graphs, computing/reusing a precise dirty plan can be dominated by decision/cache overhead,\n // and can make auto slower than full under mixed dirty patterns (perf: converge-steps).\n // - Cut over to full earlier to keep auto<=full stable and avoid large retained plan-cache entries.\n if (stepCount >= 1536) return 0.65\n if (stepCount >= 1024) return 0.7\n if (stepCount >= 512) return 0.75\n return 0.9\n }\n const NEAR_FULL_PLAN_RATIO_THRESHOLD = 0.9\n const isOffFastPath = diagnosticsLevel === 'off' && stack.length === 0\n const fullCommitEwmaOffMs = execIr.perf.fullCommitEwmaOffMs\n const fullCommitMinOffMs = execIr.perf.fullCommitMinOffMs\n const fullCommitSampleCountOff = execIr.perf.fullCommitSampleCountOff ?? 0\n const nearFullRootRatioThreshold = getNearFullRootRatioThreshold(scopeStepCount)\n const rootRatioHint =\n typeof dirtyPathCountHint === 'number' && dirtyPathCountHint > 0\n ? scopeStepCount > 0\n ? 
dirtyPathCountHint / scopeStepCount\n : 1\n : undefined\n\n if (requestedMode === 'auto') {\n if (ctx.txnSeq === 1) {\n mode = 'full'\n reasons.push('cold_start')\n } else if (scopeStepCount <= AUTO_TINY_GRAPH_FULL_STEP_THRESHOLD) {\n mode = 'full'\n reasons.push('near_full')\n } else if (ctx.dirtyAllReason) {\n mode = 'full'\n reasons.push('dirty_all')\n reasons.push('unknown_write')\n } else if (dirtyPathCountHint === 0) {\n mode = 'full'\n reasons.push('unknown_write')\n } else if (rootRatioHint != null && rootRatioHint >= nearFullRootRatioThreshold) {\n mode = 'full'\n reasons.push('near_full')\n } else if (isOffFastPath) {\n // Off-fast-path is extremely sensitive to planning/caching overhead (sub-ms full converge).\n // Use a tiny (O(1)) inline admission strategy:\n // - If a dirty-pattern doesn't repeat, don't build a reachability plan; just run full.\n // - If it repeats, compute and (optionally) cache a small plan and run dirty.\n //\n // This keeps auto<=full stable under adversarial high-cardinality patterns.\n const scopeKey = schedulingScope === 'all' ? 0 : schedulingScope === 'immediate' ? 
1 : 2\n const dirtyKeyHash = ctx.dirtyPathsKeyHash\n const dirtyKeySize = ctx.dirtyPathsKeySize\n const canUseInlineKey =\n dirtyPaths instanceof Set &&\n typeof dirtyKeyHash === 'number' &&\n Number.isFinite(dirtyKeyHash) &&\n typeof dirtyKeySize === 'number' &&\n Number.isFinite(dirtyKeySize) &&\n dirtyKeySize > 0 &&\n dirtyKeySize <= 64\n\n if (canUseInlineKey) {\n const inlineKeyHash = ((dirtyKeyHash ^ scopeKey) >>> 0) as number\n const scratch: any = execIr.scratch as any\n\n // Inline plan cache hit: reuse plan without any decision/plan build work.\n const h1 = scratch.inlinePlanCacheHash1 as number | undefined\n const s1 = scratch.inlinePlanCacheSize1 as number | undefined\n const b1 = scratch.inlinePlanCacheBuf1 as Int32Array | undefined\n const l1 = scratch.inlinePlanCachePlanLen1 as number | undefined\n if (inlineKeyHash === h1 && dirtyKeySize === s1 && b1 && typeof l1 === 'number' && l1 > 0) {\n mode = 'dirty'\n reasons.push('inline_dirty')\n reasons.push('cache_hit')\n planStepIds = b1\n planStepCount = l1\n affectedSteps = l1\n } else {\n const h2 = scratch.inlinePlanCacheHash2 as number | undefined\n const s2 = scratch.inlinePlanCacheSize2 as number | undefined\n const b2 = scratch.inlinePlanCacheBuf2 as Int32Array | undefined\n const l2 = scratch.inlinePlanCachePlanLen2 as number | undefined\n if (inlineKeyHash === h2 && dirtyKeySize === s2 && b2 && typeof l2 === 'number' && l2 > 0) {\n // Promote to MRU.\n scratch.inlinePlanCacheHash2 = h1\n scratch.inlinePlanCacheSize2 = s1\n scratch.inlinePlanCacheBuf2 = b1\n scratch.inlinePlanCachePlanLen2 = l1\n scratch.inlinePlanCacheHash1 = h2\n scratch.inlinePlanCacheSize1 = s2\n scratch.inlinePlanCacheBuf1 = b2\n scratch.inlinePlanCachePlanLen1 = l2\n\n mode = 'dirty'\n reasons.push('inline_dirty')\n reasons.push('cache_hit')\n planStepIds = b2\n planStepCount = l2\n affectedSteps = l2\n } else {\n // 2-hit admission for inline plan computation: build plan only after we see the same key again.\n const seen1 
= scratch.inlinePlanCacheRecentMissHash1 as number | undefined\n const seen2 = scratch.inlinePlanCacheRecentMissHash2 as number | undefined\n\n if (inlineKeyHash !== seen1 && inlineKeyHash !== seen2) {\n scratch.inlinePlanCacheRecentMissHash2 = seen1\n scratch.inlinePlanCacheRecentMissHash1 = inlineKeyHash\n mode = 'full'\n reasons.push('low_hit_rate_protection')\n\n // If we keep seeing new keys with very few repeats, disable inline plan computation to avoid GC spikes.\n // (Cache hits still work; we only stop computing new plans.)\n if (scratch.inlinePlanCacheDisabled !== true) {\n const prevSkips = scratch.inlinePlanCacheSkipCount as number | undefined\n const nextSkips = (typeof prevSkips === 'number' && Number.isFinite(prevSkips) ? prevSkips : 0) + 1\n scratch.inlinePlanCacheSkipCount = nextSkips\n\n const prevComputes = scratch.inlinePlanCacheComputeCount as number | undefined\n const computes = typeof prevComputes === 'number' && Number.isFinite(prevComputes) ? prevComputes : 0\n if (nextSkips >= 32 && computes <= 2) {\n scratch.inlinePlanCacheDisabled = true\n }\n }\n } else {\n if (scratch.inlinePlanCacheDisabled === true) {\n mode = 'full'\n reasons.push('low_hit_rate_protection')\n } else {\n mode = 'dirty'\n reasons.push('inline_dirty')\n reasons.push('cache_miss')\n\n const prevComputes = scratch.inlinePlanCacheComputeCount as number | undefined\n const nextComputes =\n (typeof prevComputes === 'number' && Number.isFinite(prevComputes) ? prevComputes : 0) + 1\n scratch.inlinePlanCacheComputeCount = nextComputes\n }\n }\n }\n }\n } else {\n // Fallback: if full is already cheap, pick full; otherwise run dirty inline.\n //\n // NOTE: we deliberately warm up a couple of full samples under off-fast-path so the EWMA/min can\n // converge after cold-start/JIT effects. Those warmup samples are typically discarded by perf harness.\n const fastFullMsCandidate =\n typeof fullCommitMinOffMs === 'number' && Number.isFinite(fullCommitMinOffMs)\n ? 
fullCommitMinOffMs\n : typeof fullCommitEwmaOffMs === 'number' && Number.isFinite(fullCommitEwmaOffMs)\n ? fullCommitEwmaOffMs\n : undefined\n const shouldWarmupFull =\n fullCommitSampleCountOff < AUTO_FAST_FULL_WARMUP_FULL_SAMPLES_OFF && scopeStepCount <= 1024\n\n if (\n shouldWarmupFull ||\n (typeof fastFullMsCandidate === 'number' &&\n Number.isFinite(fastFullMsCandidate) &&\n fastFullMsCandidate <= AUTO_FAST_FULL_EWMA_THRESHOLD_MS)\n ) {\n mode = 'full'\n reasons.push('fast_full')\n } else {\n mode = 'dirty'\n reasons.push('inline_dirty')\n }\n }\n } else {\n const dirty = ensureDirtyRootIds()\n if (dirty.dirtyAll) {\n mode = 'full'\n reasons.push('dirty_all')\n reasons.push('unknown_write')\n } else if (dirty.rootIds.length === 0) {\n mode = 'full'\n reasons.push('unknown_write')\n } else {\n const dirtyRootRatio = scopeStepCount > 0 ? dirty.rootCount / scopeStepCount : 1\n if (dirtyRootRatio >= nearFullRootRatioThreshold) {\n mode = 'full'\n reasons.push('near_full')\n } else if (\n !canUseCache &&\n scopeStepCount >= NO_CACHE_NEAR_FULL_STEP_THRESHOLD &&\n dirtyRootRatio >= nearFullRootRatioThreshold / AUTO_FLOOR_RATIO\n ) {\n // No reusable cache path + near-full roots on large graphs tends to pay decision cost without step pruning wins.\n mode = 'full'\n reasons.push('near_full')\n } else {\n const { plan, hit, budgetCutoff } = getOrComputePlan({\n missReason: cacheMissReasonHint ?? 'not_cached',\n stopOnDecisionBudget: decisionBudgetMs != null,\n })\n if (budgetCutoff) {\n markDecisionBudgetCutoff()\n }\n planStepIds = plan\n reasons.push(hit ? 'cache_hit' : 'cache_miss')\n const ratio = scopeStepCount > 0 ? 
plan.length / scopeStepCount : 1\n if (ratio >= NEAR_FULL_PLAN_RATIO_THRESHOLD) {\n mode = 'full'\n reasons.push('near_full')\n } else {\n mode = 'dirty'\n }\n }\n }\n }\n } else {\n reasons.push('module_override')\n if (mode === 'dirty') {\n const dirty = ensureDirtyRootIds()\n const { plan, hit } = getOrComputePlan({ missReason: cacheMissReasonHint ?? 'not_cached' })\n planStepIds = plan\n if (dirty.dirtyAll) {\n reasons.push('dirty_all')\n } else if (cache && dirty.rootIds.length > 0) {\n reasons.push(hit ? 'cache_hit' : 'cache_miss')\n }\n }\n }\n\n if (\n cacheEvidence?.disabled &&\n cacheEvidence.disableReason === 'low_hit_rate' &&\n !reasons.includes('low_hit_rate_protection')\n ) {\n reasons.push('low_hit_rate_protection')\n }\n\n const getDirtySummary = (): TraitConvergeDirtySummary | undefined => {\n if (!shouldCollectDecisionDetails) return undefined\n\n // Diagnostics contract:\n // - light/full: exported evidence expects dirty.rootIds as canonical anchor; rootPaths is materialized only on consumer side.\n // - sampled: keep slim by default (DebugSink strips heavy fields, but we also avoid unnecessary root mapping here).\n const requiresRootIds = diagnosticsLevel === 'light' || diagnosticsLevel === 'full'\n\n if (ctx.dirtyAllReason != null) {\n return {\n dirtyAll: true,\n reason: ctx.dirtyAllReason,\n rootCount: 0,\n ...(requiresRootIds ? { rootIds: [], rootIdsTruncated: false } : null),\n }\n }\n\n if (typeof dirtyPathCountHint === 'number' && dirtyPathCountHint === 0) {\n return {\n dirtyAll: true,\n reason: 'unknownWrite',\n rootCount: 0,\n ...(requiresRootIds ? { rootIds: [], rootIdsTruncated: false } : null),\n }\n }\n\n const dirty =\n requiresRootIds && dirtyRootIds == null && (diagnosticsLevel === 'light' || diagnosticsLevel === 'full')\n ? ensureDirtyRootIds()\n : dirtyRootIds\n\n if (dirty?.dirtyAll) {\n return {\n dirtyAll: true,\n reason: dirty.reason ?? 'unknownWrite',\n rootCount: 0,\n ...(requiresRootIds ? 
{ rootIds: [], rootIdsTruncated: false } : null),\n }\n }\n\n if (dirty) {\n return {\n dirtyAll: false,\n rootCount: dirty.rootCount,\n ...(requiresRootIds\n ? {\n rootIds: Array.from(dirty.rootIds.subarray(0, DIRTY_ROOT_IDS_TOP_K)),\n rootIdsTruncated: dirty.rootIds.length > DIRTY_ROOT_IDS_TOP_K,\n }\n : null),\n }\n }\n\n if (typeof dirtyPathCountHint === 'number') {\n return {\n dirtyAll: false,\n rootCount: dirtyPathCountHint,\n }\n }\n\n return {\n dirtyAll: true,\n reason: 'unknownWrite',\n rootCount: 0,\n ...(requiresRootIds ? { rootIds: [], rootIdsTruncated: false } : null),\n }\n }\n\n executionStartedAt = ctx.now()\n if (requestedMode === 'auto') {\n decisionDurationMs = Math.max(0, executionStartedAt - decisionStartedAt)\n }\n\n let changedCount = 0\n const shouldCollectNearFullSlimDecision =\n diagnosticsLevel === 'off' && requestedMode === 'auto' && mode === 'full' && reasons.length === 1 && reasons[0] === 'near_full'\n const shouldCollectDecisionSummary = shouldCollectDecision && !shouldCollectNearFullSlimDecision\n\n const buildStepStats = (executedSteps: number): TraitConvergeStepStats => ({\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n ...(typeof affectedSteps === 'number' ? { affectedSteps } : null),\n })\n\n const makeDecisionSummary = (params: {\n readonly outcome: TraitConvergeOutcomeTag\n readonly executedSteps: number\n readonly executionDurationMs: number\n }): TraitConvergeDecisionSummary => {\n const stepStats = buildStepStats(params.executedSteps)\n\n return {\n requestedMode,\n executedMode: mode,\n outcome: params.outcome,\n configScope,\n staticIrDigest,\n executionBudgetMs: ctx.budgetMs,\n executionDurationMs: params.executionDurationMs,\n decisionBudgetMs: requestedMode === 'auto' ? ctx.decisionBudgetMs : undefined,\n decisionDurationMs: requestedMode === 'auto' ? decisionDurationMs : undefined,\n reasons,\n stepStats,\n dirty: shouldCollectDecisionDetails ? 
getDirtySummary() : undefined,\n thresholds: shouldCollectDecisionHeavyDetails ? { floorRatio: AUTO_FLOOR_RATIO } : undefined,\n cache: shouldCollectDecisionHeavyDetails ? cacheEvidence : undefined,\n generation: shouldCollectDecisionHeavyDetails ? generationEvidence : undefined,\n staticIr: shouldCollectDecisionHeavyDetails\n ? {\n fieldPathCount: ir.fieldPaths.length,\n stepCount: totalSteps,\n buildDurationMs: ir.buildDurationMs,\n }\n : undefined,\n timeSlicing: shouldCollectDecisionHeavyDetails ? timeSlicingSummary : undefined,\n diagnosticsSampling: shouldCollectDecisionHeavyDetails ? diagnosticsSampling : undefined,\n top3:\n shouldCollectDecisionHeavyDetails && hotspots && hotspots.length > 0\n ? hotspots.slice()\n : undefined,\n } satisfies TraitConvergeDecisionSummary\n }\n\n const makeNearFullSlimDecisionSummary = (params: {\n readonly outcome: TraitConvergeOutcomeTag\n readonly executedSteps: number\n readonly executionDurationMs: number\n }): TraitConvergeDecisionSummary => ({\n requestedMode,\n executedMode: mode,\n outcome: params.outcome,\n configScope,\n staticIrDigest: '',\n executionBudgetMs: ctx.budgetMs,\n executionDurationMs: params.executionDurationMs,\n decisionBudgetMs: requestedMode === 'auto' ? ctx.decisionBudgetMs : undefined,\n decisionDurationMs: requestedMode === 'auto' ? decisionDurationMs : undefined,\n reasons,\n stepStats: buildStepStats(params.executedSteps),\n })\n\n const steps: Array<ConvergeStepSummary> | undefined = diagnosticsLevel === 'full' ? 
[] : undefined\n let executedSteps = 0\n const canUseInPlaceDraft = ctx.allowInPlaceDraft === true && execIr.allOutPathsShallow\n const draft = (() => {\n if (!canUseInPlaceDraft) {\n return new CowDraft(base)\n }\n const scratch: any = execIr.scratch as any\n const cached = scratch.shallowInPlaceDraft as ShallowInPlaceDraft<S> | undefined\n if (cached) {\n cached.reset(base)\n return cached\n }\n const next = new ShallowInPlaceDraft(base)\n scratch.shallowInPlaceDraft = next\n return next\n })()\n let budgetChecks = 0\n const rollbackDraft = (): void => {\n if (draft instanceof ShallowInPlaceDraft) {\n draft.rollback()\n }\n ctx.setDraft(base)\n }\n\n try {\n if (mode === 'dirty' && !planStepIds) {\n // Inline dirty: build an actual plan (reachability) without hashing/caching.\n // This keeps decisionDurationMs ~0 but avoids scanning every step with shouldRunStepById,\n // and it supports transitive dirty propagation (out -> deps closure).\n let ok = false\n if (dirtyPaths instanceof Set) {\n const dirtyPathIds = dirtyPaths as ReadonlySet<unknown>\n const dirtyCount =\n typeof ctx.dirtyPathsKeySize === 'number'\n ? ctx.dirtyPathsKeySize\n : ((dirtyPathIds as any).size as number | undefined)\n\n // Micro-cache for inline_dirty:\n // - Avoids repeated reachability plan builds for stable dirty patterns (e.g. alternatingTwoStable),\n // which can trigger p95 tail spikes due to JIT/GC timing in ultra-fast off-fast-path workloads.\n //\n // NOTE: This is deliberately tiny (2-entry) and has 2-hit admission to avoid thrashing on high-cardinality patterns.\n let inlineKeyHash: number | undefined\n if (typeof dirtyCount === 'number' && dirtyCount > 0 && dirtyCount <= 64) {\n const scopeKey = schedulingScope === 'all' ? 0 : schedulingScope === 'immediate' ? 
1 : 2\n const preHash = ctx.dirtyPathsKeyHash\n\n if (typeof preHash === 'number') {\n inlineKeyHash = (preHash ^ scopeKey) >>> 0\n } else {\n let h = 2166136261 >>> 0\n let okKey = true\n for (const raw of dirtyPathIds) {\n if (typeof raw !== 'number' || !Number.isFinite(raw)) {\n okKey = false\n break\n }\n const id = Math.floor(raw)\n h ^= id >>> 0\n h = Math.imul(h, 16777619)\n }\n if (okKey) {\n inlineKeyHash = (h ^ scopeKey) >>> 0\n }\n }\n\n if (inlineKeyHash !== undefined) {\n const scratch: any = execIr.scratch as any\n const h1 = scratch.inlinePlanCacheHash1 as number | undefined\n const s1 = scratch.inlinePlanCacheSize1 as number | undefined\n const b1 = scratch.inlinePlanCacheBuf1 as Int32Array | undefined\n const l1 = scratch.inlinePlanCachePlanLen1 as number | undefined\n if (inlineKeyHash === h1 && dirtyCount === s1 && b1 && typeof l1 === 'number' && l1 > 0) {\n planStepIds = b1\n planStepCount = l1\n affectedSteps = l1\n ok = true\n } else {\n const h2 = scratch.inlinePlanCacheHash2 as number | undefined\n const s2 = scratch.inlinePlanCacheSize2 as number | undefined\n const b2 = scratch.inlinePlanCacheBuf2 as Int32Array | undefined\n const l2 = scratch.inlinePlanCachePlanLen2 as number | undefined\n if (inlineKeyHash === h2 && dirtyCount === s2 && b2 && typeof l2 === 'number' && l2 > 0) {\n // Promote to MRU.\n scratch.inlinePlanCacheHash2 = h1\n scratch.inlinePlanCacheSize2 = s1\n scratch.inlinePlanCacheBuf2 = b1\n scratch.inlinePlanCachePlanLen2 = l1\n scratch.inlinePlanCacheHash1 = h2\n scratch.inlinePlanCacheSize1 = s2\n scratch.inlinePlanCacheBuf1 = b2\n scratch.inlinePlanCachePlanLen1 = l2\n\n planStepIds = b2\n planStepCount = l2\n affectedSteps = l2\n ok = true\n }\n }\n }\n }\n\n if (!ok) {\n const planLen = computeInlineDirtyPlanLenFromDirtyPathIdsSet(\n execIr,\n dirtyPathIds,\n scopeStepIds,\n scopeStepCount,\n schedulingScope,\n NEAR_FULL_PLAN_RATIO_THRESHOLD,\n )\n if (planLen === -2) {\n mode = 'full'\n affectedSteps = 
scopeStepCount\n if (!reasons.includes('near_full')) reasons.push('near_full')\n ok = true\n } else if (planLen >= 0) {\n planStepCount = planLen\n affectedSteps = planLen\n ok = true\n\n // 2-hit admission: cache only if the same pattern repeats.\n if (\n inlineKeyHash !== undefined &&\n typeof dirtyCount === 'number' &&\n dirtyCount > 0 &&\n dirtyCount <= 64 &&\n planLen > 0 &&\n planLen <= 256\n ) {\n const scratch: any = execIr.scratch as any\n const seen1 = scratch.inlinePlanCacheRecentMissHash1 as number | undefined\n const seen2 = scratch.inlinePlanCacheRecentMissHash2 as number | undefined\n const admit = inlineKeyHash === seen1 || inlineKeyHash === seen2\n\n if (admit) {\n // Insert as MRU (shift existing entry1 to entry2) without allocating:\n // - Reuse a fixed 2-slot typed buffer to avoid GC spikes under adversarial patterns.\n const oldHash1 = scratch.inlinePlanCacheHash1 as number | undefined\n const oldSize1 = scratch.inlinePlanCacheSize1 as number | undefined\n const oldBuf1 = scratch.inlinePlanCacheBuf1 as Int32Array | undefined\n const oldLen1 = scratch.inlinePlanCachePlanLen1 as number | undefined\n\n const oldBuf2 = scratch.inlinePlanCacheBuf2 as Int32Array | undefined\n const buf = oldBuf2 ?? 
new Int32Array(256)\n\n for (let i = 0; i < planLen; i++) {\n buf[i] = planScratch[i]!\n }\n\n scratch.inlinePlanCacheHash2 = oldHash1\n scratch.inlinePlanCacheSize2 = oldSize1\n scratch.inlinePlanCacheBuf2 = oldBuf1\n scratch.inlinePlanCachePlanLen2 = oldLen1\n scratch.inlinePlanCacheHash1 = inlineKeyHash\n scratch.inlinePlanCacheSize1 = dirtyCount\n scratch.inlinePlanCacheBuf1 = buf\n scratch.inlinePlanCachePlanLen1 = planLen\n } else {\n scratch.inlinePlanCacheRecentMissHash2 = seen1\n scratch.inlinePlanCacheRecentMissHash1 = inlineKeyHash\n }\n }\n } else {\n // Fallback: cannot derive a reliable dirty plan from the Set.\n // Use the canonical dirty-root path (which may degrade to full) to preserve correctness.\n ok = false\n }\n }\n }\n\n if (!ok) {\n const dirty = ensureDirtyRootIds()\n if (dirty.dirtyAll) {\n mode = 'full'\n if (!reasons.includes('dirty_all')) reasons.push('dirty_all')\n if (!reasons.includes('unknown_write')) reasons.push('unknown_write')\n } else {\n const computed = computePlanStepIds(dirty.rootIds)\n const plan = computed.plan ?? new Int32Array(0)\n affectedSteps = plan.length\n const ratio = scopeStepCount > 0 ? plan.length / scopeStepCount : 1\n if (ratio >= NEAR_FULL_PLAN_RATIO_THRESHOLD) {\n mode = 'full'\n if (!reasons.includes('near_full')) reasons.push('near_full')\n } else {\n planStepIds = plan\n }\n }\n }\n }\n\n const stepIds =\n mode === 'dirty'\n ? planStepIds ?? (planStepCount != null ? planScratch : scopeStepIds)\n : scopeStepIds\n const stepCount =\n mode === 'dirty'\n ? planStepCount ?? (planStepIds ? 
planStepIds.length : scopeStepCount)\n : scopeStepCount\n\n for (let i = 0; i < stepCount; i++) {\n const stepId = stepIds[i]!\n const entry = stepsInTopoOrder[stepId]\n if (!entry) continue\n\n const fieldPath = entry.fieldPath\n\n if (steps) {\n if (ctx.now() - executionStartedAt > ctx.budgetMs) {\n // Budget exceeded: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const top3 = pickTop3Steps(steps)\n const summary: ConvergeSummary = {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3,\n }\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'budget_exceeded',\n summary,\n ...(decision ? { decision } : null),\n } as const\n }\n } else {\n budgetChecks += 1\n if (budgetChecks >= 32) {\n budgetChecks = 0\n if (ctx.now() - executionStartedAt > ctx.budgetMs) {\n // Budget exceeded: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n if (!reasons.includes('budget_cutoff')) reasons.push('budget_cutoff')\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'budget_exceeded',\n ...(decision ? 
{ decision } : null),\n } as const\n }\n }\n }\n\n executedSteps += 1\n\n if (steps) {\n const stepStartedAt = ctx.now()\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n const stepEndedAt = ctx.now()\n const durationMs = Math.max(0, stepEndedAt - stepStartedAt)\n const stepKind = entry.kind === 'computed' ? 'computed' : 'link'\n const stepLabel = execIr.stepLabelByStepId[stepId] ?? String(stepId)\n const changed = exit._tag === 'Success' ? exit.value : false\n if (hotspots) {\n insertTopKHotspot({\n hotspots,\n topK: hotspotsTopK,\n next: {\n kind: stepKind,\n stepId,\n outFieldPathId: execIr.stepOutFieldPathIdByStepId[stepId],\n durationMs,\n changed,\n },\n })\n }\n steps.push({\n stepId: stepLabel,\n kind: stepKind,\n fieldPath,\n durationMs,\n changed,\n })\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const top3 = pickTop3Steps(steps)\n const summary: ConvergeSummary = {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3,\n }\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n summary,\n ...(decision ? 
{ decision } : null),\n } as const\n }\n if (exit.value) {\n changedCount += 1\n }\n continue\n }\n\n if (hotspots) {\n const stepStartedAt = ctx.now()\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n const stepEndedAt = ctx.now()\n const durationMs = Math.max(0, stepEndedAt - stepStartedAt)\n const stepKind = entry.kind === 'computed' ? 'computed' : 'link'\n const changed = exit._tag === 'Success' ? exit.value : false\n insertTopKHotspot({\n hotspots,\n topK: hotspotsTopK,\n next: {\n kind: stepKind,\n stepId,\n outFieldPathId: execIr.stepOutFieldPathIdByStepId[stepId],\n durationMs,\n changed,\n },\n })\n\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n if (decision && diagnosticsLevel !== 'off') {\n yield* emitTraitConvergeTraceEvent(decision)\n }\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? { decision } : null),\n } as const\n }\n\n if (changed) {\n changedCount += 1\n }\n\n continue\n }\n\n // Off-fast-path: enabled only when middleware is empty and diagnostics=off, to keep near-zero overhead in off mode.\n // If you need deps tracing / mismatch diagnostics, switch to light/full/sampled explicitly.\n if (diagnosticsLevel === 'off' && stack.length === 0) {\n try {\n if (runWriterStepOffFast(ctx, execIr, draft, stepId, entry)) {\n changedCount += 1\n }\n } catch (e) {\n const error = toSerializableErrorSummary(e)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? 
makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? { decision } : null),\n } as const\n }\n continue\n }\n\n const exit = yield* Effect.exit(\n runWriterStep(ctx, execIr, draft, stepId, entry, shouldCollectDecision, diagnosticsLevel, stack),\n )\n if (exit._tag === 'Failure') {\n const error = toSerializableErrorSummary(exit.cause)\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const decision = shouldCollectDecision\n ? makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(decision ? { decision } : null),\n } as const\n }\n if (exit.value) {\n changedCount += 1\n }\n }\n } catch (e) {\n // Config error: hard fail (let the outer transaction entrypoint block commit).\n if (e instanceof StateTraitConfigError) {\n throw e\n }\n const error = toSerializableErrorSummary(e)\n // Runtime error: soft degrade, roll back to base (avoid partially-applied state).\n rollbackDraft()\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const summary: ConvergeSummary | undefined = steps\n ? {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n }\n : undefined\n const decision = shouldCollectDecision\n ? 
makeDecisionSummary({\n outcome: 'Degraded',\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n return {\n _tag: 'Degraded',\n reason: 'runtime_error',\n errorSummary: error.errorSummary,\n errorDowngrade: error.downgrade,\n ...(summary ? { summary } : null),\n ...(decision ? { decision } : null),\n } as const\n }\n\n if (draft instanceof ShallowInPlaceDraft) {\n // On success, keep the in-place writes but clear rollback bookkeeping (reuse scratch draft).\n draft.commit()\n }\n\n const totalDurationMs = Math.max(0, ctx.now() - executionStartedAt)\n const outcome: TraitConvergeOutcomeTag = changedCount > 0 ? 'Converged' : 'Noop'\n\n if (mode === 'dirty' && affectedSteps === undefined) {\n affectedSteps = executedSteps\n }\n\n if (mode === 'full' && diagnosticsLevel === 'off' && stack.length === 0) {\n // Skip cold-start samples: dominated by JIT/module init and poison the EWMA.\n if (ctx.txnSeq !== 1) {\n const perf = execIr.perf\n const prev = perf.fullCommitEwmaOffMs\n // Keep it O(1): a tiny EWMA is enough to decide if planning is worth trying at all.\n perf.fullCommitEwmaOffMs =\n typeof prev === 'number' && Number.isFinite(prev) ? prev * 0.8 + totalDurationMs * 0.2 : totalDurationMs\n const prevMin = perf.fullCommitMinOffMs\n perf.fullCommitMinOffMs =\n typeof prevMin === 'number' && Number.isFinite(prevMin) ? Math.min(prevMin, totalDurationMs) : totalDurationMs\n const prevCount = perf.fullCommitSampleCountOff ?? 0\n perf.fullCommitSampleCountOff = prevCount + 1\n if (typeof ctx.txnSeq === 'number' && Number.isFinite(ctx.txnSeq)) {\n perf.fullCommitLastTxnSeqOff = ctx.txnSeq\n }\n }\n }\n\n const decision = shouldCollectDecisionSummary\n ? makeDecisionSummary({\n outcome,\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : shouldCollectNearFullSlimDecision\n ? 
makeNearFullSlimDecisionSummary({\n outcome,\n executedSteps,\n executionDurationMs: totalDurationMs,\n })\n : undefined\n if (decision && diagnosticsLevel !== 'off') yield* emitTraitConvergeTraceEvent(decision)\n\n return changedCount > 0\n ? ({\n _tag: 'Converged',\n patchCount: changedCount,\n ...(steps\n ? {\n summary: {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n } satisfies ConvergeSummary,\n }\n : null),\n ...(decision ? { decision } : null),\n } as const)\n : ({\n _tag: 'Noop',\n ...(steps\n ? {\n summary: {\n mode,\n budgetMs: ctx.budgetMs,\n totalDurationMs,\n totalSteps,\n executedSteps,\n skippedSteps: Math.max(0, totalSteps - executedSteps),\n changedSteps: changedCount,\n top3: pickTop3Steps(steps),\n } satisfies ConvergeSummary,\n }\n : null),\n ...(decision ? { decision } : null),\n } as const)\n })\n","/**\n * internal/digest:\n * - Minimal \"stable digest\" utilities for Runtime / Static IR / Traits, etc.\n * - Goal: stable output across runs/processes while staying lightweight (no extra dependencies).\n *\n * Note: stableStringify does not aim for full JSON equivalence; it only covers the subset needed by this repo:\n * - Stable key ordering (object fields sorted lexicographically).\n * - Non-finite numbers (NaN/±Infinity) degrade to null.\n * - Other non-representable values (undefined/function/symbol, etc.) degrade to null.\n * - undefined inside objects is not omitted; it is encoded as null (differs from JSON.stringify).\n */\n\nexport const stableStringify = (value: unknown): string => {\n if (value === null) return 'null'\n const t = typeof value\n if (t === 'string') return JSON.stringify(value)\n if (t === 'number') return Number.isFinite(value) ? String(value) : 'null'\n if (t === 'boolean') return value ? 
'true' : 'false'\n\n if (Array.isArray(value)) {\n return `[${value.map(stableStringify).join(',')}]`\n }\n\n if (t === 'object') {\n const record = value as Record<string, unknown>\n const keys = Object.keys(record).sort()\n return `{${keys.map((k) => `${JSON.stringify(k)}:${stableStringify(record[k])}`).join(',')}}`\n }\n\n return 'null'\n}\n\n/**\n * fnv1a32:\n * - 32-bit FNV-1a hash (for short digests); outputs fixed 8-char hex.\n */\nexport const fnv1a32 = (input: string): string => {\n let hash = 0x811c9dc5\n for (let i = 0; i < input.length; i++) {\n hash ^= input.charCodeAt(i)\n hash = (hash * 0x01000193) >>> 0\n }\n return hash.toString(16).padStart(8, '0')\n}\n","import { Effect, Option } from 'effect'\nimport * as SchemaAST from 'effect/SchemaAST'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { isDevEnv } from '../runtime/core/env.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport { normalizeFieldPath } from '../field-path.js'\nimport * as DepsTrace from './deps-trace.js'\nimport type { ConvergeContext } from './converge.types.js'\nimport type { StateTraitProgram, StateTraitSchemaPathRef } from './model.js'\n\nconst onceKeysFallback = new Set<string>()\n\nexport const onceInRunSession = (key: string): Effect.Effect<boolean> =>\n Effect.serviceOption(RunSessionTag).pipe(\n Effect.map((maybe) => {\n if (Option.isSome(maybe)) {\n return maybe.value.local.once(key)\n }\n if (onceKeysFallback.has(key)) return false\n onceKeysFallback.add(key)\n return true\n }),\n )\n\nconst formatList = (items: ReadonlyArray<string>, limit = 10): string => {\n if (items.length === 0) return ''\n if (items.length <= limit) return items.join(', ')\n return `${items.slice(0, limit).join(', ')}, …(+${items.length - limit})`\n}\n\nexport const emitDepsMismatch = (params: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'computed' | 'source'\n readonly fieldPath: string\n readonly diff: DepsTrace.DepsDiff\n}): 
Effect.Effect<void> => {\n return Effect.gen(function* () {\n const key = `${params.moduleId ?? 'unknown'}::${params.instanceId ?? 'unknown'}::${params.kind}::${params.fieldPath}`\n const shouldEmit = yield* onceInRunSession(`deps_mismatch:${key}`)\n if (!shouldEmit) return\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: params.moduleId,\n instanceId: params.instanceId,\n code: 'state_trait::deps_mismatch',\n severity: 'warning',\n message:\n `[deps] ${params.kind} \"${params.fieldPath}\" declared=[${formatList(params.diff.declared)}] ` +\n `reads=[${formatList(params.diff.reads)}] missing=[${formatList(params.diff.missing)}] ` +\n `unused=[${formatList(params.diff.unused)}]`,\n hint:\n 'deps is the single source of truth for dependencies: incremental scheduling / reverse closures / performance optimizations rely on deps only. ' +\n 'Keep deps consistent with actual reads; if you really depend on the whole object, declare a coarser-grained dep (e.g. \"profile\") to cover sub-field reads.',\n kind: `deps_mismatch:${params.kind}`,\n })\n })\n}\n\nconst schemaHasPath = (\n ast: SchemaAST.AST,\n segments: ReadonlyArray<string>,\n seen: Set<SchemaAST.AST> = new Set(),\n): boolean => {\n if (segments.length === 0) return true\n\n let current = SchemaAST.toType(ast)\n\n while (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) {\n return true\n }\n seen.add(current)\n current = SchemaAST.toType(current.thunk())\n }\n\n if (SchemaAST.isUnion(current)) {\n return current.types.some((t) => schemaHasPath(t, segments, seen))\n }\n\n if (SchemaAST.isArrays(current)) {\n const candidates: Array<SchemaAST.AST> = []\n for (const e of current.elements) candidates.push(e)\n for (const r of current.rest) candidates.push(r)\n if (candidates.length === 0) return true\n return candidates.some((t) => schemaHasPath(t, segments, seen))\n }\n\n if (SchemaAST.isObjects(current)) {\n const [head, ...tail] = segments\n\n for (const ps of current.propertySignatures) {\n if 
(String(ps.name) !== head) continue\n return schemaHasPath(ps.type, tail, seen)\n }\n\n // index signature: open objects like Record<string, T> allow any key\n for (const sig of current.indexSignatures) {\n const param = SchemaAST.toType(sig.parameter as unknown as SchemaAST.AST)\n const tag = (param as any)?._tag\n if (tag === 'String' || tag === 'TemplateLiteral') {\n return schemaHasPath(sig.type, tail, seen)\n }\n }\n\n return false\n }\n\n const tag = (current as any)?._tag\n if (tag === 'AnyKeyword' || tag === 'UnknownKeyword' || tag === 'ObjectKeyword' || tag === 'Declaration') {\n return true\n }\n\n return false\n}\n\nconst schemaHasFieldPath = (stateSchemaAst: SchemaAST.AST, path: string): boolean => {\n if (!path) return true\n if (path === '$root') return true\n\n const normalized = normalizeFieldPath(path)\n if (!normalized) return false\n\n const segs = normalized[0] === '$root' ? normalized.slice(1) : normalized\n return schemaHasPath(stateSchemaAst, segs)\n}\n\nconst formatSchemaMismatchLine = (ref: StateTraitSchemaPathRef): string => {\n if (ref.kind === 'fieldPath') {\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" fieldPath=\"${ref.path}\"`\n }\n if (ref.kind === 'dep') {\n const rule = ref.ruleName ? 
` rule=\"${ref.ruleName}\"` : ''\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" deps=\"${ref.path}\"${rule}`\n }\n if (ref.kind === 'link_from') {\n return `- link \"${ref.entryFieldPath}\" from=\"${ref.path}\"`\n }\n if (ref.kind === 'check_writeback') {\n return `- check \"${ref.entryFieldPath}\" writeback=\"${ref.path}\"`\n }\n return `- ${ref.entryKind} \"${ref.entryFieldPath}\" path=\"${ref.path}\"`\n}\n\nexport const emitSchemaMismatch = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: Pick<ConvergeContext<S>, 'moduleId' | 'instanceId'>,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (!isDevEnv()) return\n\n const level = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n if (level === 'off') return\n\n const key = `${ctx.moduleId ?? 'unknown'}::${ctx.instanceId}`\n const shouldEmit = yield* onceInRunSession(`schema_mismatch:${key}`)\n if (!shouldEmit) return\n\n const refs = (program.schemaPaths ?? []) as ReadonlyArray<StateTraitSchemaPathRef>\n if (refs.length === 0) return\n\n const stateSchemaAst = program.stateSchema.ast as unknown as SchemaAST.AST\n\n const mismatches: Array<StateTraitSchemaPathRef> = []\n const seen = new Set<string>()\n\n for (const ref of refs) {\n if (schemaHasFieldPath(stateSchemaAst, ref.path)) continue\n const k = `${ref.kind}|${ref.entryKind}|${ref.entryFieldPath}|${ref.ruleName ?? ''}|${ref.path}`\n if (seen.has(k)) continue\n seen.add(k)\n mismatches.push(ref)\n }\n\n if (mismatches.length === 0) return\n\n const limit = level === 'light' ? 
8 : 24\n const lines = mismatches.slice(0, limit).map(formatSchemaMismatchLine)\n if (mismatches.length > limit) {\n lines.push(`- …(+${mismatches.length - limit})`)\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n code: 'state_trait::schema_mismatch',\n severity: 'warning',\n message: `[schema] The following paths are not declared in stateSchema (total ${mismatches.length}):\\n${lines.join('\\n')}`,\n hint: 'StateTrait writeback will create missing objects/fields. Declare all fieldPath/deps/link.from and errors.* writeback paths in stateSchema, or fix typos in trait paths.',\n kind: 'schema_mismatch',\n })\n })\n","import { Effect, Layer, ServiceMap } from 'effect'\n\n/**\n * ExecVmMode:\n * - Allows kernel implementations (core-ng) to switch converge hot-path execution form without changing public semantics.\n * - Currently mainly affects typed-array reuse strategy in converge plan computation.\n * - Disabled by default in core (enable explicitly via Layer for perf/comparison runs).\n */\nexport const currentExecVmMode = ServiceMap.Reference<boolean>('@logixjs/core/ExecVmMode', {\n defaultValue: () => false,\n})\n\nexport const withExecVmMode = <A, E, R>(effect: Effect.Effect<A, E, R>): Effect.Effect<A, E, R> =>\n Effect.provideService(effect, currentExecVmMode, true)\n\nexport const execVmModeLayer = (enabled: boolean): Layer.Layer<any, never, never> =>\n Layer.succeed(currentExecVmMode, enabled) as Layer.Layer<any, never, never>\n","import { Effect, Option } from 'effect'\nimport * as EffectOp from '../effect-op.js'\nimport * as EffectOpCore from '../runtime/core/EffectOpCore.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { isDevEnv } from '../runtime/core/env.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport type { PatchReason } from '../runtime/core/StateTransaction.js'\nimport type { FieldPath } from '../field-path.js'\nimport { emitDepsMismatch, 
onceInRunSession } from './converge-diagnostics.js'\nimport type { ConvergeContext } from './converge.types.js'\nimport type { ConvergeExecIr } from './converge-exec-ir.js'\nimport * as DepsTrace from './deps-trace.js'\nimport type { StateTraitEntry } from './model.js'\n\nexport const getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? maybe.value.stack : [])),\n )\n\ntype WriterKind = 'computed' | 'link'\n\nconst getWriterKind = (entry: StateTraitEntry<any, string>): WriterKind | undefined =>\n entry.kind === 'computed' ? 'computed' : entry.kind === 'link' ? 'link' : undefined\n\nconst getWriterDeps = (entry: StateTraitEntry<any, string>): ReadonlyArray<string> => {\n if (entry.kind === 'computed') {\n return ((entry.meta as any)?.deps ?? []) as ReadonlyArray<string>\n }\n if (entry.kind === 'link') {\n return [entry.meta.from as string]\n }\n return []\n}\n\nconst shouldSkip = (entry: StateTraitEntry<any, string>, prev: unknown, next: unknown): boolean => {\n if (entry.kind === 'computed') {\n const equals = (entry.meta as any)?.equals as ((a: unknown, b: unknown) => boolean) | undefined\n return equals ? equals(prev, next) : Object.is(prev, next)\n }\n return Object.is(prev, next)\n}\n\nexport const runWriterStepOffFast = <S extends object>(\n ctx: ConvergeContext<S>,\n execIr: ConvergeExecIr,\n draft: {\n readonly getRoot: () => S\n readonly getAt: (path: FieldPath) => unknown\n readonly setAt: (path: FieldPath, value: unknown, prev?: unknown) => void\n },\n stepId: number,\n entry: StateTraitEntry<any, string>,\n): boolean => {\n const kind = getWriterKind(entry)\n if (!kind) return false\n\n const reason: PatchReason = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const from = kind === 'link' ? 
(entry.meta as any).from : undefined\n const outPathId = execIr.stepOutFieldPathIdByStepId[stepId]\n const outPath = execIr.fieldPathsById[outPathId]!\n const fromPathId = execIr.stepFromFieldPathIdByStepId[stepId]\n const fromPath = fromPathId >= 0 ? execIr.fieldPathsById[fromPathId] : undefined\n\n const current = draft.getRoot() as any\n const prev = draft.getAt(outPath)\n\n let next: unknown\n if (kind === 'computed') {\n const derive = (entry.meta as any).derive as (s: any) => unknown\n next = derive(current)\n } else {\n if (!fromPath) {\n throw new Error(`[StateTrait.converge] Missing link.from FieldPathId: from=\"${String(from)}\"`)\n }\n next = draft.getAt(fromPath)\n }\n\n const changed = !shouldSkip(entry, prev, next)\n if (!changed) return false\n\n draft.setAt(outPath, next, prev)\n ctx.setDraft(draft.getRoot())\n ctx.recordPatch(outPathId, reason, prev, next)\n return true\n}\n\nexport const runWriterStep = <S extends object>(\n ctx: ConvergeContext<S>,\n execIr: ConvergeExecIr,\n draft: {\n readonly getRoot: () => S\n readonly getAt: (path: FieldPath) => unknown\n readonly setAt: (path: FieldPath, value: unknown, prev?: unknown) => void\n },\n stepId: number,\n entry: StateTraitEntry<any, string>,\n shouldCollectDecision: boolean,\n diagnosticsLevel: Debug.DiagnosticsLevel,\n stack: EffectOp.MiddlewareStack,\n): Effect.Effect<boolean> => {\n const moduleId = ctx.moduleId\n const instanceId = ctx.instanceId\n const fieldPath = entry.fieldPath\n\n const kind = getWriterKind(entry)\n if (!kind) return Effect.succeed(false)\n\n const reason: PatchReason = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const opKind: EffectOp.EffectOp<any, any, any>['kind'] = kind === 'computed' ? 'trait-computed' : 'trait-link'\n const opName = kind === 'computed' ? 'computed:update' : 'link:propagate'\n\n const deps = getWriterDeps(entry)\n const from = kind === 'link' ? 
(entry.meta as any).from : undefined\n const outPathId = execIr.stepOutFieldPathIdByStepId[stepId]\n const outPath = execIr.fieldPathsById[outPathId]!\n const fromPathId = execIr.stepFromFieldPathIdByStepId[stepId]\n const fromPath = fromPathId >= 0 ? execIr.fieldPathsById[fromPathId] : undefined\n\n const runBody = (shouldTraceDeps: boolean): Effect.Effect<boolean> =>\n Effect.sync(() => {\n const current = draft.getRoot() as any\n\n const prev = draft.getAt(outPath)\n\n let next: unknown\n let depsDiff: DepsTrace.DepsDiff | undefined\n\n if (kind === 'computed') {\n const derive = (entry.meta as any).derive as (s: any) => unknown\n if (shouldTraceDeps) {\n const traced = DepsTrace.trace((s) => derive(s), current)\n next = traced.value\n depsDiff = DepsTrace.diffDeps(((entry.meta as any).deps ?? []) as ReadonlyArray<string>, traced.reads)\n } else {\n next = derive(current)\n }\n } else {\n if (!fromPath) {\n throw new Error(`[StateTrait.converge] Missing link.from FieldPathId: from=\"${String(from)}\"`)\n }\n next = draft.getAt(fromPath)\n }\n\n const changed = !shouldSkip(entry, prev, next)\n if (!changed) {\n return { changed: false, depsDiff }\n }\n\n draft.setAt(outPath, next, prev)\n ctx.setDraft(draft.getRoot())\n ctx.recordPatch(outPathId, reason, prev, next, undefined, stepId)\n return { changed: true, depsDiff }\n }).pipe(\n Effect.flatMap(({ changed, depsDiff }) =>\n depsDiff && kind === 'computed'\n ? emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'computed',\n fieldPath,\n diff: depsDiff,\n }).pipe(Effect.as(changed))\n : Effect.succeed(changed),\n ),\n )\n\n if (stack.length === 0) {\n if (!(kind === 'computed' && shouldCollectDecision && isDevEnv())) {\n return runBody(false)\n }\n return Effect.gen(function* () {\n const traceKey = `${moduleId ?? 'unknown'}::${instanceId ?? 
'unknown'}::computed::${fieldPath}`\n const shouldTraceDeps = yield* onceInRunSession(`deps_trace:settled:${traceKey}`)\n return yield* runBody(shouldTraceDeps)\n })\n }\n\n return Effect.gen(function* () {\n const stepLabel = diagnosticsLevel === 'off' ? undefined : (execIr.stepLabelByStepId[stepId] ?? String(stepId))\n\n let shouldTraceDeps = false\n if (kind === 'computed' && shouldCollectDecision && isDevEnv()) {\n const traceKey = `${moduleId ?? 'unknown'}::${instanceId ?? 'unknown'}::computed::${fieldPath}`\n shouldTraceDeps = yield* onceInRunSession(`deps_trace:settled:${traceKey}`)\n }\n\n const body = runBody(shouldTraceDeps)\n\n const meta: any = {\n moduleId,\n instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n fieldPath,\n deps,\n ...(kind === 'link'\n ? {\n from,\n to: fieldPath,\n }\n : null),\n ...(stepLabel ? { stepId: stepLabel } : null),\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const key = instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', key)\n }\n }\n\n const op = EffectOp.make<boolean, never, never>({\n kind: opKind,\n name: opName,\n effect: body,\n meta,\n })\n\n return yield* EffectOp.run(op, stack)\n })\n}\n","import { Effect } from 'effect'\nimport { create } from 'mutative'\nimport type { PatchReason, StateTxnOrigin, TxnDirtyEvidence } from '../runtime/core/StateTransaction.js'\nimport { normalizeFieldPath, type FieldPath, type FieldPathId } from '../field-path.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport { buildDependencyGraph } from './graph.js'\nimport type { StateTraitEntry, StateTraitProgram } from './model.js'\nimport { reverseClosure } from './reverse-closure.js'\nimport type * as RowId from './rowid.js'\n\nexport type ValidateMode = 'submit' | 'blur' | 'valueChange' | 'manual'\n\n/**\n * RULE_SKIP:\n * - Used by rules to indicate \"skip execution for this run\" (e.g. validateOn gating).\n * - Distinct from `undefined` (\"no error after execution\"): skip must not clear existing errors.\n */\nconst RULE_SKIP = Symbol.for('logix.state-trait.validate.skip')\n\nexport type ValidateTarget =\n | { readonly kind: 'root' }\n | { readonly kind: 'field'; readonly path: string }\n | {\n readonly kind: 'list'\n readonly path: string\n readonly listIndexPath?: ReadonlyArray<number>\n }\n | {\n readonly kind: 'item'\n readonly path: string\n readonly listIndexPath?: ReadonlyArray<number>\n readonly index: number\n readonly field?: string\n }\n\nexport interface ScopedValidateRequest {\n readonly mode: ValidateMode\n readonly target: ValidateTarget\n}\n\nexport interface ValidateContext<S> {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * External trigger at transaction start: stabilizes validate attribution to the txn origin,\n * preventing in-transaction derived writes from polluting attribution.\n */\n readonly origin?: StateTxnOrigin\n 
/**\n * RowIdStore: stable row identity for list scopes (later `$rowId` and rowIdMode depend on this).\n */\n readonly rowIdStore?: RowId.RowIdStore\n /**\n * List config hint from StateTraitSpec.list.identityHint (trackBy), used for rowIdMode explanation and degrade diagnostics.\n */\n readonly listConfigs?: ReadonlyArray<RowId.ListConfig>\n /**\n * Transaction dirty evidence (best-effort):\n * - Unified root/list evidence derived from StateTransaction.recordPatch(...) within the same txn window.\n * - Enables list-scope incremental rules even when callers validate by Ref.list(...).\n */\n readonly txnDirtyEvidence?: TxnDirtyEvidence\n readonly getDraft: () => S\n readonly setDraft: (next: S) => void\n readonly recordPatch: (\n path: string | FieldPath | FieldPathId | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\ntype RuleContext = {\n readonly mode: ValidateMode\n readonly state: unknown\n readonly scope: {\n readonly fieldPath: string\n readonly listPath?: string\n readonly listIndexPath?: ReadonlyArray<number>\n readonly index?: number\n /**\n * list-scope check hint: stable RowId accessor for the current list instance.\n * Used by optimized list rules to cache/skip full rescans without requiring an O(n) rowId array.\n */\n readonly rowIdAt?: (index: number) => string\n /**\n * list-scope check hint: indices that are known to be relevant for this validation batch.\n * Derived from item/field validate targets (best-effort).\n */\n readonly changedIndices?: ReadonlyArray<number>\n }\n}\n\nconst parseSegments = (path: string): ReadonlyArray<string | number> => {\n if (!path) return []\n return path.split('.').map((seg) => (/^[0-9]+$/.test(seg) ? 
Number(seg) : seg))\n}\n\nconst getAtPath = (state: any, path: string): any => {\n if (!path || state == null) return state\n const segments = parseSegments(path)\n let current: any = state\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n}\n\nconst setAtPathMutating = (draft: unknown, path: string, value: unknown): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const nextKey = segments[i + 1]!\n\n const next = current[key as any]\n if (next == null || typeof next !== 'object') {\n current[key as any] = typeof nextKey === 'number' ? [] : {}\n }\n current = current[key as any]\n }\n\n const last = segments[segments.length - 1]!\n current[last as any] = value\n}\n\nconst unsetAtPathMutating = (draft: unknown, path: string): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const next = current[key as any]\n if (next == null || typeof next !== 'object') {\n return\n }\n current = next\n }\n\n const last = segments[segments.length - 1]!\n if (Array.isArray(current) && typeof last === 'number') {\n // Keep `errors.*.rows` arrays sparse to avoid dense-undefined scans (perf + variance).\n // Note: `delete` keeps `.length` unchanged but removes the element key (hole).\n delete current[last]\n return\n }\n if (current && typeof current === 'object') {\n delete current[last as any]\n }\n}\n\nconst isPlainObject = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\nconst normalizeErrorValue = (value: unknown): unknown => (value === undefined || value === null ? 
undefined : value)\n\nconst mergeRuleErrors = (errors: ReadonlyArray<unknown>): unknown => {\n if (errors.length === 0) return undefined\n if (errors.length === 1) return errors[0]\n\n // ErrorValue constraint: arrays must not represent \"multiple errors\"; for duplicates on the same field, keep the first deterministically.\n if (errors.every(isPlainObject)) {\n const merged: Record<string, unknown> = {}\n for (const patch of errors as ReadonlyArray<Record<string, unknown>>) {\n for (const key of Object.keys(patch)) {\n const incoming = normalizeErrorValue(patch[key])\n if (incoming === undefined) continue\n if (!(key in merged)) merged[key] = incoming\n }\n }\n return Object.keys(merged).length > 0 ? merged : undefined\n }\n\n return errors[0]\n}\n\ntype ErrorValueLeafObject = {\n readonly message: string\n readonly code?: string\n readonly details?: unknown\n}\n\nconst isErrorValueLeafObject = (value: unknown): value is ErrorValueLeafObject => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return false\n const anyValue = value as Record<string, unknown>\n const msg = anyValue.message\n if (typeof msg !== 'string' || msg.length === 0) return false\n for (const key of Object.keys(anyValue)) {\n if (key !== 'message' && key !== 'code' && key !== 'details') return false\n }\n const code = anyValue.code\n if (code !== undefined && (typeof code !== 'string' || code.length === 0)) return false\n return true\n}\n\nconst countErrorLeaves = (value: unknown): number => {\n if (value === null || value === undefined) return 0\n if (typeof value === 'string') return value.length > 0 ? 
1 : 0\n if (Array.isArray(value)) return value.reduce((acc, v) => acc + countErrorLeaves(v), 0)\n if (typeof value === 'object') {\n if (isErrorValueLeafObject(value)) return 1\n let acc = 0\n for (const [k, v] of Object.entries(value as any)) {\n if (k === '$rowId') continue\n acc += countErrorLeaves(v)\n }\n return acc\n }\n return 1\n}\n\ntype ListScopeResult = {\n readonly listError?: unknown\n readonly rows?: ReadonlyArray<unknown>\n readonly traces?: ReadonlyArray<ListScopeRuleTrace>\n readonly touchedKeys: ReadonlySet<string>\n readonly touchedListError: boolean\n}\n\ntype TraitCheckOp = 'set' | 'unset' | 'insert' | 'remove'\n\ntype ListScopeRuleTrace = {\n readonly ruleId: string\n readonly summary: {\n readonly scannedRows: number\n readonly affectedRows: number\n readonly changedRows: number\n readonly setCount?: number\n readonly clearedCount?: number\n readonly durationMs?: number\n }\n}\n\ntype TraitCheckRowIdMode = 'trackBy' | 'store' | 'index'\n\ntype TraitCheckDegraded = {\n readonly kind: string\n readonly message?: string\n}\n\nconst nowMs = (() => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return () => perf.now()\n }\n return () => Date.now()\n})()\n\nconst isTraitCheckOp = (value: unknown): value is TraitCheckOp =>\n value === 'set' || value === 'unset' || value === 'insert' || value === 'remove'\n\nconst normalizeTraitCheckPath = (path: string): ReadonlyArray<string> => normalizeFieldPath(path) ?? 
['$root']\n\nconst sameFieldPath = (a: ReadonlyArray<string>, b: ReadonlyArray<string>): boolean => {\n if (a.length !== b.length) return false\n for (let i = 0; i < a.length; i++) {\n if (a[i] !== b[i]) return false\n }\n return true\n}\n\nconst toTraitCheckTrigger = (\n origin: StateTxnOrigin | undefined,\n fallbackPath: string,\n): { readonly kind: string; readonly path: ReadonlyArray<string>; readonly op: TraitCheckOp } => {\n const details = origin?.details\n const detailsObj = isPlainObject(details) ? (details as Record<string, unknown>) : undefined\n const tag = detailsObj && typeof detailsObj._tag === 'string' ? detailsObj._tag : undefined\n\n const kindBase = origin?.kind && origin.kind.length > 0 ? origin.kind : 'unknown'\n const kind =\n tag && tag.length > 0\n ? `${kindBase}:${tag}`\n : origin?.name && origin.name.length > 0\n ? `${kindBase}:${origin.name}`\n : kindBase\n\n const opRaw = detailsObj?.op\n const op: TraitCheckOp = isTraitCheckOp(opRaw) ? opRaw : 'set'\n\n const pathRaw = detailsObj && typeof detailsObj.path === 'string' ? detailsObj.path : undefined\n const path = normalizeTraitCheckPath(pathRaw ?? 
fallbackPath)\n\n return { kind, path, op }\n}\n\nconst toTraitCheckRowIdMode = (params: {\n readonly trackBy?: string\n readonly rowIdStore?: RowId.RowIdStore\n}): TraitCheckRowIdMode => {\n if (params.trackBy) return 'trackBy'\n if (params.rowIdStore) return 'store'\n return 'index'\n}\n\nconst toTraitCheckDegraded = (\n trigger: { readonly op: TraitCheckOp; readonly path: ReadonlyArray<string> },\n scopeFieldPath: ReadonlyArray<string>,\n rowIdMode: TraitCheckRowIdMode,\n): TraitCheckDegraded | undefined => {\n if (rowIdMode === 'trackBy') return undefined\n if (trigger.op !== 'set') return undefined\n if (!sameFieldPath(trigger.path, scopeFieldPath)) return undefined\n return {\n kind: 'rowId:degraded:no_trackBy_root_replace',\n message: 'list root was replaced without trackBy; rowId stability is degraded',\n }\n}\n\nconst mergeRowPatchPreferFirst = (\n base: Record<string, unknown> | undefined,\n incoming: unknown,\n): Record<string, unknown> | undefined => {\n if (!isPlainObject(incoming)) return base\n const next: Record<string, unknown> = base ? { ...base } : {}\n for (const key of Object.keys(incoming)) {\n const v = normalizeErrorValue(incoming[key])\n if (v === undefined) continue\n if (!(key in next)) next[key] = v\n }\n return Object.keys(next).length > 0 ? 
next : undefined\n}\n\nconst shallowEqualPlainObject = (a: Record<string, unknown>, b: Record<string, unknown>): boolean => {\n const aKeys = Object.keys(a)\n const bKeys = Object.keys(b)\n if (aKeys.length !== bKeys.length) return false\n for (const key of aKeys) {\n if (!Object.prototype.hasOwnProperty.call(b, key)) return false\n if (!Object.is(a[key], b[key])) return false\n }\n return true\n}\n\nconst collectRuleKeysFromDeps = (rule: unknown, listPath: string): ReadonlyArray<string> => {\n if (!rule || typeof rule !== 'object') return []\n const deps = (rule as any).deps\n if (!Array.isArray(deps)) return []\n\n const prefix = `${listPath}[].`\n const keys: Array<string> = []\n for (const dep of deps) {\n if (typeof dep !== 'string') continue\n if (dep.startsWith(prefix)) {\n const key = dep.slice(prefix.length)\n if (key) keys.push(key)\n continue\n }\n if (dep.length > 0 && !dep.includes('.') && !dep.includes('[') && !dep.includes(']')) {\n keys.push(dep)\n }\n }\n\n return Array.from(new Set(keys)).sort()\n}\n\nconst evalListScopeCheck = (\n entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>,\n input: unknown,\n ctx: RuleContext,\n options?: {\n readonly trace?: {\n readonly listPath: string\n readonly errorsBasePath: string\n readonly errorsRoot: unknown\n }\n readonly traceLite?: boolean\n },\n): ListScopeResult | typeof RULE_SKIP => {\n const rules = entry.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n\n let listError: unknown | undefined = undefined\n let rows: Array<Record<string, unknown> | undefined> | undefined = undefined\n let traces: Array<ListScopeRuleTrace> | undefined = undefined\n let ran = false\n let touchedListError = false\n const touchedKeys = new Set<string>()\n const listPath = ctx.scope.listPath ?? ctx.scope.fieldPath\n const scannedRows = Array.isArray(input) ? 
input.length : 0\n\n const mergeRows = (incomingRows: ReadonlyArray<unknown>): void => {\n if (!rows) rows = []\n if (rows.length < incomingRows.length) rows.length = incomingRows.length\n\n // Sparse merge: most list-scope rules only touch a small number of rows.\n for (const key of Object.keys(incomingRows as any)) {\n const index = Number(key)\n if (!Number.isInteger(index) || index < 0) continue\n const incoming = (incomingRows as any)[index]\n if (incoming === undefined) continue\n const merged = mergeRowPatchPreferFirst(rows[index], incoming)\n rows[index] = merged\n }\n }\n\n const summarizeRuleRows = (\n errorsBasePath: string,\n keys: ReadonlyArray<string>,\n scannedRows: number,\n rowsPatch: ReadonlyArray<unknown> | undefined,\n ): {\n readonly affectedRows: number\n readonly changedRows: number\n readonly setCount: number\n readonly clearedCount: number\n } => {\n if (keys.length === 0 || scannedRows <= 0) {\n return { affectedRows: 0, changedRows: 0, setCount: 0, clearedCount: 0 }\n }\n\n let affectedRows = 0\n let changedRows = 0\n let setCount = 0\n let clearedCount = 0\n\n const prevRowsAny = getAtPath(options?.trace?.errorsRoot as any, `${errorsBasePath}.rows`)\n const prevRowsLike =\n prevRowsAny && typeof prevRowsAny === 'object' && !Array.isArray(prevRowsAny) ? (prevRowsAny as any) : undefined\n const prevRows = Array.isArray(prevRowsAny) ? 
prevRowsAny : undefined\n\n const indices = new Set<number>()\n const addIndex = (k: string): void => {\n const i = Number(k)\n if (!Number.isInteger(i) || i < 0 || i >= scannedRows) return\n indices.add(i)\n }\n\n if (rowsPatch && typeof rowsPatch === 'object') {\n for (const k of Object.keys(rowsPatch as any)) {\n const v = (rowsPatch as any)[k]\n if (v === undefined) continue\n addIndex(k)\n }\n }\n\n if (prevRows && typeof prevRows === 'object') {\n for (const k of Object.keys(prevRows as any)) {\n const v = (prevRows as any)[k]\n if (v === undefined) continue\n addIndex(k)\n }\n } else if (prevRowsLike && typeof prevRowsLike === 'object') {\n for (const k of Object.keys(prevRowsLike as any)) {\n const v = (prevRowsLike as any)[k]\n if (v === undefined) continue\n addIndex(k)\n }\n }\n\n for (const index of Array.from(indices).sort((a, b) => a - b)) {\n const prevRow = prevRows ? prevRows[index] : prevRowsLike ? prevRowsLike[index] : undefined\n const prevObj = isPlainObject(prevRow) ? (prevRow as Record<string, unknown>) : undefined\n const patch = rowsPatch?.[index]\n const patchObj = isPlainObject(patch) ? 
(patch as Record<string, unknown>) : undefined\n\n let hasPrev = false\n let hasNext = false\n let rowChanged = false\n\n for (const key of keys) {\n const prev = normalizeErrorValue(prevObj?.[key])\n const next = normalizeErrorValue(patchObj?.[key])\n if (prev !== undefined) hasPrev = true\n if (next !== undefined) hasNext = true\n if (Object.is(prev, next)) continue\n rowChanged = true\n if (next === undefined) {\n if (prev !== undefined) clearedCount += 1\n } else {\n setCount += 1\n }\n }\n\n if (hasPrev || hasNext) affectedRows += 1\n if (rowChanged) changedRows += 1\n }\n\n return { affectedRows, changedRows, setCount, clearedCount }\n }\n\n for (const name of names) {\n const rule = rules[name]\n const collectTrace = options?.trace?.listPath && options?.trace?.errorsRoot\n const collectTraceLite = options?.traceLite === true\n const startedAt = collectTrace ? nowMs() : 0\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctx)\n : rule && typeof rule === 'object'\n ? rule.validate(input, ctx)\n : undefined\n\n if (out === RULE_SKIP) continue\n ran = true\n\n const keysFromDeps = collectRuleKeysFromDeps(rule, listPath)\n\n for (const key of keysFromDeps) {\n touchedKeys.add(key)\n }\n\n if (collectTrace) {\n const traceErrorsBasePath = options!.trace!.errorsBasePath\n\n const rowsPatch: ReadonlyArray<unknown> | undefined = Array.isArray(out)\n ? out\n : isPlainObject(out) && Array.isArray((out as any).rows)\n ? 
((out as any).rows as ReadonlyArray<unknown>)\n : undefined\n\n const summary = summarizeRuleRows(traceErrorsBasePath, keysFromDeps, scannedRows, rowsPatch)\n const durationMs = Math.max(0, nowMs() - startedAt)\n\n if (!traces) traces = []\n traces.push({\n ruleId: `${entry.fieldPath}#${name}`,\n summary: {\n scannedRows,\n affectedRows: summary.affectedRows,\n changedRows: summary.changedRows,\n setCount: summary.setCount,\n clearedCount: summary.clearedCount,\n durationMs,\n },\n })\n } else if (collectTraceLite) {\n if (!traces) traces = []\n traces.push({\n ruleId: `${entry.fieldPath}#${name}`,\n summary: {\n scannedRows,\n affectedRows: 0,\n changedRows: 0,\n },\n })\n }\n\n if (out === undefined) continue\n\n if (Array.isArray(out)) {\n mergeRows(out)\n continue\n }\n\n if (isPlainObject(out)) {\n const maybeRows = (out as any).rows\n const hasListKey = Object.prototype.hasOwnProperty.call(out, '$list')\n if (hasListKey) touchedListError = true\n const maybeListError = normalizeErrorValue((out as any).$list)\n if (maybeListError !== undefined && listError === undefined) {\n listError = maybeListError\n }\n if (Array.isArray(maybeRows)) {\n mergeRows(maybeRows)\n } else if (!hasListKey && maybeListError === undefined) {\n // Allow list-scope rules to return a `$list` error value (string/object) directly without implying a rows structure.\n const v = normalizeErrorValue(out)\n if (v !== undefined && listError === undefined) listError = v\n touchedListError = true\n }\n continue\n }\n\n // Non object/array: treat as a `$list` error value.\n const v = normalizeErrorValue(out)\n if (v !== undefined && listError === undefined) listError = v\n touchedListError = true\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n\n if (!ran) return RULE_SKIP\n return {\n listError,\n rows,\n traces,\n touchedKeys,\n touchedListError,\n 
}\n}\n\nconst toPatternPath = (path: string): string => {\n if (!path) return path\n const segments = path.split('.').filter(Boolean)\n const out: Array<string> = []\n\n for (const seg of segments) {\n if (/^[0-9]+$/.test(seg)) {\n if (out.length === 0) continue\n const last = out[out.length - 1]!\n if (!last.endsWith('[]')) out[out.length - 1] = `${last}[]`\n continue\n }\n out.push(seg)\n }\n\n return out.join('.')\n}\n\nconst toGraphTargets = (target: ValidateTarget): ReadonlyArray<string> => {\n if (target.kind === 'root') {\n return []\n }\n if (target.kind === 'field') {\n return [toPatternPath(target.path)]\n }\n if (target.kind === 'list') {\n // A list target should hit both list-scope check (fieldPath=listPath) and item-scope check (fieldPath=listPath[]).\n return [target.path, `${target.path}[]`]\n }\n // item\n const base = `${target.path}[]`\n const field = target.field ? toPatternPath(target.field) : undefined\n return [field ? `${base}.${field}` : base]\n}\n\nconst normalizeListIndexPath = (listIndexPath: ReadonlyArray<number> | undefined): ReadonlyArray<number> => {\n if (!Array.isArray(listIndexPath) || listIndexPath.length === 0) return []\n const out: Array<number> = []\n for (const n of listIndexPath) {\n if (!Number.isInteger(n) || n < 0) continue\n out.push(n)\n }\n return out\n}\n\nconst toListInstanceKey = (listPath: string, listIndexPath: ReadonlyArray<number> | undefined): string => {\n const p = normalizeListIndexPath(listIndexPath)\n return p.length === 0 ? 
`${listPath}@@` : `${listPath}@@${p.join(',')}`\n}\n\nconst extractIndexBindingsAndInstanceIndexPaths = (\n requests: ReadonlyArray<ScopedValidateRequest>,\n): {\n readonly indexBindings: Map<string, ReadonlySet<number>>\n readonly instanceIndexPathByKey: Map<string, ReadonlyArray<number>>\n} => {\n const indexBindings = new Map<string, Set<number>>()\n const instanceIndexPathByKey = new Map<string, ReadonlyArray<number>>()\n\n const ensureInstanceIndexPath = (listPath: string, listIndexPath: ReadonlyArray<number> | undefined): string => {\n const normalized = normalizeListIndexPath(listIndexPath)\n const key = toListInstanceKey(listPath, normalized)\n if (!instanceIndexPathByKey.has(key)) {\n instanceIndexPathByKey.set(key, normalized)\n }\n return key\n }\n\n const addIndexBinding = (listPath: string, listIndexPath: ReadonlyArray<number> | undefined, index: number): void => {\n if (!Number.isInteger(index) || index < 0) return\n const key = ensureInstanceIndexPath(listPath, listIndexPath)\n const set = indexBindings.get(key) ?? 
new Set<number>()\n set.add(index)\n indexBindings.set(key, set)\n }\n\n for (const req of requests) {\n const target = req.target\n if (target.kind === 'item') {\n addIndexBinding(target.path, target.listIndexPath, target.index)\n continue\n }\n if (target.kind === 'list') {\n // Needed for nested list instance enumeration (listIndexPath comes from the ref).\n ensureInstanceIndexPath(target.path, target.listIndexPath)\n continue\n }\n if (target.kind === 'field') {\n // Best-effort: treat numeric segments as list indices, and derive index bindings for list/item scopes.\n // This enables incremental list-scope rules even when callers validate by concrete valuePath (\"items.10.x\").\n const segments = target.path.split('.').filter(Boolean)\n const listPathSegments: Array<string> = []\n const listIndexPath: Array<number> = []\n for (const seg of segments) {\n if (/^[0-9]+$/.test(seg)) {\n if (listPathSegments.length === 0) continue\n const idx = Number(seg)\n if (!Number.isFinite(idx) || idx < 0) continue\n const n = Math.floor(idx)\n addIndexBinding(listPathSegments.join('.'), listIndexPath, n)\n // Descend into this list item: subsequent nested list bindings should carry this index as parent listIndexPath.\n listIndexPath.push(n)\n continue\n }\n listPathSegments.push(seg)\n }\n continue\n }\n }\n\n const readonlyIndexBindings = new Map<string, ReadonlySet<number>>()\n for (const [k, set] of indexBindings) readonlyIndexBindings.set(k, set)\n\n return {\n indexBindings: readonlyIndexBindings,\n instanceIndexPathByKey,\n }\n}\n\nconst extractListBindings = (\n requests: ReadonlyArray<ScopedValidateRequest>,\n): {\n readonly all: ReadonlySet<string>\n readonly instances: ReadonlySet<string>\n} => {\n const all = new Set<string>()\n const instances = new Set<string>()\n for (const req of requests) {\n if (req.target.kind !== 'list') continue\n if (!req.target.path) continue\n if (req.target.listIndexPath && req.target.listIndexPath.length > 0) {\n 
instances.add(toListInstanceKey(req.target.path, req.target.listIndexPath))\n continue\n }\n all.add(req.target.path)\n }\n return { all, instances }\n}\n\nconst resolveMode = (requests: ReadonlyArray<ScopedValidateRequest>): ValidateMode => {\n const priorities: Record<ValidateMode, number> = {\n submit: 4,\n blur: 3,\n valueChange: 2,\n manual: 1,\n }\n let best: ValidateMode = 'manual'\n let bestP = priorities[best]\n for (const r of requests) {\n const p = priorities[r.mode]\n if (p > bestP) {\n bestP = p\n best = r.mode\n }\n }\n return best\n}\n\nconst evalCheck = (\n entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>,\n input: unknown,\n ctx: RuleContext,\n): unknown => {\n const rules = entry.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n const results: Array<unknown> = []\n let ran = false\n\n for (const name of names) {\n const rule = rules[name]\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctx)\n : rule && typeof rule === 'object'\n ? 
rule.validate(input, ctx)\n : undefined\n if (out === RULE_SKIP) continue\n ran = true\n const normalized = normalizeErrorValue(out)\n if (normalized !== undefined) results.push(normalized)\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n\n if (!ran) return RULE_SKIP\n return mergeRuleErrors(results)\n}\n\ntype ErrorUpdate = {\n readonly errorPath: string\n readonly prev: unknown\n readonly next: unknown\n readonly stepId: string\n}\n\n/**\n * validateInTransaction:\n * - Execute a batch of scoped validate requests within an already-started StateTransaction.\n * - Compute the minimal check set via ReverseClosure, and write results back to `state.errors.*`.\n * - If no actual error changes occur, do not update the draft (preserve 0-commit semantics).\n */\nexport const validateInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: ValidateContext<S>,\n requests: ReadonlyArray<ScopedValidateRequest>,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const enableTrace = diagnosticsLevel !== 'off'\n const enableTraceDetail = diagnosticsLevel === 'full' || diagnosticsLevel === 'sampled'\n const enableTraceLite = diagnosticsLevel === 'light'\n const traceEvents: Array<Debug.Event> | undefined = enableTrace ? 
[] : undefined\n\n yield* Effect.sync(() => {\n if (requests.length === 0) return\n\n const checks = program.entries.filter(\n (e): e is Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }> => (e as any).kind === 'check',\n )\n if (checks.length === 0) return\n\n const hasRoot = requests.some((r) => r.target.kind === 'root')\n const draft = ctx.getDraft() as any\n\n // Compute check scopes to execute (set of field paths).\n const scopesToValidate = (() => {\n if (hasRoot) {\n return new Set<string>(checks.map((c) => c.fieldPath))\n }\n const graph = buildDependencyGraph(program)\n const set = new Set<string>()\n for (const req of requests) {\n for (const t of toGraphTargets(req.target)) {\n for (const node of reverseClosure(graph, t)) {\n set.add(node)\n }\n }\n }\n return set\n })()\n\n const selectedChecks = checks.filter((c) => scopesToValidate.has(c.fieldPath))\n if (selectedChecks.length === 0) return\n\n const mode = resolveMode(requests)\n\n if (enableTrace && traceEvents) {\n traceEvents.push({\n type: 'trace:trait:validate',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data: {\n mode,\n requestCount: requests.length,\n selectedCheckCount: selectedChecks.length,\n hasRoot,\n },\n })\n }\n\n // Item-scope bindings: used only for non-root validate (root validate runs full length by current arrays).\n // Includes best-effort extraction from field valuePaths with numeric segments (e.g. 
\"items.10.x\").\n const extracted = extractIndexBindingsAndInstanceIndexPaths(requests)\n const indexBindings = extracted.indexBindings\n const instanceIndexPathByKey = extracted.instanceIndexPathByKey\n\n const listBindings = extractListBindings(requests)\n const listBindingsAll = listBindings.all\n const listBindingsInstances = listBindings.instances\n\n const updates: Array<ErrorUpdate> = []\n\n const listConfigByPath = (() => {\n const map = new Map<string, RowId.ListConfig>()\n const configs = ctx.listConfigs ?? []\n for (const cfg of configs) {\n if (!cfg || typeof (cfg as any).path !== 'string') continue\n map.set((cfg as any).path, cfg as any)\n }\n return map\n })()\n\n const readTrackBy = (item: unknown, trackBy: string): unknown => {\n if (!item || typeof item !== 'object') return undefined\n const segments = trackBy.split('.')\n let current: any = item\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n }\n\n const makeStepId = (fieldPath: string, index?: number): string =>\n index === undefined ? 
`check:${fieldPath}` : `check:${fieldPath}@${index}`\n\n type ListRuntime = {\n readonly listPath: string\n readonly listIndexPath: ReadonlyArray<number>\n readonly valuePath: string\n readonly errorBasePath: string\n readonly parentRowId?: RowId.RowId\n readonly items: ReadonlyArray<unknown>\n readonly trackBy?: string\n readonly rowIdAt?: (index: number) => string\n }\n\n const listPaths = Array.from(listConfigByPath.keys())\n const listPathSet = new Set(listPaths)\n\n const parentOf = (path: string): string | undefined => {\n const segments = path.split('.').filter(Boolean)\n let best: string | undefined\n for (let i = 1; i < segments.length; i++) {\n const prefix = segments.slice(0, i).join('.')\n if (listPathSet.has(prefix)) best = prefix\n }\n return best\n }\n\n const parentByPath = new Map<string, string | undefined>()\n const suffixByPath = new Map<string, string>()\n for (const path of listPaths) {\n const parent = parentOf(path)\n parentByPath.set(path, parent)\n const suffix = parent ? path.slice(parent.length + 1) : path\n suffixByPath.set(path, suffix)\n }\n\n const normalizeInstanceIndexPath = (\n listPath: string,\n listIndexPath: ReadonlyArray<number> | undefined,\n ): ReadonlyArray<number> | undefined => {\n const normalized = normalizeListIndexPath(listIndexPath)\n let expected = 0\n let p = parentByPath.get(listPath)\n while (p) {\n expected += 1\n p = parentByPath.get(p)\n }\n if (expected === 0) return []\n if (normalized.length !== expected) return undefined\n return normalized\n }\n\n const listRuntimeByKey = new Map<string, ListRuntime>()\n\n const getListRuntime = (listPath: string, listIndexPath: ReadonlyArray<number>): ListRuntime | undefined => {\n const parent = parentByPath.get(listPath)\n const cacheKey = parent ? 
`${listPath}@@#${listIndexPath.join(',')}` : `${listPath}@@root`\n\n const cached = listRuntimeByKey.get(cacheKey)\n if (cached) return cached\n\n const listCfg = listConfigByPath.get(listPath)\n const trackBy =\n listCfg && typeof (listCfg as any).trackBy === 'string' ? ((listCfg as any).trackBy as string) : undefined\n\n if (!parent) {\n const listValue = getAtPath(draft, listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n // Correctness: lists without trackBy must reconcile rowIds on every validate window,\n // otherwise nested list rowId bindings can break under parent reorder.\n const ensuredRowIds: ReadonlyArray<string> | undefined =\n ctx.rowIdStore && !trackBy ? ctx.rowIdStore.ensureList(listPath, items, trackBy) : undefined\n const rowIdAt = (index: number): string => {\n if (index < 0 || index >= items.length) return String(index)\n const item = items[index]\n if (trackBy) {\n const k = readTrackBy(item, trackBy)\n if (k !== undefined) return String(k)\n }\n const fromEnsured = ensuredRowIds?.[index]\n if (typeof fromEnsured === 'string' && fromEnsured.length > 0) return fromEnsured\n const fromStore = ctx.rowIdStore?.getRowId(listPath, index)\n if (typeof fromStore === 'string' && fromStore.length > 0) return fromStore\n return String(index)\n }\n\n const out: ListRuntime = {\n listPath,\n listIndexPath: [],\n valuePath: listPath,\n errorBasePath: `errors.${listPath}`,\n items,\n trackBy,\n rowIdAt,\n }\n listRuntimeByKey.set(cacheKey, out)\n return out\n }\n\n if (listIndexPath.length === 0) return undefined\n const parentIndexPath = listIndexPath.slice(0, -1)\n const parentIndex = listIndexPath[listIndexPath.length - 1]!\n const parentRuntime = getListRuntime(parent, parentIndexPath)\n if (!parentRuntime) return undefined\n if (parentIndex < 0 || parentIndex >= parentRuntime.items.length) return undefined\n\n const suffix = suffixByPath.get(listPath) ?? 
''\n if (!suffix) return undefined\n\n const valuePath = `${parentRuntime.valuePath}.${parentIndex}.${suffix}`\n const errorBasePath = `${parentRuntime.errorBasePath}.rows.${parentIndex}.${suffix}`\n\n const listValue = getAtPath(draft, valuePath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n\n const parentRowId =\n (parentRuntime.rowIdAt ? (parentRuntime.rowIdAt(parentIndex) as any) : undefined) ??\n (ctx.rowIdStore ? ctx.rowIdStore.getRowId(parent, parentIndex, parentRuntime.parentRowId) : undefined)\n\n // See root list comment above: ensure mapping for no-trackBy lists to preserve stability under reorder.\n const ensuredRowIds: ReadonlyArray<string> | undefined =\n ctx.rowIdStore && !trackBy ? ctx.rowIdStore.ensureList(listPath, items, trackBy, parentRowId) : undefined\n const rowIdAt = (index: number): string => {\n if (index < 0 || index >= items.length) return String(index)\n const item = items[index]\n if (trackBy) {\n const k = readTrackBy(item, trackBy)\n if (k !== undefined) return String(k)\n }\n const fromEnsured = ensuredRowIds?.[index]\n if (typeof fromEnsured === 'string' && fromEnsured.length > 0) return fromEnsured\n const fromStore = ctx.rowIdStore?.getRowId(listPath, index, parentRowId)\n if (typeof fromStore === 'string' && fromStore.length > 0) return fromStore\n return String(index)\n }\n\n const out: ListRuntime = {\n listPath,\n listIndexPath,\n valuePath,\n errorBasePath,\n parentRowId,\n items,\n trackBy,\n rowIdAt,\n }\n listRuntimeByKey.set(cacheKey, out)\n return out\n }\n\n const enumerateAllListInstances = (listPath: string): ReadonlyArray<ListRuntime> => {\n const parent = parentByPath.get(listPath)\n if (!parent) {\n const rt = getListRuntime(listPath, [])\n return rt ? 
[rt] : []\n }\n\n const parentInstances = enumerateAllListInstances(parent)\n const out: Array<ListRuntime> = []\n for (const p of parentInstances) {\n for (let i = 0; i < p.items.length; i++) {\n const childIndexPath = [...p.listIndexPath, i]\n const rt = getListRuntime(listPath, childIndexPath)\n if (rt) out.push(rt)\n }\n }\n return out\n }\n\n type RowDraft = {\n readonly listPath: string\n readonly listIndexPath: ReadonlyArray<number>\n readonly parentRowId?: RowId.RowId\n readonly index: number\n readonly errorBasePath: string\n readonly errorPath: string\n readonly prev: unknown\n readonly next: Record<string, unknown>\n readonly stepId: string\n removed?: boolean\n }\n\n const rowDrafts = new Map<string, RowDraft>()\n\n const ROW_DRAFT_PREV_NOT_PROVIDED = Symbol.for('logix.state-trait.validate.rowDraftPrev.notProvided')\n const getOrCreateRowDraft = (\n list: ListRuntime,\n index: number,\n stepId: string,\n prevRow: unknown | typeof ROW_DRAFT_PREV_NOT_PROVIDED = ROW_DRAFT_PREV_NOT_PROVIDED,\n ): RowDraft => {\n const errorPath = `${list.errorBasePath}.rows.${index}`\n const existing = rowDrafts.get(errorPath)\n if (existing) return existing\n\n const prev = prevRow === ROW_DRAFT_PREV_NOT_PROVIDED ? getAtPath(draft, errorPath) : prevRow\n const next: Record<string, unknown> = isPlainObject(prev) ? { ...(prev as any) } : {}\n\n const out: RowDraft = {\n listPath: list.listPath,\n listIndexPath: list.listIndexPath,\n parentRowId: list.parentRowId,\n index,\n errorBasePath: list.errorBasePath,\n errorPath,\n prev,\n next,\n stepId,\n removed: false,\n }\n rowDrafts.set(errorPath, out)\n return out\n }\n\n const applyScopedRowPatch = (\n row: RowDraft,\n keysFromDeps: ReadonlySet<string>,\n patchObj: Record<string, unknown> | undefined,\n ): void => {\n if (keysFromDeps.size === 0) return\n\n const patchKeys = patchObj ? 
Object.keys(patchObj) : []\n const existingKeys = Object.keys(row.next).filter((k) => k !== '$rowId')\n\n const keysToApply = new Set<string>()\n for (const key of existingKeys) {\n if (keysFromDeps.has(key)) keysToApply.add(key)\n }\n for (const key of patchKeys) {\n if (keysFromDeps.has(key)) keysToApply.add(key)\n }\n\n if (keysToApply.size === 0) return\n\n for (const key of keysToApply) {\n const v = normalizeErrorValue(patchObj?.[key])\n if (v === undefined) {\n delete row.next[key]\n } else {\n row.next[key] = v\n }\n }\n }\n\n for (const check of selectedChecks) {\n const scopeFieldPath = check.fieldPath\n\n // list-scope check: write back into `$list/rows[]` (errors.<listPath>.$list / errors.<listPath>.rows[i].*).\n const listCfg = listConfigByPath.get(scopeFieldPath)\n if (listCfg) {\n const listPath = scopeFieldPath\n const listInstances = (() => {\n if (hasRoot) return enumerateAllListInstances(listPath)\n if (listBindingsAll.has(listPath)) return enumerateAllListInstances(listPath)\n\n const keys = new Set<string>()\n for (const k of listBindingsInstances) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n for (const k of indexBindings.keys()) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n\n if (keys.size === 0) return enumerateAllListInstances(listPath)\n\n const out: Array<ListRuntime> = []\n for (const k of keys) {\n const indexPath = instanceIndexPathByKey.get(k)\n const normalized = normalizeInstanceIndexPath(listPath, indexPath)\n if (!normalized) continue\n const rt = getListRuntime(listPath, normalized)\n if (rt) out.push(rt)\n }\n return out\n })()\n\n for (const listRuntime of listInstances) {\n const items = listRuntime.items\n\n const instanceKey = toListInstanceKey(listPath, listRuntime.listIndexPath)\n\n\t // Best-effort changed indices hint for incremental list-scope rules.\n\t // Priority order:\n\t // 1) request-scoped evidence (item/field validate targets)\n\t // 2) txn evidence (Ref.list(...) 
-> changedIndices derived from recordPatch(valuePath))\n\t const changedIndices: ReadonlyArray<number> | undefined = (() => {\n\t if (hasRoot) return undefined\n\n\t // When the batch is scoped by item/field targets, prefer request-derived bindings (cheapest + explicit).\n\t if (!listBindingsAll.has(listPath) && !listBindingsInstances.has(instanceKey)) {\n\t const set = indexBindings.get(instanceKey)\n\t if (!set || set.size === 0) return undefined\n\t return Array.from(set).sort((a, b) => a - b)\n\t }\n\n\t // Ref.list(...) / list-scope validate: derive from txn dirty evidence.\n\t const ev = ctx.txnDirtyEvidence\n\t const listEv = ev?.list\n\t if (!ev || ev.dirtyAll || !listEv) return undefined\n\t if (listEv.rootTouched.has(instanceKey)) return undefined\n\t const set = listEv.indexBindings.get(instanceKey)\n\t if (!set || set.size === 0) return undefined\n\t return Array.from(set).sort((a, b) => a - b)\n\t })()\n\n const trigger = enableTrace ? toTraitCheckTrigger(ctx.origin, listPath) : undefined\n\n const scopeFieldPathSegments = enableTrace ? normalizeTraitCheckPath(listPath) : undefined\n\n const rowIdMode = enableTrace\n ? toTraitCheckRowIdMode({\n trackBy: listRuntime.trackBy,\n rowIdStore: ctx.rowIdStore,\n })\n : undefined\n\n const degraded =\n enableTrace && trigger && scopeFieldPathSegments && rowIdMode\n ? toTraitCheckDegraded(trigger, scopeFieldPathSegments, rowIdMode)\n : undefined\n\n const next = evalListScopeCheck(\n check,\n items,\n {\n mode,\n state: draft,\n scope: {\n fieldPath: scopeFieldPath,\n listPath,\n listIndexPath: listRuntime.listIndexPath,\n rowIdAt: listRuntime.rowIdAt,\n changedIndices,\n },\n },\n enableTraceDetail\n ? {\n trace: { listPath, errorsBasePath: listRuntime.errorBasePath, errorsRoot: draft },\n }\n : enableTraceLite\n ? 
{ traceLite: true }\n : undefined,\n )\n if (next === RULE_SKIP) continue\n const keysFromDeps = next.touchedKeys\n\n if (\n enableTrace &&\n traceEvents &&\n trigger &&\n scopeFieldPathSegments &&\n rowIdMode &&\n next.traces &&\n next.traces.length > 0\n ) {\n for (const t of next.traces) {\n const data: any = {\n ruleId: t.ruleId,\n scopeFieldPath: scopeFieldPathSegments,\n mode,\n trigger,\n summary: t.summary,\n rowIdMode,\n }\n if (degraded) {\n data.degraded = degraded\n }\n traceEvents.push({\n type: 'trace:trait:check',\n moduleId: ctx.moduleId,\n instanceId: ctx.instanceId,\n txnSeq: ctx.txnSeq,\n txnId: ctx.txnId,\n data,\n })\n }\n }\n\n const listErrorPath = `${listRuntime.errorBasePath}.$list`\n const prevListError = getAtPath(draft, listErrorPath)\n const nextListError = normalizeErrorValue(next.listError)\n\n if (next.touchedListError && !Object.is(prevListError, nextListError)) {\n updates.push({\n errorPath: listErrorPath,\n prev: prevListError,\n next: nextListError,\n stepId: makeStepId(scopeFieldPath),\n })\n }\n\n const rows = next.rows ?? []\n const prevRowsAny = getAtPath(draft, `${listRuntime.errorBasePath}.rows`)\n const prevRows =\n prevRowsAny && typeof prevRowsAny === 'object' && !Array.isArray(prevRowsAny)\n ? (prevRowsAny as any)\n : Array.isArray(prevRowsAny)\n ? 
prevRowsAny\n : undefined\n\n const hasRelevantKeys = (obj: Record<string, unknown>, deps: ReadonlySet<string>): boolean => {\n for (const k in obj) {\n if (!Object.prototype.hasOwnProperty.call(obj, k)) continue\n if (k === '$rowId') continue\n if (deps.has(k)) return true\n }\n return false\n }\n\n const isOnlyRowId = (obj: Record<string, unknown>): boolean => {\n let hasRowId = false\n for (const k in obj) {\n if (!Object.prototype.hasOwnProperty.call(obj, k)) continue\n if (k === '$rowId') {\n hasRowId = true\n continue\n }\n return false\n }\n return hasRowId\n }\n\n const rowErrorPrefix = `${listRuntime.errorBasePath}.rows.`\n\n const indicesToVisit = (() => {\n const set = new Set<number>()\n const addIndex = (k: string): void => {\n const i = Number(k)\n if (!Number.isInteger(i) || i < 0) return\n set.add(i)\n }\n\n // Patch indices (sparse by convention).\n if (rows && typeof rows === 'object') {\n for (const k of Object.keys(rows as any)) {\n const v = (rows as any)[k]\n if (v === undefined) continue\n addIndex(k)\n }\n }\n\n // Existing error indices (sparse if we keep delete-holes on clear).\n if (prevRows && typeof prevRows === 'object') {\n for (const k of Object.keys(prevRows as any)) {\n const v = (prevRows as any)[k]\n if (v === undefined) continue\n addIndex(k)\n }\n }\n\n return Array.from(set).sort((a, b) => a - b)\n })()\n\n for (const index of indicesToVisit) {\n const rowErrorPath = `${rowErrorPrefix}${index}`\n const existing = rowDrafts.get(rowErrorPath)\n\n if (index >= items.length) {\n const prevRow =\n existing?.prev ?? (prevRows ? (prevRows as any)[index] : getAtPath(draft, rowErrorPath))\n if (prevRow === undefined && !existing) continue\n const row =\n existing ??\n getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index), prevRow)\n row.removed = true\n for (const key of Object.keys(row.next)) {\n delete row.next[key]\n }\n continue\n }\n\n const patch = (rows as any)[index]\n const patchObj = isPlainObject(patch) ? 
patch : undefined\n\n if (existing) {\n applyScopedRowPatch(existing, keysFromDeps, patchObj)\n continue\n }\n\n const patchHasRelevant = patchObj && hasRelevantKeys(patchObj as any, keysFromDeps)\n const prevRow = prevRows ? (prevRows as any)[index] : getAtPath(draft, rowErrorPath)\n const prevObj = isPlainObject(prevRow) ? (prevRow as Record<string, unknown>) : undefined\n const prevHasRelevant = prevObj ? hasRelevantKeys(prevObj, keysFromDeps) : false\n const prevOnlyRowId = prevObj ? isOnlyRowId(prevObj) : false\n\n if (patchHasRelevant || prevHasRelevant || prevOnlyRowId) {\n const row = getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index), prevRow)\n applyScopedRowPatch(row, keysFromDeps, patchHasRelevant ? (patchObj as any) : undefined)\n }\n }\n\n continue\n }\n\n continue\n }\n\n // Phase 2: supports list.item scope (\"items[]\" / \"orders.items[]\"), and uses listIndexPath for nested writebacks.\n if (scopeFieldPath.endsWith('[]')) {\n const listPath = scopeFieldPath.slice(0, -2)\n\n const listInstances = (() => {\n if (hasRoot) return enumerateAllListInstances(listPath)\n if (listBindingsAll.has(listPath)) return enumerateAllListInstances(listPath)\n\n const keys = new Set<string>()\n for (const k of listBindingsInstances) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n for (const k of indexBindings.keys()) {\n if (k.startsWith(`${listPath}@@`)) keys.add(k)\n }\n\n if (keys.size === 0) return enumerateAllListInstances(listPath)\n\n const out: Array<ListRuntime> = []\n for (const k of keys) {\n const indexPath = instanceIndexPathByKey.get(k)\n const normalized = normalizeInstanceIndexPath(listPath, indexPath)\n if (!normalized) continue\n const rt = getListRuntime(listPath, normalized)\n if (rt) out.push(rt)\n }\n return out\n })()\n\n const rules = check.meta.rules as Record<string, any>\n const names = Object.keys(rules).sort()\n\n\t for (const listRuntime of listInstances) {\n\t const instanceKey = 
toListInstanceKey(listPath, listRuntime.listIndexPath)\n\t const indices: ReadonlyArray<number> = (() => {\n\t if (hasRoot) return listRuntime.items.map((_, i) => i)\n\n\t // When the batch is scoped by item/field targets, prefer request-derived bindings.\n\t if (!listBindingsAll.has(listPath) && !listBindingsInstances.has(instanceKey)) {\n\t return Array.from(indexBindings.get(instanceKey) ?? [])\n\t }\n\n\t // Ref.list(...) / list-scope validate: derive from txn dirty evidence.\n\t const ev = ctx.txnDirtyEvidence\n\t const listEv = ev?.list\n\t if (!ev || ev.dirtyAll || !listEv) return listRuntime.items.map((_, i) => i)\n\t if (listEv.rootTouched.has(instanceKey)) return listRuntime.items.map((_, i) => i)\n\t const set = listEv.indexBindings.get(instanceKey)\n\t if (!set || set.size === 0) return []\n\t return Array.from(set).sort((a, b) => a - b)\n\t })()\n\n\t if (indices.length === 0) continue\n\n for (const index of indices) {\n if (index < 0 || index >= listRuntime.items.length) continue\n\n const boundValuePath = `${listRuntime.valuePath}.${index}`\n const input = getAtPath(draft, boundValuePath)\n\n const rowErrorPath = `${listRuntime.errorBasePath}.rows.${index}`\n const prevRow = getAtPath(draft, rowErrorPath)\n const prevObj = isPlainObject(prevRow) ? 
(prevRow as Record<string, unknown>) : undefined\n const prevOnlyRowId =\n isPlainObject(prevRow) && Object.keys(prevRow).length === 1 && Object.keys(prevRow)[0] === '$rowId'\n\n let rowDraft: RowDraft | undefined = undefined\n let lockedKeys: Set<string> | undefined = undefined\n\n const ctxForRule: RuleContext = {\n mode,\n state: draft,\n scope: { fieldPath: scopeFieldPath, listPath, listIndexPath: listRuntime.listIndexPath, index },\n }\n\n const ensureRowDraft = (): RowDraft => {\n if (rowDraft) return rowDraft\n rowDraft = getOrCreateRowDraft(listRuntime, index, makeStepId(scopeFieldPath, index))\n return rowDraft\n }\n\n for (const name of names) {\n const rule = rules[name]\n try {\n const out =\n typeof rule === 'function'\n ? rule(input, ctxForRule)\n : rule && typeof rule === 'object'\n ? rule.validate(input, ctxForRule)\n : undefined\n\n if (out === RULE_SKIP) continue\n\n const keys = collectRuleKeysFromDeps(rule, listPath)\n if (keys.length === 0) continue\n\n const patchObj = isPlainObject(out) ? (out as Record<string, unknown>) : undefined\n const patchHasRelevant =\n patchObj && Object.keys(patchObj).some((k) => k !== '$rowId' && keys.includes(k))\n const prevHasRelevant =\n prevObj && Object.keys(prevObj).some((k) => k !== '$rowId' && keys.includes(k))\n\n if (!rowDraft && !patchHasRelevant && !prevHasRelevant && !prevOnlyRowId) {\n continue\n }\n\n const row = ensureRowDraft()\n for (const key of keys) {\n if (key === '$rowId') continue\n if (lockedKeys?.has(key)) continue\n const v = normalizeErrorValue(patchObj?.[key])\n if (v === undefined) {\n delete row.next[key]\n } else {\n row.next[key] = v\n if (!lockedKeys) lockedKeys = new Set<string>()\n lockedKeys.add(key)\n }\n }\n } catch {\n // Rule runtime error: keep it a no-op to avoid producing a partial error tree.\n // Diagnostics and degrade handling are handled by DebugSink/DevtoolsHub in later phases.\n }\n }\n }\n }\n\n continue\n }\n\n const input = scopeFieldPath === '$root' ? 
draft : getAtPath(draft, scopeFieldPath)\n\n const nextError = evalCheck(check, input, {\n mode,\n state: draft,\n scope: { fieldPath: scopeFieldPath },\n })\n if (nextError === RULE_SKIP) continue\n\n const writebackPath = (() => {\n const wb = (check as any)?.meta?.writeback\n const p = wb && typeof wb === 'object' ? (wb as any).path : undefined\n return typeof p === 'string' && p.startsWith('errors.') ? p : undefined\n })()\n\n const errorPath = writebackPath ?? `errors.${scopeFieldPath}`\n const prev = getAtPath(draft, errorPath)\n\n if (!Object.is(prev, nextError)) {\n updates.push({\n errorPath,\n prev,\n next: nextError,\n stepId: makeStepId(scopeFieldPath),\n })\n }\n }\n\n for (const row of rowDrafts.values()) {\n const prevRow = row.prev\n\n const nextRow = (() => {\n if (row.removed) return undefined\n\n delete row.next.$rowId\n const errorKeys = Object.keys(row.next).filter((k) => k !== '$rowId')\n if (errorKeys.length === 0) return undefined\n\n const listRuntime = getListRuntime(row.listPath, row.listIndexPath)\n const item = listRuntime?.items[row.index]\n const rowId = (() => {\n const fromRuntime = listRuntime?.rowIdAt ? listRuntime.rowIdAt(row.index) : undefined\n if (typeof fromRuntime === 'string' && fromRuntime.length > 0) return fromRuntime\n const fromStore = ctx.rowIdStore?.getRowId(row.listPath, row.index, row.parentRowId)\n if (typeof fromStore === 'string' && fromStore.length > 0) return fromStore\n return String(row.index)\n })()\n\n const nextRowRaw: Record<string, unknown> = { $rowId: rowId, ...row.next }\n return isPlainObject(prevRow) && shallowEqualPlainObject(prevRow, nextRowRaw) ? 
prevRow : nextRowRaw\n })()\n\n if (!Object.is(prevRow, nextRow)) {\n updates.push({\n errorPath: row.errorPath,\n prev: prevRow,\n next: nextRow,\n stepId: row.stepId,\n })\n }\n }\n\n if (updates.length === 0) {\n return\n }\n\n const reason: PatchReason = 'unknown'\n\n const prevFormErrorCount =\n draft &&\n typeof draft === 'object' &&\n (draft as any).$form &&\n typeof (draft as any).$form === 'object' &&\n !Array.isArray((draft as any).$form) &&\n typeof (draft as any).$form.errorCount === 'number'\n ? ((draft as any).$form.errorCount as number)\n : undefined\n\n const errorCountDelta =\n prevFormErrorCount === undefined\n ? 0\n : updates.reduce((acc, u) => acc + (countErrorLeaves(u.next) - countErrorLeaves(u.prev)), 0)\n\n const nextState = create(draft, (nextDraft) => {\n for (const u of updates) {\n if (u.next === undefined) {\n unsetAtPathMutating(nextDraft, u.errorPath)\n } else {\n setAtPathMutating(nextDraft, u.errorPath, u.next)\n }\n }\n\n if (prevFormErrorCount !== undefined && errorCountDelta !== 0) {\n const meta = nextDraft.$form\n if (meta && typeof meta === 'object' && !Array.isArray(meta)) {\n meta.errorCount = Math.max(0, prevFormErrorCount + errorCountDelta)\n }\n }\n }) as unknown as S\n\n ctx.setDraft(nextState)\n\n // Patch evidence for dirty-set scheduling:\n // - Nested `errors.*` paths are dynamic (Schema.Any) and are typically not present in FieldPathIdRegistry.\n // - Recording them verbatim would degrade the whole txn to dirtyAll (fallbackPolicy), which is both noisy and expensive.\n // - For derived error writebacks, recording the canonical root `errors` anchor is sufficient.\n const prevErrorsRoot = (draft as any)?.errors\n const nextErrorsRoot = (nextState as any)?.errors\n ctx.recordPatch(['errors'], reason, prevErrorsRoot, nextErrorsRoot, 'trait:validate')\n })\n\n if (traceEvents && traceEvents.length > 0) {\n yield* Effect.forEach(traceEvents, (event) => Debug.record(event), {\n discard: true,\n })\n }\n })\n","import { 
Effect, Fiber, Option } from 'effect'\nimport { create } from 'mutative'\nimport * as EffectOp from '../effect-op.js'\nimport { Snapshot, internal as ResourceInternal, keyHash as hashKey } from '../resource.js'\nimport * as EffectOpCore from '../runtime/core/EffectOpCore.js'\nimport * as Debug from '../runtime/core/DebugSink.js'\nimport * as TaskRunner from '../runtime/core/TaskRunner.js'\nimport { isDevEnv, ReplayModeConfigTag } from '../runtime/core/env.js'\nimport * as ReplayLog from '../runtime/core/ReplayLog.js'\nimport type { PatchReason } from '../runtime/core/StateTransaction.js'\nimport type { FieldPath, FieldPathId } from '../field-path.js'\nimport { normalizeFieldPath } from '../field-path.js'\nimport type { BoundApi } from '../runtime/core/module.js'\nimport { getBoundInternals } from '../runtime/core/runtimeInternalsAccessor.js'\nimport { RunSessionTag } from '../observability/runSession.js'\nimport type { RunSession } from '../observability/runSession.js'\nimport * as DepsTrace from './deps-trace.js'\nimport * as RowId from './rowid.js'\nimport type { StateTraitEntry, StateTraitPlanStep, StateTraitProgram } from './model.js'\nimport type { ServiceMap } from 'effect'\n\nexport interface SourceSyncContext<S> {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly getDraft: () => S\n readonly setDraft: (next: S) => void\n readonly recordPatch: (\n path: string | FieldPath | FieldPathId | undefined,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n stepId?: number,\n ) => void\n}\n\nconst onceInRunSession = (key: string): Effect.Effect<boolean, never, any> =>\n Effect.serviceOption(RunSessionTag as unknown as ServiceMap.Key<any, RunSession>).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.local.once(key) : true)),\n )\n\nconst formatList = (items: ReadonlyArray<string>, limit = 10): string => {\n if (items.length === 0) return ''\n if (items.length <= limit) return items.join(', ')\n return `${items.slice(0, limit).join(', ')}, …(+${items.length - limit})`\n}\n\nconst emitDepsMismatch = (params: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'computed' | 'source'\n readonly fieldPath: string\n readonly diff: DepsTrace.DepsDiff\n}): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const key = `${params.instanceId ?? 'unknown'}::${params.kind}::${params.fieldPath}`\n const shouldEmit = yield* onceInRunSession(`deps_mismatch:${key}`)\n if (!shouldEmit) return\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: params.moduleId,\n instanceId: params.instanceId,\n code: 'state_trait::deps_mismatch',\n severity: 'warning',\n message:\n `[deps] ${params.kind} \"${params.fieldPath}\" declared=[${formatList(params.diff.declared)}] ` +\n `reads=[${formatList(params.diff.reads)}] missing=[${formatList(params.diff.missing)}] ` +\n `unused=[${formatList(params.diff.unused)}]`,\n hint:\n 'deps is the single source of truth for dependencies: incremental scheduling / reverse closures / performance optimizations rely on deps only. ' +\n 'Keep deps consistent with actual reads; if you really depend on the whole object, declare a coarser-grained dep (e.g. \"profile\") to cover sub-field reads.',\n kind: `deps_mismatch:${params.kind}`,\n })\n })\n\nconst getMiddlewareStack = (): Effect.Effect<EffectOpCore.EffectOpMiddlewareEnv['stack'], never, any> =>\n Effect.serviceOption(\n EffectOpCore.EffectOpMiddlewareTag as unknown as ServiceMap.Key<any, EffectOpCore.EffectOpMiddlewareEnv>,\n ).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.stack : [])),\n )\n\nconst recordTraitPatch = (\n bound: BoundApi<any, any>,\n path: string,\n reason: PatchReason,\n from?: unknown,\n to?: unknown,\n traitNodeId?: string,\n): void => {\n const normalized = normalizeFieldPath(path) ?? []\n try {\n const internals = getBoundInternals(bound as any)\n internals.txn.recordStatePatch(normalized, reason, from, to, traitNodeId)\n } catch {\n // no-op for legacy/mocked bound\n }\n}\n\nconst recordReplayEvent = (bound: BoundApi<any, any>, event: ReplayLog.ReplayLogEvent): void => {\n try {\n const internals = getBoundInternals(bound as any)\n internals.txn.recordReplayEvent(event)\n } catch {\n // no-op for legacy/mocked bound\n }\n}\n\nconst getBoundScope = (bound: BoundApi<any, any>): { readonly moduleId?: string; readonly instanceId?: string } => {\n try {\n const internals = getBoundInternals(bound as any)\n return { moduleId: internals.moduleId, instanceId: internals.instanceId }\n } catch {\n return { moduleId: undefined, instanceId: undefined }\n }\n}\n\nconst setSnapshotInTxn = (\n bound: BoundApi<any, any>,\n fieldPath: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n traitNodeId?: string,\n): Effect.Effect<boolean, never, any> =>\n Effect.gen(function* () {\n let wrote = false\n yield* bound.state.mutate((draft) => {\n const prev = RowId.getAtPath(draft, fieldPath)\n if (Object.is(prev, next)) return\n wrote = true\n RowId.setAtPathMutating(draft, fieldPath, next)\n recordTraitPatch(bound, fieldPath, reason, prev, next, traitNodeId)\n })\n return wrote\n })\n\nconst writebackIfCurrentKeyHash = (\n bound: BoundApi<any, any>,\n fieldPath: string,\n keyHash: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n traitNodeId?: string,\n replayEvent?: ReplayLog.ReplayLogEvent,\n): Effect.Effect<boolean, never, any> =>\n Effect.gen(function* () {\n let wrote = false\n yield* bound.state.mutate((draft) => {\n const current = RowId.getAtPath(draft, fieldPath)\n const 
currentKeyHash = current && typeof current === 'object' ? (current as any).keyHash : undefined\n if (currentKeyHash !== keyHash) return\n\n const prev = current\n if (Object.is(prev, next)) return\n\n wrote = true\n RowId.setAtPathMutating(draft, fieldPath, next)\n if (replayEvent) {\n recordReplayEvent(bound, replayEvent)\n }\n recordTraitPatch(bound, fieldPath, reason, prev, next, traitNodeId)\n })\n return wrote\n })\n\n/**\n * syncIdleInTransaction:\n * - Synchronously evaluate all source.key(state) within the transaction window.\n * - If a key becomes empty (undefined), synchronously reset the field to an idle snapshot (avoid tearing).\n */\nexport const syncIdleInTransaction = <S extends object>(\n program: StateTraitProgram<S>,\n ctx: SourceSyncContext<S>,\n): Effect.Effect<void> =>\n Effect.sync(() => {\n const draft = ctx.getDraft() as any\n const updates: Array<{ readonly fieldPath: string; readonly prev: unknown }> = []\n\n for (const entry of program.entries) {\n if (entry.kind !== 'source') continue\n const fieldPath = entry.fieldPath\n const listItem = RowId.parseListItemFieldPath(fieldPath)\n\n if (listItem) {\n // list.item scope: evaluate key per row by index, and synchronously write back idle for inactive rows.\n const listValue = RowId.getAtPath(draft, listItem.listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n\n for (let index = 0; index < items.length; index++) {\n const item = items[index]\n\n let key: unknown\n try {\n key = (entry.meta as any).key(item)\n } catch {\n continue\n }\n\n if (key !== undefined) continue\n\n const concretePath = RowId.toListItemValuePath(listItem.listPath, index, listItem.itemPath)\n const prev = RowId.getAtPath(draft, concretePath)\n const prevStatus = prev && typeof prev === 'object' ? 
(prev as any).status : undefined\n if (prevStatus === 'idle') {\n const data = (prev as any)?.data\n const error = (prev as any)?.error\n if (data === undefined && error === undefined) {\n continue\n }\n }\n\n updates.push({ fieldPath: concretePath, prev })\n }\n\n continue\n }\n\n let key: unknown\n try {\n key = (entry.meta as any).key(draft)\n } catch {\n continue\n }\n\n if (key !== undefined) continue\n\n const prev = RowId.getAtPath(draft, fieldPath)\n const prevStatus = prev && typeof prev === 'object' ? (prev as any).status : undefined\n if (prevStatus === 'idle') {\n // Still ensure data/error are cleared.\n const data = (prev as any)?.data\n const error = (prev as any)?.error\n if (data === undefined && error === undefined) {\n continue\n }\n }\n\n updates.push({ fieldPath, prev })\n }\n\n if (updates.length === 0) return\n\n const reason: PatchReason = 'source-refresh'\n\n const nextDraft = create(draft, (next) => {\n for (const u of updates) {\n RowId.setAtPathMutating(next, u.fieldPath, Snapshot.idle())\n }\n })\n\n ctx.setDraft(nextDraft as S)\n\n for (const u of updates) {\n const normalized = normalizeFieldPath(u.fieldPath) ?? []\n ctx.recordPatch(normalized, reason, u.prev, Snapshot.idle(), `source:${u.fieldPath}:idle`)\n }\n })\n\n/**\n * installSourceRefresh:\n * - Register the refresh implementation for a single source field (ResourceSnapshot + keyHash gate + concurrency).\n */\nexport const installSourceRefresh = <S>(\n bound: BoundApi<any, any>,\n step: StateTraitPlanStep,\n entry: Extract<StateTraitEntry<S, string>, { readonly kind: 'source' }>,\n): Effect.Effect<void, never, any> => {\n if (!step.targetFieldPath) return Effect.void\n\n const fieldPath = step.targetFieldPath\n const resourceId = step.resourceId ?? 
entry.meta.resource\n const listItem = RowId.parseListItemFieldPath(fieldPath)\n\n let internals: ReturnType<typeof getBoundInternals> | undefined\n try {\n internals = getBoundInternals(bound as any)\n } catch {\n return Effect.void\n }\n\n const register = internals.traits.registerSourceRefresh\n\n const recordSnapshot = (\n replayMode: 'live' | 'replay' | 'record',\n replayLog: ReplayLog.ReplayLogService | undefined,\n input:\n | ReplayLog.ReplayLogEvent\n | {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly concurrency?: string\n readonly phase: ReplayLog.ResourceSnapshotPhase\n readonly snapshot: unknown\n },\n ): Effect.Effect<void, never, any> => {\n if (!replayLog) return Effect.void\n if (replayMode !== 'live') return Effect.void\n const event: ReplayLog.ReplayLogEvent =\n input && typeof input === 'object' && (input as any)._tag === 'ResourceSnapshot'\n ? (input as ReplayLog.ReplayLogEvent)\n : {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: (input as any).fieldPath,\n keyHash: (input as any).keyHash,\n concurrency: (input as any).concurrency,\n phase: (input as any).phase,\n snapshot: (input as any).snapshot,\n timestamp: Date.now(),\n moduleId: (input as any).moduleId,\n instanceId: (input as any).instanceId,\n }\n return replayLog.record(event)\n }\n\n // list.item scope: in-flight gating by RowID (avoid writing to the wrong row under insert/remove/reorder).\n if (listItem) {\n const store = internals.traits.rowIdStore as RowId.RowIdStore | undefined\n if (!store) {\n return Effect.void\n }\n\n const listPath = listItem.listPath\n const itemPath = listItem.itemPath\n if (!itemPath) {\n // Never write the snapshot back to the whole item (it would overwrite business values).\n return Effect.void\n }\n\n const concurrency = (entry.meta as any).concurrency as 'switch' | 'exhaust-trailing' | undefined\n const mode = concurrency ?? 
'switch'\n\n const inFlight = new Map<\n RowId.RowId,\n {\n readonly gen: number\n readonly fiber: Fiber.Fiber<void, never>\n readonly keyHash: string\n }\n >()\n const trailing = new Map<RowId.RowId, { readonly key: unknown; readonly keyHash: string }>()\n let gen = 0\n\n // When a row is removed: clear trailing/inFlight references to avoid wrong attribution or memory leaks.\n store.onRemoved(listPath, (rowId) => {\n trailing.delete(rowId)\n inFlight.delete(rowId)\n })\n\n const setSnapshotForRowInTxn = (\n rowId: RowId.RowId,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n ): Effect.Effect<string | undefined, never, any> =>\n Effect.gen(function* () {\n let wrotePath: string | undefined\n yield* bound.state.mutate((draft) => {\n const index = store.getIndex(listPath, rowId)\n if (index === undefined) return\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n const prev = RowId.getAtPath(draft, concretePath)\n if (Object.is(prev, next)) return\n wrotePath = concretePath\n RowId.setAtPathMutating(draft, concretePath, next)\n recordTraitPatch(bound, concretePath, reason, prev, next, step.debugInfo?.graphNodeId)\n })\n return wrotePath\n })\n\n const writebackIfCurrentKeyHashForRow = (\n rowId: RowId.RowId,\n keyHash: string,\n next: unknown,\n reason: PatchReason,\n stepId: string,\n phase?: ReplayLog.ResourceSnapshotPhase,\n ): Effect.Effect<string | undefined, never, any> =>\n Effect.gen(function* () {\n let wrotePath: string | undefined\n yield* bound.state.mutate((draft) => {\n const index = store.getIndex(listPath, rowId)\n if (index === undefined) return\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n\n const current = RowId.getAtPath(draft, concretePath)\n const currentKeyHash = current && typeof current === 'object' ? 
(current as any).keyHash : undefined\n if (currentKeyHash !== keyHash) return\n\n const prev = current\n if (Object.is(prev, next)) return\n\n wrotePath = concretePath\n RowId.setAtPathMutating(draft, concretePath, next)\n if (phase) {\n const { moduleId, instanceId } = getBoundScope(bound)\n recordReplayEvent(bound, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: concretePath,\n keyHash,\n concurrency: mode,\n phase,\n snapshot: next,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n recordTraitPatch(bound, concretePath, reason, prev, next, step.debugInfo?.graphNodeId)\n })\n return wrotePath\n })\n\n const startFetch = (\n rowId: RowId.RowId,\n key: unknown,\n keyHash: string,\n replayMode: 'live' | 'replay' | 'record',\n replayLog: ReplayLog.ReplayLogService | undefined,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n\n const indexForLog = store.getIndex(listPath, rowId)\n const logFieldPath =\n indexForLog === undefined ? 
undefined : RowId.toListItemValuePath(listPath, indexForLog, itemPath)\n\n let loadingSnapshot: unknown = Snapshot.loading({ keyHash })\n if (replayMode === 'replay' && replayLog && logFieldPath) {\n const replayLoading = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath: logFieldPath,\n keyHash,\n phase: 'loading',\n })\n if (replayLoading) {\n loadingSnapshot = replayLoading.snapshot\n }\n }\n const wroteLoadingPath = yield* setSnapshotForRowInTxn(\n rowId,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:loading`,\n )\n if (wroteLoadingPath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteLoadingPath,\n keyHash,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n\n const io = Effect.gen(function* () {\n if (replayMode === 'replay' && replayLog) {\n // Let loading commit become visible first, then replay the settled phase (preserve the async-resource timeline shape).\n yield* Effect.yieldNow\n const consumePath = wroteLoadingPath ?? 
logFieldPath\n if (!consumePath) return yield* Effect.void\n\n const replayed = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath: consumePath,\n keyHash,\n })\n if (!replayed) return yield* Effect.void\n\n if (replayed.phase === 'success') {\n yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:success`,\n 'success',\n )\n } else if (replayed.phase === 'error') {\n yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:error`,\n 'error',\n )\n }\n\n return yield* Effect.void\n }\n\n const stack = yield* getMiddlewareStack()\n\n const registryOpt = yield* Effect.serviceOption(\n ResourceInternal.ResourceRegistryTag as unknown as ServiceMap.Key<any, { specs: Map<string, { load: (key: unknown) => Effect.Effect<any, any, any> }> }>,\n )\n const registry = Option.isSome(registryOpt) ? registryOpt.value : undefined\n const spec = registry?.specs.get(resourceId)\n\n if (!spec) {\n return yield* Effect.void\n }\n\n const loadEffect = (spec.load as any)(key) as Effect.Effect<any, any, any>\n\n const meta: any = {\n moduleId,\n instanceId,\n fieldPath,\n resourceId,\n key,\n keyHash,\n rowId,\n traitNodeId: step.debugInfo?.graphNodeId,\n stepId: step.id,\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag as unknown as ServiceMap.Key<any, RunSession>)\n if (Option.isSome(sessionOpt)) {\n const seqKey = instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', seqKey)\n }\n }\n\n const op = EffectOp.make<any, any, any>({\n kind: 'service',\n name: resourceId,\n effect: loadEffect,\n meta,\n })\n\n const exit = yield* Effect.exit(EffectOp.run(op, stack))\n\n if (exit._tag === 'Success') {\n const successSnapshot = Snapshot.success({ keyHash, data: exit.value })\n const wroteSuccessPath = yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n successSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:success`,\n 'success',\n )\n if (wroteSuccessPath) {\n yield* recordSnapshot(replayMode, replayLog, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteSuccessPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: successSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n } else {\n const errorSnapshot = Snapshot.error({ keyHash, error: exit.cause })\n const wroteErrorPath = yield* writebackIfCurrentKeyHashForRow(\n rowId,\n keyHash,\n errorSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:error`,\n 'error',\n )\n if (wroteErrorPath) {\n yield* recordSnapshot(replayMode, replayLog, {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteErrorPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: errorSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n })\n }\n }\n }).pipe(Effect.catchCause(() => Effect.void))\n\n // list.item: IO fibers must detach from the sync-transaction FiberRef; otherwise they'd be misclassified as \"in txn window\"\n // and block subsequent writeback entrypoints.\n const fiber = yield* Effect.forkScoped(Effect.provideService(io, TaskRunner.inSyncTransactionFiber, false))\n const myGen = (gen += 1)\n inFlight.set(rowId, { gen: myGen, fiber, keyHash })\n\n yield* Effect.forkScoped(\n Effect.provideService(Fiber.await(fiber).pipe(\n Effect.flatMap(() => Effect.sync(() => {\n const current = inFlight.get(rowId)\n if (current && current.gen === 
myGen) {\n inFlight.delete(rowId)\n }\n })),\n Effect.flatMap(() => mode === 'exhaust-trailing'\n ? Effect.gen(function* () {\n const next = trailing.get(rowId)\n trailing.delete(rowId)\n if (next) {\n yield* startFetch(rowId, next.key, next.keyHash, replayMode, replayLog)\n }\n })\n : Effect.void),\n Effect.catchCause(() => Effect.void),\n ), TaskRunner.inSyncTransactionFiber, false),\n )\n })\n\n register(fieldPath, (state: any) =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n const replayModeOpt = yield* Effect.serviceOption(\n ReplayModeConfigTag as unknown as ServiceMap.Key<any, { mode: 'live' | 'record' | 'replay' }>,\n )\n const replayMode = Option.isSome(replayModeOpt) ? replayModeOpt.value.mode : 'live'\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog as unknown as ServiceMap.Key<any, ReplayLog.ReplayLogService>)\n const replayLog = Option.isSome(replayLogOpt) ? replayLogOpt.value : undefined\n const force = yield* Effect.service(TaskRunner.forceSourceRefresh).pipe(Effect.orDie)\n\n const listValue = RowId.getAtPath(state, listPath)\n const items: ReadonlyArray<unknown> = Array.isArray(listValue) ? listValue : []\n const ids = store.ensureList(listPath, items)\n\n // dev-mode: trace deps once for the first row (diagnostics only; does not affect execution semantics).\n const traceKey = `${instanceId ?? 'unknown'}::source::${fieldPath}`\n if (isDevEnv() && (yield* onceInRunSession(`deps_trace_settled:${traceKey}`))) {\n try {\n const sample = items[0]\n if (sample !== undefined) {\n const traced = DepsTrace.trace((s) => (entry.meta as any).key(s), sample as any)\n const prefixedReads = traced.reads.map((r) => (r ? `${listPath}[].${r}` : `${listPath}[]`))\n const diff = DepsTrace.diffDeps(((entry.meta as any).deps ?? 
[]) as ReadonlyArray<string>, prefixedReads)\n if (diff) {\n yield* emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'source',\n fieldPath,\n diff,\n })\n }\n }\n } catch {\n // tracing failure should never break refresh flow\n }\n }\n\n for (let index = 0; index < items.length; index++) {\n const rowId = ids[index]\n if (!rowId) continue\n\n const concretePath = RowId.toListItemValuePath(listPath, index, itemPath)\n const prevSnapshot = RowId.getAtPath(state, concretePath) as any\n\n let key: unknown\n try {\n key = (entry.meta as any).key(items[index])\n } catch {\n key = undefined\n }\n\n const current = inFlight.get(rowId)\n\n if (key === undefined) {\n trailing.delete(rowId)\n inFlight.delete(rowId)\n\n // If it's already clean idle, avoid redundant writeback (prevents meaningless patches and UI jitter).\n if (\n prevSnapshot &&\n typeof prevSnapshot === 'object' &&\n prevSnapshot.status === 'idle' &&\n prevSnapshot.data === undefined &&\n prevSnapshot.error === undefined\n ) {\n continue\n }\n\n const idleSnapshot = Snapshot.idle()\n const wroteIdlePath = yield* setSnapshotForRowInTxn(\n rowId,\n idleSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:idle`,\n )\n if (wroteIdlePath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteIdlePath,\n keyHash: undefined,\n concurrency: mode,\n phase: 'idle',\n snapshot: idleSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n continue\n }\n\n const h = hashKey(key)\n\n // keyHash unchanged: avoid redundant refresh while keeping in-flight.\n if (!force && current && current.keyHash === h) {\n continue\n }\n\n // Not in-flight: if snapshot.keyHash already matches, treat it as already up-to-date (avoid full refresh and row jitter).\n const prevKeyHash =\n prevSnapshot && typeof prevSnapshot === 'object' ? 
(prevSnapshot as any).keyHash : undefined\n if (!force && !current && prevKeyHash === h) {\n continue\n }\n\n if (mode === 'exhaust-trailing' && current) {\n trailing.set(rowId, { key, keyHash: h })\n const loadingSnapshot = Snapshot.loading({ keyHash: h })\n const wroteLoadingPath = yield* setSnapshotForRowInTxn(\n rowId,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:${rowId}:loading`,\n )\n if (wroteLoadingPath) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath: wroteLoadingPath,\n keyHash: h,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n continue\n }\n\n if (mode === 'switch' && current) {\n // Do not rely on cancellation correctness: stale writebacks are dropped by the keyHash gate.\n trailing.delete(rowId)\n inFlight.delete(rowId)\n }\n\n yield* startFetch(rowId, key, h, replayMode, replayLog)\n }\n }),\n )\n\n return Effect.void\n }\n\n // in-flight state (per field)\n let inFlight:\n | {\n readonly gen: number\n readonly fiber: Fiber.Fiber<void, never>\n readonly keyHash: string\n }\n | undefined\n let gen = 0\n let trailing: { readonly key: unknown; readonly keyHash: string } | undefined\n\n const concurrency = (entry.meta as any).concurrency as 'switch' | 'exhaust-trailing' | undefined\n const mode = concurrency ?? 
'switch'\n\n const startFetch = (\n key: unknown,\n keyHash: string,\n replayMode: 'live' | 'replay' | 'record',\n replayLog: ReplayLog.ReplayLogService | undefined,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n\n // 1) pending: synchronously write a loading snapshot (within the current transaction window).\n let loadingSnapshot: unknown = Snapshot.loading({ keyHash })\n if (replayMode === 'replay' && replayLog) {\n const replayLoading = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath,\n keyHash,\n phase: 'loading',\n })\n if (replayLoading) {\n loadingSnapshot = replayLoading.snapshot\n }\n }\n const wroteLoading = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:loading`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteLoading) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n\n // 2) IO: run in a background fiber (avoid blocking the current transaction).\n const io = Effect.gen(function* () {\n if (replayMode === 'replay' && replayLog) {\n // Let loading commit become visible first, then replay the settled phase (preserve the async-resource timeline shape).\n yield* Effect.yieldNow\n const replayed = yield* replayLog.consumeNextResourceSnapshot({\n resourceId,\n fieldPath,\n keyHash,\n })\n if (!replayed) return yield* Effect.void\n\n if (replayed.phase === 'success') {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: replayed.snapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n yield* 
writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:success`,\n step.debugInfo?.graphNodeId,\n event,\n )\n } else if (replayed.phase === 'error') {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: replayed.snapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n replayed.snapshot,\n 'source-refresh',\n `source:${fieldPath}:error`,\n step.debugInfo?.graphNodeId,\n event,\n )\n }\n\n return yield* Effect.void\n }\n\n const stack = yield* getMiddlewareStack()\n\n const registryOpt = yield* Effect.serviceOption(ResourceInternal.ResourceRegistryTag)\n const registry = Option.isSome(registryOpt) ? registryOpt.value : undefined\n const spec = registry?.specs.get(resourceId)\n\n if (!spec) {\n return yield* Effect.void\n }\n\n const loadEffect = (spec.load as any)(key) as Effect.Effect<any, any, any>\n\n const meta: any = {\n moduleId,\n instanceId,\n fieldPath,\n resourceId,\n key,\n keyHash,\n traitNodeId: step.debugInfo?.graphNodeId,\n stepId: step.id,\n }\n\n if (!(typeof meta.opSeq === 'number' && Number.isFinite(meta.opSeq))) {\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n if (Option.isSome(sessionOpt)) {\n const seqKey = instanceId ?? 
'global'\n meta.opSeq = sessionOpt.value.local.nextSeq('opSeq', seqKey)\n }\n }\n\n const op = EffectOp.make<any, any, any>({\n kind: 'trait-source',\n name: resourceId,\n effect: loadEffect,\n meta,\n })\n\n const exit = yield* Effect.exit(EffectOp.run(op, stack))\n\n // 3) writeback: use a keyHash gate to prevent stale results from writing back onto a new key.\n if (exit._tag === 'Success') {\n const successSnapshot = Snapshot.success({ keyHash, data: exit.value })\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'success',\n snapshot: successSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n const wroteSuccess = yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n successSnapshot,\n 'source-refresh',\n `source:${fieldPath}:success`,\n step.debugInfo?.graphNodeId,\n event,\n )\n if (wroteSuccess) {\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n } else {\n const errorSnapshot = Snapshot.error({ keyHash, error: exit.cause })\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash,\n concurrency: mode,\n phase: 'error',\n snapshot: errorSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n const wroteError = yield* writebackIfCurrentKeyHash(\n bound,\n fieldPath,\n keyHash,\n errorSnapshot,\n 'source-refresh',\n `source:${fieldPath}:error`,\n step.debugInfo?.graphNodeId,\n event,\n )\n if (wroteError) {\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n }\n }).pipe(Effect.catchCause(() => Effect.void))\n\n // Do not wait for IO completion: forkScoped into the runtime scope so unmount will interrupt automatically.\n const fiber = yield* Effect.forkScoped(Effect.provideService(io, TaskRunner.inSyncTransactionFiber, false))\n const myGen = (gen += 1)\n inFlight = { gen: myGen, fiber, keyHash }\n\n // After in-flight completes, clean up; in exhaust-trailing mode, 
run one trailing fetch if present.\n yield* Effect.forkScoped(\n Effect.provideService(Fiber.await(fiber).pipe(\n Effect.flatMap(() => Effect.sync(() => {\n if (inFlight && inFlight.gen === myGen) {\n inFlight = undefined\n }\n })),\n Effect.flatMap(() => mode === 'exhaust-trailing'\n ? Effect.gen(function* () {\n const next = trailing\n trailing = undefined\n if (next) {\n yield* startFetch(next.key, next.keyHash, replayMode, replayLog)\n }\n })\n : Effect.void),\n Effect.catchCause(() => Effect.void),\n ), TaskRunner.inSyncTransactionFiber, false),\n )\n })\n\n register(fieldPath, (state: any) =>\n Effect.gen(function* () {\n const { moduleId, instanceId } = getBoundScope(bound)\n const replayModeOpt = yield* Effect.serviceOption(\n ReplayModeConfigTag as unknown as ServiceMap.Key<any, { mode: 'live' | 'record' | 'replay' }>,\n )\n const replayMode = Option.isSome(replayModeOpt) ? replayModeOpt.value.mode : 'live'\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog as unknown as ServiceMap.Key<any, ReplayLog.ReplayLogService>)\n const replayLog = Option.isSome(replayLogOpt) ? replayLogOpt.value : undefined\n const force = yield* Effect.service(TaskRunner.forceSourceRefresh).pipe(Effect.orDie)\n\n let key: unknown\n try {\n key = (entry.meta as any).key(state)\n } catch {\n key = undefined\n }\n\n // dev-mode: detect mismatch between actual reads in keySelector and declared deps (diagnostics only; does not affect execution semantics).\n const traceKey = `${instanceId ?? 'unknown'}::source::${fieldPath}`\n if (isDevEnv() && (yield* onceInRunSession(`deps_trace_settled:${traceKey}`))) {\n try {\n const traced = DepsTrace.trace((s) => (entry.meta as any).key(s), state)\n const diff = DepsTrace.diffDeps(((entry.meta as any).deps ?? 
[]) as ReadonlyArray<string>, traced.reads)\n if (diff) {\n yield* emitDepsMismatch({\n moduleId,\n instanceId,\n kind: 'source',\n fieldPath,\n diff,\n })\n }\n } catch {\n // tracing failure should never break refresh flow\n }\n }\n\n // Key becomes empty: synchronously clear to idle (and interrupt in-flight).\n if (key === undefined) {\n if (inFlight) {\n yield* Fiber.interrupt(inFlight.fiber)\n inFlight = undefined\n }\n trailing = undefined\n\n const idleSnapshot = Snapshot.idle()\n const wroteIdle = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n idleSnapshot,\n 'source-refresh',\n `source:${fieldPath}:idle`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteIdle) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash: undefined,\n concurrency: mode,\n phase: 'idle',\n snapshot: idleSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n return\n }\n\n const h = hashKey(key)\n\n // Default semantics: when a non-idle snapshot already exists for the same keyHash, refresh should be a no-op when possible\n // (avoid duplicate IO/writeback). Explicit refresh/invalidate can bypass via force.\n if (!force) {\n if (inFlight && inFlight.keyHash === h) {\n return\n }\n\n const currentSnapshot = RowId.getAtPath(state, fieldPath) as any\n const currentKeyHash =\n currentSnapshot && typeof currentSnapshot === 'object' ? (currentSnapshot as any).keyHash : undefined\n const currentStatus =\n currentSnapshot && typeof currentSnapshot === 'object' ? 
(currentSnapshot as any).status : undefined\n if (currentStatus && currentStatus !== 'idle' && currentKeyHash === h) {\n return\n }\n }\n\n if (mode === 'exhaust-trailing' && inFlight) {\n // Busy: record trailing and update loading immediately; stale in-flight writebacks will be blocked by the keyHash gate.\n trailing = { key, keyHash: h }\n const loadingSnapshot = Snapshot.loading({ keyHash: h })\n const wroteLoading = yield* setSnapshotInTxn(\n bound,\n fieldPath,\n loadingSnapshot,\n 'source-refresh',\n `source:${fieldPath}:loading`,\n step.debugInfo?.graphNodeId,\n )\n if (wroteLoading) {\n const event: ReplayLog.ReplayLogEvent = {\n _tag: 'ResourceSnapshot',\n resourceId,\n fieldPath,\n keyHash: h,\n concurrency: mode,\n phase: 'loading',\n snapshot: loadingSnapshot,\n timestamp: Date.now(),\n moduleId,\n instanceId,\n }\n recordReplayEvent(bound, event)\n yield* recordSnapshot(replayMode, replayLog, event)\n }\n return\n }\n\n if (mode === 'switch' && inFlight) {\n yield* Fiber.interrupt(inFlight.fiber)\n inFlight = undefined\n trailing = undefined\n }\n\n // start fetch (pending tx + fork IO)\n yield* startFetch(key, h, replayMode, replayLog)\n }),\n )\n\n return Effect.void\n}\n","import { Layer, ServiceMap } from 'effect'\nimport { isDevEnv } from './runtime/core/env.js'\n\nexport interface ResourceSpec<Key, Out, Err, Env> {\n readonly id: string\n readonly keySchema: import('effect').Schema.Schema<Key>\n readonly load: (key: Key) => import('effect').Effect.Effect<Out, Err, Env>\n readonly meta?: {\n readonly cacheGroup?: string\n readonly description?: string\n readonly [k: string]: unknown\n }\n}\n\nexport type AnyResourceSpec = ResourceSpec<any, any, any, any>\n\nexport type ResourceStatus = 'idle' | 'loading' | 'success' | 'error'\n\nexport interface ResourceSnapshot<Data = unknown, Err = unknown> {\n readonly status: ResourceStatus\n readonly keyHash?: string\n readonly data?: Data\n readonly error?: Err\n}\n\nconst stableStringify = (value: 
unknown): string => {\n const seen = new WeakSet<object>()\n const encode = (input: unknown): unknown => {\n if (input === null) return null\n if (typeof input === 'string' || typeof input === 'number' || typeof input === 'boolean') {\n return input\n }\n if (typeof input === 'bigint') return input.toString()\n if (typeof input === 'undefined') return '__undefined__'\n if (typeof input === 'symbol') return `__symbol__:${String(input)}`\n if (typeof input === 'function') return '__function__'\n\n if (Array.isArray(input)) {\n return input.map((v) => encode(v))\n }\n if (input instanceof Date) {\n return `__date__:${input.toISOString()}`\n }\n if (input instanceof Error) {\n return {\n _tag: 'Error',\n name: input.name,\n message: input.message,\n }\n }\n if (input && typeof input === 'object') {\n const obj = input as object\n if (seen.has(obj)) return '__cycle__'\n seen.add(obj)\n\n const record = input as Record<string, unknown>\n const keys = Object.keys(record).sort()\n const out: Record<string, unknown> = {}\n for (const k of keys) {\n out[k] = encode(record[k])\n }\n return out\n }\n return String(input)\n }\n\n try {\n return JSON.stringify(encode(value))\n } catch {\n return String(value)\n }\n}\n\nexport const keyHash = (key: unknown): string => stableStringify(key)\n\nexport const Snapshot = {\n idle: <Data = never, Err = never>(): ResourceSnapshot<Data, Err> => ({\n status: 'idle',\n keyHash: undefined,\n data: undefined,\n error: undefined,\n }),\n loading: <Data = never, Err = never>(params: { readonly keyHash: string }): ResourceSnapshot<Data, Err> => ({\n status: 'loading',\n keyHash: params.keyHash,\n data: undefined,\n error: undefined,\n }),\n success: <Data>(params: { readonly keyHash: string; readonly data: Data }): ResourceSnapshot<Data, never> => ({\n status: 'success',\n keyHash: params.keyHash,\n data: params.data,\n error: undefined,\n }),\n error: <Err>(params: { readonly keyHash: string; readonly error: Err }): ResourceSnapshot<never, Err> 
=> ({\n status: 'error',\n keyHash: params.keyHash,\n data: undefined,\n error: params.error,\n }),\n} as const\n\nexport interface ResourceRegistry {\n readonly specs: ReadonlyMap<string, AnyResourceSpec>\n}\n\nexport class ResourceRegistryTag extends ServiceMap.Service<\n ResourceRegistryTag,\n ResourceRegistry\n>()('@logixjs/core/ResourceRegistry') {}\n\nexport const internal = {\n ResourceRegistryTag,\n}\n\nexport type Spec<Key, Out, Err, Env> = ResourceSpec<Key, Out, Err, Env>\n\nexport const make = <Key, Out, Err, Env>(spec: ResourceSpec<Key, Out, Err, Env>): ResourceSpec<Key, Out, Err, Env> =>\n spec\n\nexport const layer = (specs: ReadonlyArray<AnyResourceSpec>): Layer.Layer<ResourceRegistryTag, never, never> =>\n Layer.succeed(\n ResourceRegistryTag,\n (() => {\n const map = new Map<string, AnyResourceSpec>()\n for (const spec of specs) {\n if (isDevEnv() && map.has(spec.id) && map.get(spec.id) !== spec) {\n throw new Error(`[Resource.layer] Duplicate resource id \"${spec.id}\" detected in the same runtime scope`)\n }\n map.set(spec.id, spec)\n }\n return { specs: map }\n })(),\n )\n","import { Effect, Layer, ServiceMap } from 'effect'\n\nexport type ResourceSnapshotPhase = 'idle' | 'loading' | 'success' | 'error'\n\nexport type ReplayLogEvent =\n | {\n readonly _tag: 'ResourceSnapshot'\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n /**\n * Optional: source concurrency policy (e.g. 
\"switch\" / \"exhaust-trailing\").\n * - Must remain slim & serializable.\n * - Used by Devtools/replay to explain why old results are dropped / why trailing happens.\n */\n readonly concurrency?: string\n readonly phase: ResourceSnapshotPhase\n readonly snapshot: unknown\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n }\n | {\n readonly _tag: 'InvalidateRequest'\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'resource' | 'query'\n readonly target: string\n readonly meta?: unknown\n }\n\nexport type ResourceSnapshotEvent = Extract<ReplayLogEvent, { readonly _tag: 'ResourceSnapshot' }>\n\nexport interface ReplayLogService {\n readonly record: (event: ReplayLogEvent) => Effect.Effect<void>\n readonly snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>>\n readonly resetCursor: Effect.Effect<void>\n readonly consumeNext: (predicate: (event: ReplayLogEvent) => boolean) => Effect.Effect<ReplayLogEvent | undefined>\n readonly consumeNextResourceSnapshot: (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }) => Effect.Effect<ResourceSnapshotEvent | undefined>\n}\n\nexport class ReplayLog extends ServiceMap.Service<ReplayLog, ReplayLogService>()('@logixjs/core/ReplayLog') {}\n\nexport const make = (initial?: ReadonlyArray<ReplayLogEvent>): ReplayLogService => {\n const events: Array<ReplayLogEvent> = initial ? 
Array.from(initial) : []\n let cursor = 0\n\n const consumeNext = (predicate: (event: ReplayLogEvent) => boolean): Effect.Effect<ReplayLogEvent | undefined> =>\n Effect.sync(() => {\n for (let i = cursor; i < events.length; i++) {\n const event = events[i]\n if (!predicate(event)) continue\n cursor = i + 1\n return event\n }\n return undefined\n })\n\n const consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }): Effect.Effect<ResourceSnapshotEvent | undefined> =>\n consumeNext((event): event is ResourceSnapshotEvent => {\n if (event._tag !== 'ResourceSnapshot') return false\n if (event.resourceId !== params.resourceId) return false\n if (event.fieldPath !== params.fieldPath) return false\n if (params.keyHash !== undefined && event.keyHash !== params.keyHash) {\n return false\n }\n if (params.phase !== undefined && event.phase !== params.phase) {\n return false\n }\n return true\n }).pipe(Effect.map((event) => event as ResourceSnapshotEvent | undefined))\n\n return {\n record: (event) => Effect.sync(() => events.push(event)),\n snapshot: Effect.sync(() => events.slice()),\n resetCursor: Effect.sync(() => {\n cursor = 0\n }),\n consumeNext,\n consumeNextResourceSnapshot,\n }\n}\n\nexport const layer = (initial?: ReadonlyArray<ReplayLogEvent>): Layer.Layer<ReplayLog, never, never> =>\n Layer.succeed(ReplayLog, make(initial))\n\nexport const record = (event: ReplayLogEvent): Effect.Effect<void, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.record(event)\n })\n\nexport const snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.snapshot\n})\n\nexport const resetCursor: Effect.Effect<void, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.resetCursor\n})\n\nexport const 
consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n}): Effect.Effect<ResourceSnapshotEvent | undefined, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.consumeNextResourceSnapshot(params)\n })\n","import { Deferred, Effect, Ref, Scope, ServiceMap } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport type { StateTransactionOverrides } from './env.js'\nimport type { ResolvedConcurrencyPolicy } from './ModuleRuntime.concurrencyPolicy.js'\n\nexport type TxnLane = 'urgent' | 'nonUrgent'\n\nexport type TxnQueueStartMode = 'direct_idle' | 'direct_handoff' | 'post_visibility_window'\n\nexport type TxnQueueStartTrace = {\n readonly lane: TxnLane\n readonly waiterSeq: number\n readonly enqueueAtMs: number\n readonly startAtMs: number\n readonly queueWaitMs: number\n readonly startMode: TxnQueueStartMode\n readonly visibilityWindowMs?: number\n readonly previousCompletedLane?: TxnLane\n readonly activeLaneAtEnqueue?: TxnLane\n readonly queueDepthAtStart: {\n readonly urgent: number\n readonly nonUrgent: number\n }\n}\n\nexport type TxnQueuePhaseTiming = {\n readonly lane: TxnLane\n readonly waiterSeq: number\n readonly contextLookupMs: number\n readonly resolvePolicyMs: number\n readonly backpressureMs: number\n readonly enqueueBookkeepingMs: number\n readonly queueWaitMs: number\n readonly startHandoffMs: number\n readonly startMode?: TxnQueueStartMode\n readonly activeLaneAtEnqueue?: TxnLane\n readonly previousCompletedLane?: TxnLane\n readonly queueDepthAtStart?: {\n readonly urgent: number\n readonly nonUrgent: number\n }\n}\n\nexport const currentTxnQueuePhaseTiming = ServiceMap.Reference<TxnQueuePhaseTiming | 
undefined>(\n '@logixjs/core/TxnQueue.currentTxnQueuePhaseTiming',\n {\n defaultValue: () => undefined,\n },\n)\n\nexport interface EnqueueTransaction {\n <A, E>(eff: Effect.Effect<A, E, never>): Effect.Effect<A, E, never>\n <A, E>(lane: TxnLane, eff: Effect.Effect<A, E, never>): Effect.Effect<A, E, never>\n}\n\ntype BackpressureState = {\n readonly backlogCount: number\n readonly waiters: number\n readonly signal: Deferred.Deferred<void>\n}\n\ntype BacklogAcquireAttempt =\n | { readonly _tag: 'acquired' }\n | {\n readonly _tag: 'wait'\n readonly backlogCount: number\n readonly signal: Deferred.Deferred<void>\n }\n\ntype QueueWaiter = {\n readonly lane: TxnLane\n readonly start: Deferred.Deferred<void>\n readonly waiterSeq: number\n readonly enqueueAtMs: number\n readonly activeLaneAtEnqueue?: TxnLane\n}\n\nexport type CapturedTxnRuntimeScope = {\n readonly runtimeLabel: string | undefined\n readonly diagnosticsLevel: Debug.DiagnosticsLevel\n readonly debugSinks: ReadonlyArray<Debug.Sink>\n readonly overrides: StateTransactionOverrides | undefined\n}\n\n/**\n * Builds a \"single-instance transaction queue\":\n * - All entry points (dispatch/source-refresh/...) execute serially through the same FIFO queue.\n * - Callers still experience the entry as a single Effect (preserving the existing API shape).\n * - Tasks must \"never fail\", otherwise the queue consumer fiber would deadlock (so we return results via Deferred/Exit).\n *\n * NOTE: transaction execution happens inside a background queue fiber. 
To support Provider-local overrides (Tag/Layer)\n * and diagnostics tiers (FiberRef) at the call site, we capture minimal context at enqueue-time and re-provide it to the task.\n */\nexport const makeEnqueueTransaction = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n readonly resolveConcurrencyPolicy: () => Effect.Effect<ResolvedConcurrencyPolicy>\n readonly diagnostics: ConcurrencyDiagnostics\n}): Effect.Effect<EnqueueTransaction, never, Scope.Scope> =>\n Effect.gen(function* () {\n const diagnostics = args.diagnostics\n\n const initialUrgentSignal = yield* Deferred.make<void>()\n const urgentStateRef = yield* Ref.make<BackpressureState>({\n backlogCount: 0,\n waiters: 0,\n signal: initialUrgentSignal,\n })\n\n const initialNonUrgentSignal = yield* Deferred.make<void>()\n const nonUrgentStateRef = yield* Ref.make<BackpressureState>({\n backlogCount: 0,\n waiters: 0,\n signal: initialNonUrgentSignal,\n })\n\n const release = (stateRef: Ref.Ref<BackpressureState>) =>\n Effect.gen(function* () {\n let prevSignal: Deferred.Deferred<void> | undefined\n const nextSignal = yield* Deferred.make<void>()\n yield* Ref.update(stateRef, (s) => {\n const nextBacklogCount = s.backlogCount > 0 ? 
s.backlogCount - 1 : 0\n if (s.waiters <= 0) {\n return {\n backlogCount: nextBacklogCount,\n waiters: 0,\n signal: s.signal,\n }\n }\n prevSignal = s.signal\n return {\n backlogCount: nextBacklogCount,\n waiters: s.waiters,\n signal: nextSignal,\n }\n })\n if (prevSignal) {\n yield* Deferred.succeed(prevSignal, undefined)\n }\n })\n\n const acquireBacklogSlot = (lane: TxnLane, capacity: number, policy: ResolvedConcurrencyPolicy): Effect.Effect<void> =>\n Effect.gen(function* () {\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n if (inTxn) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'state_transaction::enqueue_in_transaction',\n severity: 'error',\n message:\n 'enqueueTransaction is not allowed inside a synchronous StateTransaction body (it may deadlock or violate backpressure constraints).',\n hint: 'Move dispatch/setState calls outside the transaction window, or use a multi-entry pattern (pending → IO → writeback).',\n kind: 'enqueue_in_transaction',\n })\n yield* Effect.die(new Error('enqueueTransaction is not allowed inside a synchronous StateTransaction body'))\n }\n\n const stateRef = lane === 'urgent' ? 
urgentStateRef : nonUrgentStateRef\n\n let waitedFromMs: number | undefined\n while (true) {\n const attempt = yield* Ref.modify(stateRef, (s): readonly [BacklogAcquireAttempt, BackpressureState] => {\n if (s.backlogCount < capacity) {\n return [\n { _tag: 'acquired' },\n {\n backlogCount: s.backlogCount + 1,\n waiters: s.waiters,\n signal: s.signal,\n },\n ] as const\n }\n\n return [\n { _tag: 'wait', backlogCount: s.backlogCount, signal: s.signal },\n {\n backlogCount: s.backlogCount,\n waiters: s.waiters + 1,\n signal: s.signal,\n },\n ] as const\n })\n\n if (attempt._tag === 'acquired') {\n return\n }\n\n yield* Effect.uninterruptibleMask((restore) =>\n Effect.ensuring(\n restore(\n Effect.gen(function* () {\n const now = Date.now()\n if (waitedFromMs === undefined) {\n waitedFromMs = now\n }\n const saturatedDurationMs = now - waitedFromMs\n\n yield* diagnostics.emitPressureIfNeeded({\n policy,\n trigger: { kind: 'txnQueue', name: `enqueueTransaction.${lane}` },\n backlogCount: attempt.backlogCount,\n saturatedDurationMs,\n })\n\n yield* Deferred.await(attempt.signal)\n }),\n ),\n Ref.update(stateRef, (s) => ({\n backlogCount: s.backlogCount,\n waiters: s.waiters > 0 ? 
s.waiters - 1 : 0,\n signal: s.signal,\n })),\n ),\n )\n }\n })\n\n // Priority FIFO queue:\n // - Runs each transaction Effect on the *caller fiber* (preserves Provider-local overrides + FiberRef diagnostics).\n // - Serializes per-instance transactions (at most one active at a time).\n // - Urgent lane always wins over non-urgent lane when choosing the next task.\n //\n // Observation-first note:\n // - We keep the scheduler semantics unchanged here.\n // - The only added surface is a slim `trace:txn-queue` start event so perf suites can distinguish\n // \"urgent enqueued late\" vs \"urgent already queued but waited for baton\".\n let currentWaiter: QueueWaiter | undefined = undefined\n let currentLane: TxnLane | undefined = undefined\n let lastCompletedLane: TxnLane | undefined = undefined\n let nextWaiterSeq = 0\n const startTraceByStart = new Map<Deferred.Deferred<void>, TxnQueueStartTrace>()\n const urgentWaitQueue: Array<QueueWaiter> = []\n const nonUrgentWaitQueue: Array<QueueWaiter> = []\n\n const readClockMs = (): number => {\n const perf = globalThis.performance\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n }\n\n const nextQueueWaiterSeq = (): number => {\n nextWaiterSeq += 1\n return nextWaiterSeq\n }\n\n const pickNextWaiter = (): QueueWaiter | undefined => {\n if (urgentWaitQueue.length > 0) {\n return urgentWaitQueue.shift()\n }\n if (nonUrgentWaitQueue.length > 0) {\n return nonUrgentWaitQueue.shift()\n }\n return undefined\n }\n\n const recordStartTrace = (waiter: QueueWaiter, startMode: TxnQueueStartMode): void => {\n const startAtMs = readClockMs()\n startTraceByStart.set(waiter.start, {\n lane: waiter.lane,\n waiterSeq: waiter.waiterSeq,\n enqueueAtMs: waiter.enqueueAtMs,\n startAtMs,\n queueWaitMs: Math.max(0, startAtMs - waiter.enqueueAtMs),\n startMode,\n ...(lastCompletedLane ? { previousCompletedLane: lastCompletedLane } : {}),\n ...(waiter.activeLaneAtEnqueue ? 
{ activeLaneAtEnqueue: waiter.activeLaneAtEnqueue } : {}),\n queueDepthAtStart: {\n urgent: urgentWaitQueue.length,\n nonUrgent: nonUrgentWaitQueue.length,\n },\n })\n }\n\n const removeWaiter = (lane: TxnLane, start: Deferred.Deferred<void>): void => {\n const q = lane === 'urgent' ? urgentWaitQueue : nonUrgentWaitQueue\n for (let i = 0; i < q.length; i += 1) {\n if (q[i]?.start === start) {\n q.splice(i, 1)\n return\n }\n }\n }\n\n const emitStartTrace = (\n diagnosticsLevel: Debug.DiagnosticsLevel,\n trace: TxnQueueStartTrace | undefined,\n ): Effect.Effect<void> =>\n !trace\n ? Effect.void\n : diagnosticsLevel === 'off'\n ? Effect.void\n : Debug.record({\n type: 'trace:txn-queue',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n data: trace,\n })\n\n const enqueueAndMaybeStart = (waiter: QueueWaiter): Effect.Effect<void> =>\n Effect.gen(function* () {\n let toStart: QueueWaiter | undefined\n yield* Effect.uninterruptible(\n Effect.sync(() => {\n if (waiter.lane === 'urgent') {\n urgentWaitQueue.push(waiter)\n } else {\n nonUrgentWaitQueue.push(waiter)\n }\n\n if (!currentWaiter) {\n const next = pickNextWaiter()\n if (next) {\n currentWaiter = next\n currentLane = next.lane\n recordStartTrace(next, lastCompletedLane ? 
'direct_handoff' : 'direct_idle')\n toStart = next\n }\n }\n }),\n )\n if (toStart) {\n yield* Deferred.succeed(toStart.start, undefined)\n }\n })\n\n const advanceQueue = (lane: TxnLane, start: Deferred.Deferred<void>): Effect.Effect<void> =>\n Effect.gen(function* () {\n let toStart: QueueWaiter | undefined\n let needsVisibilityWindow = false\n yield* Effect.uninterruptible(\n Effect.sync(() => {\n if (currentWaiter?.start === start) {\n currentWaiter = undefined\n currentLane = undefined\n lastCompletedLane = lane\n if (urgentWaitQueue.length > 0) {\n const next = pickNextWaiter()\n if (next) {\n currentWaiter = next\n currentLane = next.lane\n recordStartTrace(next, 'direct_handoff')\n toStart = next\n }\n } else if (nonUrgentWaitQueue.length > 0) {\n needsVisibilityWindow = true\n }\n return\n }\n\n // If the fiber is interrupted before it becomes active, remove it from the wait queue.\n removeWaiter(lane, start)\n\n // Safety: if the system became idle (should not happen unless interrupted races), restart the baton.\n if (!currentWaiter) {\n const next = pickNextWaiter()\n if (next) {\n currentWaiter = next\n currentLane = next.lane\n recordStartTrace(next, lastCompletedLane ? 
'direct_handoff' : 'direct_idle')\n toStart = next\n }\n }\n }),\n )\n if (toStart) {\n yield* Deferred.succeed(toStart.start, undefined)\n return\n }\n\n if (needsVisibilityWindow) {\n yield* Effect.yieldNow\n let deferredStart: QueueWaiter | undefined\n yield* Effect.uninterruptible(\n Effect.sync(() => {\n if (currentWaiter) return\n const next = pickNextWaiter()\n if (!next) return\n currentWaiter = next\n currentLane = next.lane\n recordStartTrace(next, 'post_visibility_window')\n deferredStart = next\n }),\n )\n if (deferredStart) {\n yield* Deferred.succeed(deferredStart.start, undefined)\n }\n }\n })\n\n let nextLinkSeq = 0\n const assignLinkId = (existing: string | undefined): string => {\n if (typeof existing === 'string' && existing.length > 0) {\n return existing\n }\n nextLinkSeq += 1\n // Stable and deterministic: never use randomness/time.\n return `${args.instanceId}::l${nextLinkSeq}`\n }\n\n const enqueueTransaction: EnqueueTransaction = <A2, E2>(\n a0: TxnLane | Effect.Effect<A2, E2, never>,\n a1?: Effect.Effect<A2, E2, never>,\n ): Effect.Effect<A2, E2, never> =>\n Effect.gen(function* () {\n const lane: TxnLane = a1 ? (a0 as TxnLane) : 'urgent'\n const eff: Effect.Effect<A2, E2, never> = a1 ? a1 : (a0 as Effect.Effect<A2, E2, never>)\n const stateRef = lane === 'urgent' ? urgentStateRef : nonUrgentStateRef\n\n const contextLookupStartedAtMs = readClockMs()\n const existingLinkId = yield* Effect.service(EffectOpCore.currentLinkId).pipe(Effect.orDie)\n const linkId = assignLinkId(existingLinkId)\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n const phaseTimingEnabled = diagnosticsLevel !== 'off'\n const contextLookupMs = phaseTimingEnabled ? Math.max(0, readClockMs() - contextLookupStartedAtMs) : 0\n\n const resolvePolicyStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const policy = yield* args.resolveConcurrencyPolicy()\n const resolvePolicyMs = phaseTimingEnabled ? 
Math.max(0, readClockMs() - resolvePolicyStartedAtMs) : 0\n const capacity = policy.losslessBackpressureCapacity\n const backpressureStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n yield* (Effect.provideService(acquireBacklogSlot(lane, capacity, policy), EffectOpCore.currentLinkId, linkId) as Effect.Effect<void, never, never>)\n const backpressureMs = phaseTimingEnabled ? Math.max(0, readClockMs() - backpressureStartedAtMs) : 0\n\n const enqueueBookkeepingStartedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const start = yield* Deferred.make<void>()\n const waiter: QueueWaiter = {\n lane,\n start,\n waiterSeq: nextQueueWaiterSeq(),\n enqueueAtMs: readClockMs(),\n ...(currentLane ? { activeLaneAtEnqueue: currentLane } : {}),\n }\n yield* (Effect.provideService(enqueueAndMaybeStart(waiter), EffectOpCore.currentLinkId, linkId) as Effect.Effect<void, never, never>)\n const enqueueBookkeepingMs = phaseTimingEnabled ? Math.max(0, readClockMs() - enqueueBookkeepingStartedAtMs) : 0\n\n return yield* (Effect.provideService(\n Effect.uninterruptibleMask((restore) =>\n Effect.ensuring(\n Effect.flatMap(restore(Deferred.await(start)), () => {\n const startTrace = startTraceByStart.get(start)\n startTraceByStart.delete(start)\n const resumedAtMs = phaseTimingEnabled ? readClockMs() : 0\n const queuePhaseTiming: TxnQueuePhaseTiming | undefined = phaseTimingEnabled\n ? {\n lane,\n waiterSeq: waiter.waiterSeq,\n contextLookupMs,\n resolvePolicyMs,\n backpressureMs,\n enqueueBookkeepingMs,\n queueWaitMs: startTrace?.queueWaitMs ?? 0,\n startHandoffMs: startTrace ? Math.max(0, resumedAtMs - startTrace.startAtMs) : 0,\n ...(startTrace?.startMode ? { startMode: startTrace.startMode } : null),\n ...(startTrace?.activeLaneAtEnqueue ? { activeLaneAtEnqueue: startTrace.activeLaneAtEnqueue } : null),\n ...(startTrace?.previousCompletedLane ? { previousCompletedLane: startTrace.previousCompletedLane } : null),\n ...(startTrace?.queueDepthAtStart ? 
{ queueDepthAtStart: startTrace.queueDepthAtStart } : null),\n }\n : undefined\n return emitStartTrace(diagnosticsLevel, startTrace).pipe(\n Effect.flatMap(() =>\n restore(\n queuePhaseTiming\n ? Effect.provideService(eff, currentTxnQueuePhaseTiming, queuePhaseTiming)\n : eff,\n ),\n ),\n )\n }),\n Effect.uninterruptible(advanceQueue(lane, start).pipe(Effect.flatMap(() => release(stateRef)))),\n ),\n ),\n EffectOpCore.currentLinkId,\n linkId,\n ) as Effect.Effect<A2, E2, never>)\n })\n\n return enqueueTransaction\n })\n","import { Effect, Exit, type ServiceMap } from 'effect'\n\nexport const runSyncExitWithServices = <A, E>(\n effect: Effect.Effect<A, E, any>,\n services: ServiceMap.ServiceMap<any>,\n): Exit.Exit<A, E> => Effect.runSyncExit(Effect.provideServices(effect, services) as Effect.Effect<A, E, never>)\n","import { Effect, Option } from 'effect'\nimport type { ConcurrencyDiagnostics } from './ConcurrencyDiagnostics.js'\nimport {\n SchedulingPolicySurfaceOverridesTag,\n SchedulingPolicySurfaceTag,\n type SchedulingPolicyLimit,\n type SchedulingPolicySurface,\n type SchedulingPolicySurfaceOverrides,\n type SchedulingPolicySurfacePatch,\n} from './env.js'\nimport { normalizeBoolean, normalizePositiveInt, normalizePositiveNumber } from './normalize.js'\n\nexport type SchedulingPolicySurfaceConfigScope = 'builtin' | 'runtime_default' | 'runtime_module' | 'provider'\nexport type ConcurrencyPolicyConfigScope = SchedulingPolicySurfaceConfigScope\n\nexport type ResolvedSchedulingPolicySurface = {\n readonly concurrencyLimit: SchedulingPolicyLimit\n readonly losslessBackpressureCapacity: number\n readonly allowUnbounded: boolean\n readonly pressureWarningThreshold: {\n readonly backlogCount: number\n readonly backlogDurationMs: number\n }\n readonly warningCooldownMs: number\n readonly configScope: SchedulingPolicySurfaceConfigScope\n /** Field-level scope for the effective concurrency limit. 
*/\n readonly concurrencyLimitScope: SchedulingPolicySurfaceConfigScope\n /** The originally requested concurrency limit (for explaining the unbounded gate). */\n readonly requestedConcurrencyLimit: SchedulingPolicyLimit\n /** Field-level scope for the originally requested concurrency limit. */\n readonly requestedConcurrencyLimitScope: SchedulingPolicySurfaceConfigScope\n /** Field-level scope for allowUnbounded. */\n readonly allowUnboundedScope: SchedulingPolicySurfaceConfigScope\n}\nexport type ResolvedConcurrencyPolicy = ResolvedSchedulingPolicySurface\n\nconst normalizeConcurrencyLimit = (v: unknown): SchedulingPolicyLimit | undefined =>\n v === 'unbounded' ? 'unbounded' : normalizePositiveInt(v)\n\ntype ResolvedPolicyCache = {\n readonly runtimeDefaultFingerprint: string\n readonly runtimeModuleFingerprint: string\n readonly providerDefaultFingerprint: string\n readonly providerModuleFingerprint: string\n readonly resolved: ResolvedSchedulingPolicySurface\n}\n\nconst patchFingerprint = (\n patch: SchedulingPolicySurface | SchedulingPolicySurfacePatch | SchedulingPolicySurfaceOverrides | undefined,\n): string => {\n if (!patch) return ''\n const threshold = (patch as any).pressureWarningThreshold\n const thresholdCount =\n threshold && typeof threshold === 'object' ? String((threshold as any).backlogCount ?? '') : ''\n const thresholdDuration =\n threshold && typeof threshold === 'object' ? String((threshold as any).backlogDurationMs ?? '') : ''\n return [\n String((patch as any).concurrencyLimit ?? ''),\n String((patch as any).losslessBackpressureCapacity ?? ''),\n String((patch as any).allowUnbounded ?? ''),\n thresholdCount,\n thresholdDuration,\n String((patch as any).warningCooldownMs ?? ''),\n ].join('|')\n}\n\nexport const makeResolveSchedulingPolicySurface = (args: {\n /** Original options.moduleId (may be undefined); used for module overrides map lookup. 
*/\n readonly moduleId: string | undefined\n /** Optional: one-shot audit diagnostics for unbounded opt-in/blocked. */\n readonly diagnostics?: ConcurrencyDiagnostics\n}): (() => Effect.Effect<ResolvedSchedulingPolicySurface>) => {\n const builtinConcurrencyLimit: SchedulingPolicyLimit = 16\n const builtinLosslessBackpressureCapacity = 4096\n const builtinAllowUnbounded = false\n const builtinThresholdBacklogCount = 1000\n const builtinThresholdBacklogDurationMs = 5000\n const builtinWarningCooldownMs = 30_000\n let cache: ResolvedPolicyCache | undefined\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(SchedulingPolicySurfaceTag)\n const overridesOpt = yield* Effect.serviceOption(SchedulingPolicySurfaceOverridesTag)\n\n const runtimeConfig: SchedulingPolicySurface | undefined = Option.isSome(runtimeConfigOpt)\n ? runtimeConfigOpt.value\n : undefined\n const providerOverrides: SchedulingPolicySurfaceOverrides | undefined = Option.isSome(overridesOpt)\n ? 
overridesOpt.value\n : undefined\n\n let concurrencyLimit: SchedulingPolicyLimit = builtinConcurrencyLimit\n let concurrencyLimitScope: SchedulingPolicySurfaceConfigScope = 'builtin'\n let lastBoundedConcurrencyLimit = builtinConcurrencyLimit as number\n let lastBoundedConcurrencyLimitScope: SchedulingPolicySurfaceConfigScope = 'builtin'\n\n let losslessBackpressureCapacity = builtinLosslessBackpressureCapacity\n let allowUnbounded = builtinAllowUnbounded\n let allowUnboundedScope: SchedulingPolicySurfaceConfigScope = 'builtin'\n let thresholdBacklogCount = builtinThresholdBacklogCount\n let thresholdBacklogDurationMs = builtinThresholdBacklogDurationMs\n let warningCooldownMs = builtinWarningCooldownMs\n\n let configScope: SchedulingPolicySurfaceConfigScope = 'builtin'\n\n const applyPatch = (\n patch: SchedulingPolicySurface | SchedulingPolicySurfacePatch | SchedulingPolicySurfaceOverrides | undefined,\n scope: SchedulingPolicySurfaceConfigScope,\n ): void => {\n if (!patch) return\n let changed = false\n\n const limit = normalizeConcurrencyLimit((patch as any).concurrencyLimit)\n if (limit) {\n concurrencyLimit = limit\n concurrencyLimitScope = scope\n if (limit !== 'unbounded') {\n lastBoundedConcurrencyLimit = limit\n lastBoundedConcurrencyLimitScope = scope\n }\n changed = true\n }\n\n const capacity = normalizePositiveInt((patch as any).losslessBackpressureCapacity)\n if (capacity != null) {\n losslessBackpressureCapacity = capacity\n changed = true\n }\n\n const allow = normalizeBoolean((patch as any).allowUnbounded)\n if (allow != null) {\n allowUnbounded = allow\n allowUnboundedScope = scope\n changed = true\n }\n\n const threshold = (patch as any).pressureWarningThreshold\n if (threshold && typeof threshold === 'object') {\n const count = normalizePositiveInt((threshold as any).backlogCount)\n if (count != null) {\n thresholdBacklogCount = count\n changed = true\n }\n\n const duration = normalizePositiveNumber((threshold as any).backlogDurationMs)\n if 
(duration != null) {\n thresholdBacklogDurationMs = duration\n changed = true\n }\n }\n\n const cooldownMs = normalizePositiveNumber((patch as any).warningCooldownMs)\n if (cooldownMs != null) {\n warningCooldownMs = cooldownMs\n changed = true\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch: SchedulingPolicySurfacePatch | undefined =\n moduleId && runtimeConfig?.overridesByModuleId ? runtimeConfig.overridesByModuleId[moduleId] : undefined\n const providerModulePatch: SchedulingPolicySurfacePatch | undefined =\n moduleId && providerOverrides?.overridesByModuleId ? providerOverrides.overridesByModuleId[moduleId] : undefined\n const runtimeDefaultFingerprint = patchFingerprint(runtimeConfig)\n const runtimeModuleFingerprint = patchFingerprint(runtimeModulePatch)\n const providerDefaultFingerprint = patchFingerprint(providerOverrides)\n const providerModuleFingerprint = patchFingerprint(providerModulePatch)\n\n if (\n cache &&\n cache.runtimeDefaultFingerprint === runtimeDefaultFingerprint &&\n cache.runtimeModuleFingerprint === runtimeModuleFingerprint &&\n cache.providerDefaultFingerprint === providerDefaultFingerprint &&\n cache.providerModuleFingerprint === providerModuleFingerprint\n ) {\n if (args.diagnostics) {\n yield* args.diagnostics.emitUnboundedPolicyIfNeeded({\n policy: cache.resolved,\n trigger: { kind: 'concurrencyPolicy', name: 'resolve' },\n })\n }\n return cache.resolved\n }\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n const requestedConcurrencyLimit = concurrencyLimit\n const requestedConcurrencyLimitScope = concurrencyLimitScope\n\n // Unbounded gate: effective unbounded requires an explicit allowUnbounded=true (FR-004).\n if (typeof concurrencyLimit === 'string' && 
!allowUnbounded) {\n concurrencyLimit = lastBoundedConcurrencyLimit\n concurrencyLimitScope = lastBoundedConcurrencyLimitScope\n }\n\n // NOTE: diagnostics may add implementation-level metrics (e.g. \"saturated duration\"); the resolver only decides configuration.\n const resolved: ResolvedSchedulingPolicySurface = {\n concurrencyLimit,\n losslessBackpressureCapacity,\n allowUnbounded,\n pressureWarningThreshold: {\n backlogCount: thresholdBacklogCount,\n backlogDurationMs: thresholdBacklogDurationMs,\n },\n warningCooldownMs,\n configScope,\n concurrencyLimitScope,\n requestedConcurrencyLimit,\n requestedConcurrencyLimitScope,\n allowUnboundedScope,\n }\n\n if (args.diagnostics) {\n yield* args.diagnostics.emitUnboundedPolicyIfNeeded({\n policy: resolved,\n trigger: { kind: 'concurrencyPolicy', name: 'resolve' },\n })\n }\n\n cache = {\n runtimeDefaultFingerprint,\n runtimeModuleFingerprint,\n providerDefaultFingerprint,\n providerModuleFingerprint,\n resolved,\n }\n\n return resolved\n })\n}\n\nexport const makeResolveConcurrencyPolicy = makeResolveSchedulingPolicySurface\n","import { Effect, Option } from 'effect'\nimport {\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n type StateTransactionOverrides,\n type TxnLanesPatch,\n} from './env.js'\nimport { normalizeBoolean, normalizeNonNegativeNumber } from './normalize.js'\n\nexport type TxnLanePolicyScope = 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'\n\nexport type TxnLaneQueueMode = 'fifo' | 'lanes'\n\nexport type TxnLaneYieldStrategy = 'baseline' | 'inputPending'\n\nexport type ResolvedTxnLanePolicy = {\n readonly enabled: boolean\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n readonly configScope: TxnLanePolicyScope\n readonly budgetMs: number\n readonly debounceMs: number\n readonly maxLagMs: number\n readonly allowCoalesce: boolean\n readonly yieldStrategy: TxnLaneYieldStrategy\n readonly queueMode: TxnLaneQueueMode\n}\n\ntype ModuleStateTransactionOptions =\n | {\n 
readonly txnLanes?: TxnLanesPatch\n }\n | undefined\n\nconst normalizeMs = normalizeNonNegativeNumber\nconst normalizeBool = normalizeBoolean\n\nexport const makeResolveTxnLanePolicy = (args: {\n /** Raw options.moduleId (may be undefined), used to query overrides maps. */\n readonly moduleId: string | undefined\n readonly stateTransaction: ModuleStateTransactionOptions\n}): (() => Effect.Effect<ResolvedTxnLanePolicy>) => {\n const moduleRuntimeDefaultPatch = args.stateTransaction?.txnLanes\n\n const builtinEnabled = true\n const builtinBudgetMs = 1\n const builtinDebounceMs = 0\n const builtinMaxLagMs = 50\n const builtinAllowCoalesce = true\n const builtinYieldStrategy: TxnLaneYieldStrategy = 'baseline'\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(StateTransactionConfigTag)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(overridesOpt) ? overridesOpt.value : undefined\n\n let enabled = builtinEnabled\n let budgetMs = builtinBudgetMs\n let debounceMs = builtinDebounceMs\n let maxLagMs = builtinMaxLagMs\n let allowCoalesce = builtinAllowCoalesce\n let yieldStrategy: TxnLaneYieldStrategy = builtinYieldStrategy\n\n let overrideMode: ResolvedTxnLanePolicy['overrideMode'] = undefined\n\n let configScope: TxnLanePolicyScope = 'builtin'\n\n const applyPatch = (\n patch: TxnLanesPatch | StateTransactionOverrides | undefined,\n scope: TxnLanePolicyScope,\n ): void => {\n if (!patch) return\n\n const raw = (patch as any).txnLanes != null ? 
(patch as any).txnLanes : patch\n if (!raw || typeof raw !== 'object') return\n\n let changed = false\n\n const nextEnabled = normalizeBool((raw as any).enabled)\n if (nextEnabled != null) {\n enabled = nextEnabled\n changed = true\n }\n\n const nextOverrideMode = (raw as any).overrideMode\n if (nextOverrideMode === 'forced_off' || nextOverrideMode === 'forced_sync') {\n overrideMode = nextOverrideMode\n changed = true\n }\n\n const nextBudgetMs = normalizeMs((raw as any).budgetMs)\n if (nextBudgetMs != null) {\n budgetMs = nextBudgetMs\n changed = true\n }\n\n const nextDebounceMs = normalizeMs((raw as any).debounceMs)\n if (nextDebounceMs != null) {\n debounceMs = nextDebounceMs\n changed = true\n }\n\n const nextMaxLagMs = normalizeMs((raw as any).maxLagMs)\n if (nextMaxLagMs != null) {\n maxLagMs = nextMaxLagMs\n changed = true\n }\n\n const nextAllowCoalesce = normalizeBool((raw as any).allowCoalesce)\n if (nextAllowCoalesce != null) {\n allowCoalesce = nextAllowCoalesce\n changed = true\n }\n\n const nextYieldStrategy = (raw as any).yieldStrategy\n if (nextYieldStrategy === 'baseline' || nextYieldStrategy === 'inputPending') {\n yieldStrategy = nextYieldStrategy\n changed = true\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch =\n moduleId && runtimeConfig?.txnLanesOverridesByModuleId\n ? runtimeConfig.txnLanesOverridesByModuleId[moduleId]\n : undefined\n const providerModulePatch =\n moduleId && providerOverrides?.txnLanesOverridesByModuleId\n ? providerOverrides.txnLanesOverridesByModuleId[moduleId]\n : undefined\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(moduleRuntimeDefaultPatch, 'runtime_default')\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n const effectiveEnabled = overrideMode ? 
false : enabled\n const queueMode: TxnLaneQueueMode = effectiveEnabled ? 'lanes' : 'fifo'\n\n return {\n enabled: effectiveEnabled,\n ...(overrideMode ? { overrideMode } : {}),\n configScope,\n budgetMs,\n debounceMs,\n maxLagMs,\n allowCoalesce,\n yieldStrategy,\n queueMode,\n }\n })\n}\n","import { Effect, Option } from 'effect'\nimport {\n StateTransactionConfigTag,\n StateTransactionOverridesTag,\n type StateTransactionOverrides,\n type StateTransactionTraitConvergeOverrides,\n type TraitConvergeTimeSlicingPatch,\n} from './env.js'\nimport { normalizePositiveNumber } from './normalize.js'\nimport type { TraitConvergeConfigScope, TraitConvergeRequestedMode } from '../../state-trait/model.js'\n\nexport type ResolvedTraitConvergeTimeSlicingConfig = {\n readonly enabled: boolean\n readonly debounceMs: number\n readonly maxLagMs: number\n}\n\nexport type ResolvedTraitConvergeConfig = {\n readonly traitConvergeMode: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs: number\n readonly traitConvergeDecisionBudgetMs: number\n readonly traitConvergeTimeSlicing: ResolvedTraitConvergeTimeSlicingConfig\n readonly configScope: TraitConvergeConfigScope\n}\n\ntype ModuleStateTransactionOptions =\n | {\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeMode?: 'auto' | 'full' | 'dirty'\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n }\n | undefined\n\nconst normalizePositiveMs = normalizePositiveNumber\n\nconst normalizeRequestedMode = (mode: unknown): TraitConvergeRequestedMode | undefined =>\n mode === 'auto' || mode === 'full' || mode === 'dirty' ? mode : undefined\n\nconst normalizeBool = (value: unknown): boolean | undefined => (typeof value === 'boolean' ? value : undefined)\n\nexport const makeResolveTraitConvergeConfig = (args: {\n /** Original options.moduleId (may be undefined); used for module overrides map lookup. 
*/\n readonly moduleId: string | undefined\n readonly stateTransaction: ModuleStateTransactionOptions\n}): (() => Effect.Effect<ResolvedTraitConvergeConfig>) => {\n const builtinTraitConvergeBudgetMs: number = normalizePositiveMs(args.stateTransaction?.traitConvergeBudgetMs) ?? 200\n const builtinTraitConvergeDecisionBudgetMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeDecisionBudgetMs) ?? 0.5\n const builtinTraitConvergeMode: TraitConvergeRequestedMode =\n normalizeRequestedMode(args.stateTransaction?.traitConvergeMode) ?? 'auto'\n\n const builtinTimeSlicingEnabled: boolean =\n normalizeBool(args.stateTransaction?.traitConvergeTimeSlicing?.enabled) ?? false\n const builtinTimeSlicingDebounceMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeTimeSlicing?.debounceMs) ?? 16\n const builtinTimeSlicingMaxLagMs: number =\n normalizePositiveMs(args.stateTransaction?.traitConvergeTimeSlicing?.maxLagMs) ?? 200\n\n return () =>\n Effect.gen(function* () {\n const runtimeConfigOpt = yield* Effect.serviceOption(StateTransactionConfigTag)\n const overridesOpt = yield* Effect.serviceOption(StateTransactionOverridesTag)\n\n const runtimeConfig = Option.isSome(runtimeConfigOpt) ? runtimeConfigOpt.value : undefined\n const providerOverrides = Option.isSome(overridesOpt) ? 
overridesOpt.value : undefined\n\n let traitConvergeMode = builtinTraitConvergeMode\n let traitConvergeBudgetMs = builtinTraitConvergeBudgetMs\n let traitConvergeDecisionBudgetMs = builtinTraitConvergeDecisionBudgetMs\n let traitConvergeTimeSlicingEnabled = builtinTimeSlicingEnabled\n let traitConvergeTimeSlicingDebounceMs = builtinTimeSlicingDebounceMs\n let traitConvergeTimeSlicingMaxLagMs = builtinTimeSlicingMaxLagMs\n\n let configScope: TraitConvergeConfigScope = 'builtin'\n\n const applyPatch = (\n patch: StateTransactionTraitConvergeOverrides | StateTransactionOverrides | undefined,\n scope: TraitConvergeConfigScope,\n ): void => {\n if (!patch) return\n let changed = false\n\n const mode = normalizeRequestedMode((patch as any).traitConvergeMode)\n if (mode) {\n traitConvergeMode = mode\n changed = true\n }\n\n const budgetMs = normalizePositiveMs((patch as any).traitConvergeBudgetMs)\n if (budgetMs != null) {\n traitConvergeBudgetMs = budgetMs\n changed = true\n }\n\n const decisionBudgetMs = normalizePositiveMs((patch as any).traitConvergeDecisionBudgetMs)\n if (decisionBudgetMs != null) {\n traitConvergeDecisionBudgetMs = decisionBudgetMs\n changed = true\n }\n\n const timeSlicing = (patch as any).traitConvergeTimeSlicing\n if (timeSlicing && typeof timeSlicing === 'object') {\n const enabled = normalizeBool((timeSlicing as any).enabled)\n if (enabled != null) {\n traitConvergeTimeSlicingEnabled = enabled\n changed = true\n }\n\n const debounceMs = normalizePositiveMs((timeSlicing as any).debounceMs)\n if (debounceMs != null) {\n traitConvergeTimeSlicingDebounceMs = debounceMs\n changed = true\n }\n\n const maxLagMs = normalizePositiveMs((timeSlicing as any).maxLagMs)\n if (maxLagMs != null) {\n traitConvergeTimeSlicingMaxLagMs = maxLagMs\n changed = true\n }\n }\n\n if (changed) {\n configScope = scope\n }\n }\n\n const moduleId = args.moduleId\n const runtimeModulePatch =\n moduleId && runtimeConfig?.traitConvergeOverridesByModuleId\n ? 
runtimeConfig.traitConvergeOverridesByModuleId[moduleId]\n : undefined\n const providerModulePatch =\n moduleId && providerOverrides?.traitConvergeOverridesByModuleId\n ? providerOverrides.traitConvergeOverridesByModuleId[moduleId]\n : undefined\n\n // priority: provider > runtime_module > runtime_default > builtin\n applyPatch(runtimeConfig, 'runtime_default')\n applyPatch(runtimeModulePatch, 'runtime_module')\n applyPatch(providerOverrides, 'provider')\n applyPatch(providerModulePatch, 'provider')\n\n return {\n traitConvergeMode,\n traitConvergeBudgetMs,\n traitConvergeDecisionBudgetMs,\n traitConvergeTimeSlicing: {\n enabled: traitConvergeTimeSlicingEnabled,\n debounceMs: traitConvergeTimeSlicingDebounceMs,\n maxLagMs: traitConvergeTimeSlicingMaxLagMs,\n },\n configScope,\n }\n })\n}\n","import type { Schema } from 'effect'\nimport * as SchemaAST from 'effect/SchemaAST'\nimport {\n type StateTraitProgram,\n type StateTraitSpec,\n type StateTraitEntry,\n type StateTraitGraph,\n type StateTraitGraphEdge,\n type StateTraitGraphNode,\n type StateTraitField,\n type StateTraitFieldTrait,\n type StateTraitKind,\n type StateTraitPlan,\n type StateTraitPlanStep,\n type StateTraitResource,\n type StateTraitSchemaPathRef,\n collectNodeMeta,\n normalizeSpec,\n} from './model.js'\nimport * as Meta from './meta.js'\nimport {\n compareFieldPath,\n getFieldPathId,\n makeFieldPathIdRegistry,\n normalizeFieldPath,\n type FieldPath,\n type FieldPathId,\n} from '../field-path.js'\nimport { fnv1a32, stableStringify } from '../digest.js'\nimport { getConvergeStaticIrDigest, type ConvergeStaticIrRegistry } from './converge-ir.js'\n\nconst nowPerf = (): number =>\n typeof globalThis.performance !== 'undefined' && typeof globalThis.performance.now === 'function'\n ? 
globalThis.performance.now()\n : Date.now()\n\ntype ConvergeWriter = Extract<StateTraitEntry<any, string>, { readonly kind: 'computed' | 'link' }>\n\nconst collectMultipleWritersError = (\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n): ConvergeStaticIrRegistry['configError'] | undefined => {\n const kindsByFieldPath = new Map<string, Set<StateTraitKind>>()\n\n for (const entry of entries) {\n if (entry.kind !== 'computed' && entry.kind !== 'link' && entry.kind !== 'source' && entry.kind !== 'externalStore') {\n continue\n }\n const set = kindsByFieldPath.get(entry.fieldPath) ?? new Set<StateTraitKind>()\n set.add(entry.kind)\n kindsByFieldPath.set(entry.fieldPath, set)\n }\n\n const conflicts: Array<{ readonly fieldPath: string; readonly kinds: ReadonlyArray<StateTraitKind> }> = []\n for (const [fieldPath, kinds] of kindsByFieldPath.entries()) {\n if (kinds.size <= 1) continue\n conflicts.push({ fieldPath, kinds: Array.from(kinds).sort() })\n }\n\n if (conflicts.length === 0) return undefined\n\n conflicts.sort((a, b) => (a.fieldPath < b.fieldPath ? -1 : a.fieldPath > b.fieldPath ? 1 : 0))\n const fields = conflicts.map((c) => c.fieldPath)\n const primary = conflicts[0]!\n const kindSummary = primary.kinds.join(' + ')\n\n return {\n code: 'MULTIPLE_WRITERS',\n message:\n `[StateTrait.build] Multiple writers for field \"${primary.fieldPath}\" (${kindSummary}). ` +\n 'Only one of computed/link/source/externalStore can write a fieldPath.',\n fields,\n }\n}\n\nconst getConvergeWriterDeps = (entry: ConvergeWriter): ReadonlyArray<string> => {\n if (entry.kind === 'computed') {\n return ((entry.meta as any)?.deps ?? 
[]) as ReadonlyArray<string>\n }\n return [entry.meta.from as string]\n}\n\nconst computeConvergeTopoOrder = (\n writers: ReadonlyArray<ConvergeWriter>,\n): { readonly order: ReadonlyArray<string>; readonly configError?: ConvergeStaticIrRegistry['configError'] } => {\n const writerByPath = new Map<string, ConvergeWriter>()\n for (const entry of writers) {\n const existing = writerByPath.get(entry.fieldPath)\n if (existing) {\n return {\n order: [],\n configError: {\n code: 'MULTIPLE_WRITERS',\n message: `[StateTrait.converge] Multiple writers for field \"${entry.fieldPath}\" (${existing.kind} + ${entry.kind}).`,\n fields: [entry.fieldPath],\n },\n }\n }\n writerByPath.set(entry.fieldPath, entry)\n }\n\n const nodes = new Set<string>()\n for (const entry of writers) {\n nodes.add(entry.fieldPath)\n }\n\n const indegree = new Map<string, number>()\n const forward = new Map<string, Array<string>>()\n\n for (const node of nodes) {\n indegree.set(node, 0)\n forward.set(node, [])\n }\n\n for (const entry of writers) {\n const to = entry.fieldPath\n const deps = getConvergeWriterDeps(entry)\n for (const dep of deps) {\n if (!nodes.has(dep)) continue\n forward.get(dep)!.push(to)\n indegree.set(to, (indegree.get(to) ?? 0) + 1)\n }\n }\n\n const queue: Array<string> = []\n for (const [node, deg] of indegree.entries()) {\n if (deg === 0) queue.push(node)\n }\n\n const order: Array<string> = []\n while (queue.length) {\n const n = queue.shift()!\n order.push(n)\n const outs = forward.get(n)!\n for (const to of outs) {\n const next = (indegree.get(to) ?? 
0) - 1\n indegree.set(to, next)\n if (next === 0) queue.push(to)\n }\n }\n\n if (order.length !== nodes.size) {\n const remaining = Array.from(nodes).filter((n) => !order.includes(n))\n return {\n order: [],\n configError: {\n code: 'CYCLE_DETECTED',\n message: `[StateTrait.converge] Cycle detected in computed/link graph: ${remaining.join(', ')}`,\n fields: remaining,\n },\n }\n }\n\n return { order }\n}\n\nconst collectSchemaFieldPaths = (schema: Schema.Schema<any>): ReadonlyArray<FieldPath> => {\n const byKey = new Map<string, FieldPath>()\n\n const add = (path: FieldPath): void => {\n const normalized = normalizeFieldPath(path)\n if (!normalized) return\n byKey.set(JSON.stringify(normalized), normalized)\n }\n\n const visit = (ast: SchemaAST.AST, prefix: ReadonlyArray<string>, seen: Set<SchemaAST.AST>): void => {\n let current: SchemaAST.AST = SchemaAST.toType(ast)\n\n while (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) return\n seen.add(current)\n current = SchemaAST.toType(current.thunk())\n }\n\n if (SchemaAST.isUnion(current)) {\n for (const t of current.types) {\n visit(t, prefix, seen)\n }\n return\n }\n\n // Array / Tuple: indices do not enter the FieldPathId space; recurse into element types to support `items[0].name -> items.name`.\n if (SchemaAST.isArrays(current)) {\n for (const e of current.elements) {\n visit(e, prefix, seen)\n }\n for (const r of current.rest) {\n visit(r, prefix, seen)\n }\n return\n }\n\n if (SchemaAST.isObjects(current)) {\n for (const ps of current.propertySignatures) {\n const seg = String(ps.name)\n if (!seg) continue\n const next = [...prefix, seg]\n add(next)\n visit(ps.type, next, seen)\n }\n // Index signature (Record<string, T>) can't be enumerated statically: avoid generating misaligned dynamic key paths.\n return\n }\n\n // Any / Unknown / Object / Declaration (open types): cannot enumerate nested paths; stop conservatively.\n }\n\n visit(schema.ast as unknown as SchemaAST.AST, [], new Set())\n return 
Array.from(byKey.values()).sort(compareFieldPath)\n}\n\nconst buildConvergeIr = (\n stateSchema: Schema.Schema<any>,\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n): ConvergeStaticIrRegistry => {\n const startedAt = nowPerf()\n const generation = 0\n\n const multipleWritersError = collectMultipleWritersError(entries)\n\n const writers = entries.filter((e): e is ConvergeWriter => e.kind === 'computed' || e.kind === 'link')\n\n const writersKey = writers\n .map((entry) => `${entry.kind}:${entry.fieldPath}`)\n .sort()\n .join('|')\n\n const depsKey = writers\n .map((entry) => {\n const deps = getConvergeWriterDeps(entry).slice().sort().join(',')\n const scheduling = (entry.meta as any)?.scheduling === 'deferred' ? 'd' : 'i'\n return `${entry.kind}:${entry.fieldPath}@${scheduling}=>${deps}`\n })\n .sort()\n .join('|')\n\n const writerByPath = new Map<string, ConvergeWriter>()\n for (const entry of writers) {\n writerByPath.set(entry.fieldPath, entry)\n }\n\n const topo = multipleWritersError\n ? { order: [] as ReadonlyArray<string> }\n : writers.length > 0\n ? computeConvergeTopoOrder(writers)\n : { order: [] as ReadonlyArray<string> }\n const stepsById: Array<ConvergeWriter> = topo.configError ? 
[] : topo.order.map((path) => writerByPath.get(path)!)\n\n const fieldPathTable = new Map<string, FieldPath>()\n const addPath = (path: FieldPath): void => {\n for (let i = 1; i <= path.length; i++) {\n const prefix = path.slice(0, i)\n const key = JSON.stringify(prefix)\n if (!fieldPathTable.has(key)) fieldPathTable.set(key, prefix)\n }\n }\n\n // 065: FieldPathId semantics must cover all enumerable field paths of stateSchema; otherwise reducer patchPaths can't map and will fall back to dirtyAll.\n for (const schemaPath of collectSchemaFieldPaths(stateSchema)) {\n addPath(schemaPath)\n }\n\n for (const entry of writers) {\n const out = normalizeFieldPath(entry.fieldPath)\n if (out) addPath(out)\n for (const dep of getConvergeWriterDeps(entry)) {\n const depPath = normalizeFieldPath(dep)\n if (depPath) addPath(depPath)\n }\n }\n\n const fieldPaths = Array.from(fieldPathTable.values()).sort(compareFieldPath)\n const fieldPathIdRegistry = makeFieldPathIdRegistry(fieldPaths)\n const fieldPathsKey = fnv1a32(stableStringify(fieldPaths))\n const staticIrDigest = getConvergeStaticIrDigest({\n writersKey,\n depsKey,\n fieldPathsKey,\n })\n\n const stepOutFieldPathIdByStepId: Array<FieldPathId> = []\n const stepDepsFieldPathIdsByStepId: Array<ReadonlyArray<FieldPathId>> = []\n const stepSchedulingByStepId: Array<'immediate' | 'deferred'> = []\n\n for (const entry of stepsById) {\n const out = normalizeFieldPath(entry.fieldPath)\n const outId = out != null ? 
getFieldPathId(fieldPathIdRegistry, out) : undefined\n if (outId == null) {\n throw new Error(`[StateTrait.build] Failed to map converge output fieldPath \"${entry.fieldPath}\" to FieldPathId.`)\n }\n\n const depIds: Array<FieldPathId> = []\n for (const dep of getConvergeWriterDeps(entry)) {\n const depPath = normalizeFieldPath(dep)\n if (!depPath) continue\n const depId = getFieldPathId(fieldPathIdRegistry, depPath)\n if (depId != null) depIds.push(depId)\n }\n\n stepOutFieldPathIdByStepId.push(outId)\n stepDepsFieldPathIdsByStepId.push(depIds)\n stepSchedulingByStepId.push((entry.meta as any)?.scheduling === 'deferred' ? 'deferred' : 'immediate')\n }\n\n const topoOrder = stepsById.map((_, i) => i)\n const buildDurationMs = Math.max(0, nowPerf() - startedAt)\n\n return {\n generation,\n writersKey,\n depsKey,\n fieldPathsKey,\n staticIrDigest,\n fieldPaths,\n fieldPathIdRegistry,\n ...(multipleWritersError ? { configError: multipleWritersError } : topo.configError ? { configError: topo.configError } : null),\n stepsById,\n stepOutFieldPathIdByStepId,\n stepDepsFieldPathIdsByStepId,\n stepSchedulingByStepId,\n topoOrder,\n buildDurationMs,\n }\n}\n\n/**\n * Builds a normalized FieldTrait from a StateTraitEntry.\n *\n * - Currently uses explicit deps for computed/source and link edges; deeper dependency analysis is intentionally not performed.\n * - If we evolve explicit dependency declarations further, extend here.\n */\nconst toFieldTrait = (entry: StateTraitEntry<any, string>): StateTraitFieldTrait => {\n const deps: Array<string> = []\n\n if (entry.kind === 'computed') {\n const meta = entry.meta as any\n const list = meta.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n } else if (entry.kind === 'source') {\n const meta = entry.meta as any\n const list = meta.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n } else if (entry.kind === 'link') {\n deps.push(entry.meta.from as string)\n } else if (entry.kind === 
'check') {\n const meta = entry.meta as any\n const rules = (meta?.rules ?? {}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (rule && typeof rule === 'object') {\n const list = rule.deps as ReadonlyArray<string> | undefined\n if (list) deps.push(...list)\n }\n }\n }\n\n return {\n fieldId: entry.fieldPath,\n kind: entry.kind,\n // Keep meta identical to Entry.meta at runtime so install can reuse it directly.\n meta: entry.meta as any,\n deps,\n }\n}\n\n/**\n * Builds Field / Node / Edge / Resource sets from normalized entries.\n */\nconst buildGraph = (\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n nodeMetaByFieldPath: ReadonlyMap<string, Meta.TraitMeta>,\n): {\n readonly graph: StateTraitGraph\n readonly plan: StateTraitPlan\n} => {\n const fieldMap = new Map<string, StateTraitField>()\n const nodes: Array<StateTraitGraphNode> = []\n const edges: Array<StateTraitGraphEdge> = []\n const resourcesById = new Map<string, StateTraitResource>()\n const planSteps: Array<StateTraitPlanStep> = []\n\n const ensureField = (fieldPath: string): StateTraitField => {\n let field = fieldMap.get(fieldPath)\n if (!field) {\n field = {\n id: fieldPath,\n path: fieldPath,\n traits: [],\n }\n fieldMap.set(fieldPath, field)\n }\n return field\n }\n\n for (const entry of entries) {\n const fieldPath = entry.fieldPath\n const field = ensureField(fieldPath)\n const trait = toFieldTrait(entry)\n\n ;(field.traits as Array<StateTraitFieldTrait>).push(trait)\n\n // Build Graph edges and Plan steps by kind.\n if (entry.kind === 'computed') {\n const stepId = `computed:${fieldPath}`\n planSteps.push({\n id: stepId,\n kind: 'computed-update',\n targetFieldPath: fieldPath,\n // Note: the current version does not statically analyze computed dependencies; sourceFieldPaths remains empty.\n })\n // If deps is explicitly declared, add Graph edges (for diagnostics / reverse-closure computation).\n const deps = (entry.meta as any).deps as 
ReadonlyArray<string> | undefined\n if (deps) {\n for (const dep of deps) {\n ensureField(dep)\n edges.push({\n id: `computed:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'computed',\n })\n }\n }\n } else if (entry.kind === 'link') {\n const from = entry.meta.from as string\n ensureField(from)\n\n const edgeId = `link:${from}->${fieldPath}`\n edges.push({\n id: edgeId,\n from,\n to: fieldPath,\n kind: 'link',\n })\n\n planSteps.push({\n id: `link:${fieldPath}`,\n kind: 'link-propagate',\n targetFieldPath: fieldPath,\n sourceFieldPaths: [from],\n debugInfo: {\n graphEdgeId: edgeId,\n },\n })\n } else if (entry.kind === 'source') {\n const resourceId = entry.meta.resource\n const resourceMeta = Meta.sanitize((entry.meta as any).meta)\n\n const existing = resourcesById.get(resourceId)\n if (existing) {\n const ownerFields = [...existing.ownerFields, fieldPath]\n let meta = existing.meta\n let metaOrigin = existing.metaOrigin\n let metaConflicts = existing.metaConflicts\n\n if (resourceMeta) {\n const merged = Meta.mergeCanonical(\n { meta, origin: metaOrigin, conflicts: metaConflicts },\n { origin: fieldPath, meta: resourceMeta },\n )\n meta = merged.meta\n metaOrigin = merged.origin\n metaConflicts = merged.conflicts\n }\n\n resourcesById.set(resourceId, {\n ...existing,\n ownerFields,\n meta,\n metaOrigin,\n metaConflicts,\n })\n } else {\n resourcesById.set(resourceId, {\n resourceId,\n // Use a simple identifier string for now; may evolve into a structured form based on key rules.\n keySelector: `StateTrait.source@${fieldPath}`,\n ownerFields: [fieldPath],\n meta: resourceMeta,\n metaOrigin: resourceMeta ? 
fieldPath : undefined,\n })\n }\n\n planSteps.push({\n id: `source:${fieldPath}`,\n kind: 'source-refresh',\n targetFieldPath: fieldPath,\n resourceId,\n keySelectorId: `StateTrait.source@${fieldPath}`,\n })\n\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps) {\n for (const dep of deps) {\n ensureField(dep)\n edges.push({\n id: `source-dep:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'source-dep',\n })\n }\n }\n } else if (entry.kind === 'externalStore') {\n planSteps.push({\n id: `external-store:${fieldPath}`,\n kind: 'external-store-sync',\n targetFieldPath: fieldPath,\n })\n } else if (entry.kind === 'check') {\n planSteps.push({\n id: `check:${fieldPath}`,\n kind: 'check-validate',\n targetFieldPath: fieldPath,\n })\n\n // If the rule explicitly declares deps, add Graph edges (for ReverseClosure scoped validate).\n if (trait.deps.length > 0) {\n for (const dep of trait.deps) {\n ensureField(dep)\n edges.push({\n id: `check-dep:${dep}->${fieldPath}`,\n from: dep,\n to: fieldPath,\n kind: 'check-dep',\n })\n }\n }\n }\n }\n\n for (const field of fieldMap.values()) {\n nodes.push({\n id: field.id,\n field,\n traits: field.traits,\n meta: nodeMetaByFieldPath.get(field.id),\n })\n }\n\n const graph: StateTraitGraph = {\n _tag: 'StateTraitGraph',\n nodes,\n edges,\n resources: Array.from(resourcesById.values()),\n }\n\n const plan: StateTraitPlan = {\n _tag: 'StateTraitPlan',\n steps: planSteps,\n }\n\n return { graph, plan }\n}\n\n/**\n * Performs a simple cycle detection for link edges.\n *\n * - Only considers edges with kind = 'link'; computed/source do not participate in the first version.\n * - On cycle detection, throws an error with path context to avoid infinite updates at runtime.\n */\nconst assertNoLinkCycles = (edges: ReadonlyArray<StateTraitGraphEdge>): void => {\n const adjacency = new Map<string, string[]>()\n\n for (const edge of edges) {\n if (edge.kind !== 'link') continue\n const list = 
adjacency.get(edge.from) ?? []\n list.push(edge.to)\n adjacency.set(edge.from, list)\n }\n\n const visited = new Set<string>()\n const stack = new Set<string>()\n\n const dfs = (node: string): void => {\n if (stack.has(node)) {\n throw new Error(\n `[StateTrait.build] link cycle detected at field \"${node}\". ` +\n 'Please check link traits for circular dependencies.',\n )\n }\n if (visited.has(node)) return\n visited.add(node)\n stack.add(node)\n\n const nexts = adjacency.get(node)\n if (nexts) {\n for (const to of nexts) {\n dfs(to)\n }\n }\n\n stack.delete(node)\n }\n\n for (const node of adjacency.keys()) {\n if (!visited.has(node)) {\n dfs(node)\n }\n }\n}\n\nconst collectSchemaPaths = (\n entries: ReadonlyArray<StateTraitEntry<any, string>>,\n): ReadonlyArray<StateTraitSchemaPathRef> => {\n const byKey = new Map<string, StateTraitSchemaPathRef>()\n\n const add = (ref: StateTraitSchemaPathRef): void => {\n if (!ref.path) return\n const k = `${ref.kind}|${ref.entryKind}|${ref.entryFieldPath}|${ref.ruleName ?? ''}|${ref.path}`\n byKey.set(k, ref)\n }\n\n const getCheckWritebackPath = (entry: Extract<StateTraitEntry<any, string>, { readonly kind: 'check' }>): string => {\n const wb = (entry.meta as any)?.writeback\n const p = wb && typeof wb === 'object' ? (wb as any).path : undefined\n const writebackPath = typeof p === 'string' && p.startsWith('errors.') ? p : undefined\n\n if (writebackPath) return writebackPath\n\n const fieldPath = entry.fieldPath\n if (fieldPath.endsWith('[]')) {\n return `errors.${fieldPath.slice(0, -2)}`\n }\n return `errors.${fieldPath}`\n }\n\n for (const entry of entries) {\n add({\n kind: 'fieldPath',\n entryKind: entry.kind,\n entryFieldPath: entry.fieldPath,\n path: entry.fieldPath,\n })\n\n if (entry.kind === 'computed' || entry.kind === 'source') {\n const deps = ((entry.meta as any)?.deps ?? 
[]) as ReadonlyArray<string>\n for (const dep of deps) {\n add({\n kind: 'dep',\n entryKind: entry.kind,\n entryFieldPath: entry.fieldPath,\n path: dep,\n })\n }\n }\n\n if (entry.kind === 'link') {\n add({\n kind: 'link_from',\n entryKind: 'link',\n entryFieldPath: entry.fieldPath,\n path: entry.meta.from as string,\n })\n }\n\n if (entry.kind === 'check') {\n add({\n kind: 'check_writeback',\n entryKind: 'check',\n entryFieldPath: entry.fieldPath,\n path: getCheckWritebackPath(entry),\n })\n\n const rules = ((entry.meta as any)?.rules ?? {}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (!rule || typeof rule !== 'object') continue\n const deps = (rule.deps ?? []) as ReadonlyArray<string>\n for (const dep of deps) {\n add({\n kind: 'dep',\n entryKind: 'check',\n entryFieldPath: entry.fieldPath,\n ruleName: name,\n path: dep,\n })\n }\n }\n }\n }\n\n return Array.from(byKey.entries())\n .sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0))\n .map(([, v]) => v)\n}\n\n/**\n * Builds a StateTraitProgram from the given stateSchema and trait spec.\n *\n * - Pure function: does not depend on external Env / global state.\n * - Current implementation focuses on:\n * - Normalizing Spec into entries.\n * - Building a lightweight Graph / Plan from entries.\n * - Running basic cycle detection for link edges.\n *\n * If we later need finer-grained dependency analysis (e.g. 
static analysis for computed/key),\n * evolve it inside this module without changing the public API surface.\n */\nexport const build = <S extends object>(\n stateSchema: Schema.Schema<S>,\n spec: StateTraitSpec<S>,\n): StateTraitProgram<S> => {\n const entries = normalizeSpec(spec) as ReadonlyArray<StateTraitEntry<S, string>>\n const nodeMetaByFieldPath = collectNodeMeta(spec)\n\n // Phase 4 (US2): require explicit deps (Graph/diagnostics/replay treat deps as the single dependency source of truth).\n for (const entry of entries) {\n if (entry.kind === 'computed') {\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for computed \"${entry.fieldPath}\". ` +\n 'Please use StateTrait.computed({ deps: [...], get: ... }).',\n )\n }\n }\n if (entry.kind === 'source') {\n const deps = (entry.meta as any).deps as ReadonlyArray<string> | undefined\n if (deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for source \"${entry.fieldPath}\". ` +\n 'Please provide meta.deps for StateTrait.source({ deps: [...], ... }).',\n )\n }\n }\n if (entry.kind === 'check') {\n const rules = ((entry.meta as any)?.rules ?? {}) as Record<string, any>\n for (const name of Object.keys(rules)) {\n const rule = rules[name]\n if (typeof rule === 'function' || !rule || typeof rule !== 'object') {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for check \"${entry.fieldPath}\" rule \"${name}\". ` +\n 'Please use { deps: [...], validate: ... } form.',\n )\n }\n if ((rule as any).deps === undefined) {\n throw new Error(\n `[StateTrait.build] Missing explicit deps for check \"${entry.fieldPath}\" rule \"${name}\". 
` +\n 'Please provide deps: [...].',\n )\n }\n }\n }\n }\n\n const { graph, plan } = buildGraph(entries, nodeMetaByFieldPath)\n\n // Run a cycle check for link edges to avoid obvious configuration errors.\n assertNoLinkCycles(graph.edges)\n\n return {\n stateSchema,\n spec,\n entries: entries as ReadonlyArray<StateTraitEntry<any, string>>,\n graph,\n plan,\n convergeIr: buildConvergeIr(stateSchema as any, entries as ReadonlyArray<StateTraitEntry<any, string>>),\n schemaPaths: collectSchemaPaths(entries as ReadonlyArray<StateTraitEntry<any, string>>),\n }\n}\n","import { Effect } from 'effect'\nimport type { ModuleRuntime as PublicModuleRuntime } from './module.js'\nimport type {\n StateTraitProgram,\n TraitConvergeGenerationEvidence,\n TraitConvergePlanCacheEvidence,\n} from '../../state-trait/model.js'\nimport type * as StateTraitConverge from '../../state-trait/converge.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport { setRuntimeInternals } from './runtimeInternalsAccessor.js'\nimport type * as RowId from '../../state-trait/rowid.js'\n\nexport type TraitState = {\n program: StateTraitProgram<any> | undefined\n convergeStaticIrDigest: string | undefined\n convergePlanCache: StateTraitConverge.ConvergePlanCache | undefined\n convergeGeneration: TraitConvergeGenerationEvidence\n pendingCacheMissReason: TraitConvergePlanCacheEvidence['missReason'] | undefined\n /**\n * Number of times the pending miss reason has been updated since the last txn window.\n * Used for generation-thrash self-protection (multiple bumps before any txn runs).\n */\n pendingCacheMissReasonCount: number\n lastConvergeIrKeys: { readonly writersKey: string; readonly depsKey: string } | undefined\n listConfigs: ReadonlyArray<RowId.ListConfig>\n}\n\nexport const installInternalHooks = <S, A>(args: {\n readonly runtime: PublicModuleRuntime<S, A>\n readonly runtimeInternals: RuntimeInternals\n}): Effect.Effect<void, never, never> =>\n Effect.sync(() => {\n const { runtime, 
runtimeInternals } = args\n\n setRuntimeInternals(runtime as any, runtimeInternals)\n })\n","import { Deferred, Effect, ServiceMap } from 'effect'\n\nexport type RootContextLifecycleState = 'uninitialized' | 'merged' | 'ready' | 'failed'\n\nexport type RootContextLifecycleReasonCode =\n | 'root_context::merge_duplicate'\n | 'root_context::ready_without_merge'\n | 'root_context::ready_after_failed'\n | 'root_context::ready_duplicate'\n\nexport interface RootContextLifecycle {\n readonly state: RootContextLifecycleState\n readonly reasonCode?: RootContextLifecycleReasonCode\n}\n\nexport class RootContextLifecycleError extends Error {\n readonly _tag = 'RootContextLifecycleError'\n\n constructor(\n readonly reasonCode: RootContextLifecycleReasonCode,\n readonly fromState: RootContextLifecycleState,\n message: string,\n ) {\n super(message)\n this.name = 'RootContextLifecycleError'\n }\n}\n\nexport interface RootContext {\n context: ServiceMap.ServiceMap<any> | undefined\n readonly ready: Deferred.Deferred<ServiceMap.ServiceMap<any>, never>\n lifecycle: RootContextLifecycle\n readonly appId?: string\n readonly appModuleIds?: ReadonlyArray<string>\n}\n\nclass RootContextTagImpl extends ServiceMap.Service<RootContextTagImpl, RootContext>()('@logixjs/core/RootContext') {}\n\nexport const RootContextTag = RootContextTagImpl\n\nconst failRootContextTransition = (\n root: RootContext,\n reasonCode: RootContextLifecycleReasonCode,\n message: string,\n): RootContextLifecycleError => {\n const fromState = root.lifecycle.state\n root.lifecycle = { state: 'failed', reasonCode }\n return new RootContextLifecycleError(reasonCode, fromState, message)\n}\n\nexport const makeRootContext = (args?: {\n readonly appId?: string\n readonly appModuleIds?: ReadonlyArray<string>\n}): Effect.Effect<RootContext, never, never> =>\n Effect.gen(function* () {\n const ready = yield* Deferred.make<ServiceMap.ServiceMap<any>>()\n return {\n context: undefined,\n ready,\n lifecycle: { state: 
'uninitialized' },\n appId: args?.appId,\n appModuleIds: args?.appModuleIds,\n } satisfies RootContext\n })\n\nexport const mergeRootContext = (\n root: RootContext,\n context: ServiceMap.ServiceMap<any>,\n): Effect.Effect<RootContext, RootContextLifecycleError, never> =>\n Effect.gen(function* () {\n if (root.context !== undefined || root.lifecycle.state !== 'uninitialized') {\n return yield* Effect.fail(\n failRootContextTransition(\n root,\n 'root_context::merge_duplicate',\n '[Logix] RootContext merge duplicated during app assembly.',\n ),\n )\n }\n\n root.context = context\n root.lifecycle = { state: 'merged' }\n return root\n })\n\nexport const readyRootContext = (root: RootContext): Effect.Effect<void, RootContextLifecycleError, never> =>\n Effect.gen(function* () {\n if (root.lifecycle.state === 'failed') {\n return yield* Effect.fail(\n failRootContextTransition(\n root,\n 'root_context::ready_after_failed',\n '[Logix] RootContext ready attempted after lifecycle entered failed state.',\n ),\n )\n }\n\n if (root.context === undefined || root.lifecycle.state === 'uninitialized') {\n return yield* Effect.fail(\n failRootContextTransition(\n root,\n 'root_context::ready_without_merge',\n '[Logix] RootContext ready attempted before merge during app assembly.',\n ),\n )\n }\n\n const readySucceeded = yield* Deferred.succeed(root.ready, root.context)\n if (!readySucceeded || root.lifecycle.state === 'ready') {\n return yield* Effect.fail(\n failRootContextTransition(\n root,\n 'root_context::ready_duplicate',\n '[Logix] RootContext ready was already completed before app assembly finished.',\n ),\n )\n }\n\n root.lifecycle = { state: 'ready' }\n })\n","import { Cause, Deferred, Effect, Fiber, Layer, Option, PubSub, Queue, Ref, ServiceMap, Scope, Stream } from 'effect'\nimport * as Debug from '../DebugSink.js'\nimport { toSerializableErrorSummary } from '../errorSummary.js'\nimport * as TaskRunner from '../TaskRunner.js'\nimport { isDevEnv } from 
'../env.js'\nimport { getRuntimeInternals } from '../runtimeInternalsAccessor.js'\nimport * as Identity from './identity.js'\nimport * as ProcessConcurrency from './concurrency.js'\nimport * as ProcessEvents from './events.js'\nimport * as Meta from './meta.js'\nimport { resolveSchemaAst } from './selectorSchema.js'\nimport { compileProcessTriggerStartPlan, type ProcessTriggerStartPlan } from './triggerStartPlan.js'\nimport { makeNonPlatformTriggerStreamFactory } from './triggerStreams.js'\nimport { moduleRuntimeTagFromModuleId } from '../../../serviceId.js'\nimport type {\n ProcessControlRequest,\n ProcessDefinition,\n ProcessEvent,\n ProcessInstallation,\n ProcessInstanceIdentity,\n ProcessInstanceStatus,\n ProcessPlatformEvent,\n ProcessScope,\n ProcessTrigger,\n ProcessTriggerSpec,\n SerializableErrorSummary,\n} from './protocol.js'\nimport * as Supervision from './supervision.js'\n\ntype InstallationKey = string\ntype ProcessInstanceId = string\n\ntype ProcessInstallMode = 'switch' | 'exhaust'\n\ntype PlatformEventTriggerSpec = Extract<ProcessTriggerSpec, { readonly kind: 'platformEvent' }>\n\ntype InstallationState = {\n readonly identity: {\n readonly processId: string\n readonly scope: ProcessScope\n }\n readonly scopeKey: string\n readonly definition: ProcessDefinition\n env: ServiceMap.ServiceMap<any>\n forkScope: Scope.Scope\n readonly process: Effect.Effect<void, any, unknown>\n readonly kind: Meta.ProcessMeta['kind']\n readonly platformEventTriggerIndex: ReadonlyMap<string, ReadonlyArray<PlatformEventTriggerSpec>>\n readonly platformEventNames: ReadonlyArray<string>\n readonly startPlan: ProcessTriggerStartPlan\n enabled: boolean\n installedAt?: string\n nextRunSeq: number\n supervision: Supervision.SupervisionState\n currentInstanceId?: ProcessInstanceId\n pendingStart?: { readonly forkScope: Scope.Scope }\n}\n\ntype InstanceState = {\n readonly installationKey: InstallationKey\n readonly processInstanceId: ProcessInstanceId\n readonly identity: 
ProcessInstanceIdentity\n readonly processId: string\n readonly scope: ProcessScope\n readonly forkScope: Scope.Scope\n readonly platformTriggersQueue: Queue.Queue<ProcessTrigger>\n status: ProcessInstanceStatus\n nextEventSeq: number\n nextTriggerSeq: number\n fiber?: Fiber.Fiber<unknown, unknown>\n}\n\nexport interface ProcessRuntime {\n readonly install: <E, R>(\n process: Effect.Effect<void, E, R>,\n options: {\n readonly scope: ProcessScope\n readonly enabled?: boolean\n readonly installedAt?: string\n readonly mode?: ProcessInstallMode\n },\n ) => Effect.Effect<ProcessInstallation | undefined, never, R>\n readonly listInstallations: (filter?: {\n readonly scopeType?: ProcessScope['type']\n readonly scopeKey?: string\n }) => Effect.Effect<ReadonlyArray<ProcessInstallation>>\n readonly getInstanceStatus: (processInstanceId: string) => Effect.Effect<ProcessInstanceStatus | undefined>\n readonly controlInstance: (processInstanceId: string, request: ProcessControlRequest) => Effect.Effect<void>\n readonly deliverPlatformEvent: (event: ProcessPlatformEvent) => Effect.Effect<void>\n readonly events: Stream.Stream<ProcessEvent>\n readonly getEventsSnapshot: () => Effect.Effect<ReadonlyArray<ProcessEvent>>\n}\n\nexport class ProcessRuntimeTag extends ServiceMap.Service<ProcessRuntimeTag, ProcessRuntime>()('@logixjs/core/ProcessRuntime') {}\n\nconst currentProcessTrigger = ServiceMap.Reference<ProcessTrigger | undefined>('@logixjs/core/ProcessRuntime.currentTrigger', {\n defaultValue: () => undefined,\n})\nconst currentProcessEventBudget = ServiceMap.Reference<Ref.Ref<ProcessEvents.ProcessRunEventBudgetState> | undefined>(\n '@logixjs/core/ProcessRuntime.currentEventBudget',\n {\n defaultValue: () => undefined,\n },\n)\nconst RUNTIME_BOOT_EVENT = 'runtime:boot' as const\nconst PROCESS_EVENT_HISTORY_MAX_CAPACITY = 0xffff_fffe\n\nconst deriveDebugModuleId = (processId: string): string => `process:${processId}`\n\ntype ProcessDispatchPayload = 
NonNullable<ProcessEvent['dispatch']>\n\nconst deriveTxnAnchor = (event: ProcessEvent): { readonly txnSeq?: number; readonly txnId?: string } => {\n const trigger: any = event.trigger\n if (!trigger) return {}\n if (\n (trigger.kind === 'moduleAction' || trigger.kind === 'moduleStateChange') &&\n typeof trigger.instanceId === 'string' &&\n typeof trigger.txnSeq === 'number' &&\n Number.isFinite(trigger.txnSeq) &&\n trigger.txnSeq >= 1\n ) {\n const txnSeq = Math.floor(trigger.txnSeq)\n return {\n txnSeq,\n txnId: `${trigger.instanceId}::t${txnSeq}`,\n }\n }\n return {}\n}\n\ntype ProcessTriggerChainKernel = {\n readonly assignTriggerSeq: (trigger: ProcessTrigger) => ProcessTrigger\n readonly run: (trigger: ProcessTrigger, fatal: Deferred.Deferred<Cause.Cause<any>>) => Effect.Effect<void>\n readonly onDrop: (trigger: ProcessTrigger) => Effect.Effect<void>\n}\n\nconst makeProcessTriggerChainKernel = (args: {\n readonly shouldRecordChainEvents: boolean\n readonly nextTriggerSeq: () => number\n readonly makeBudgetState: (trigger: ProcessTrigger) => ProcessEvents.ProcessRunEventBudgetState\n readonly emitTriggerEvent: (trigger: ProcessTrigger, severity: ProcessEvent['severity']) => Effect.Effect<void>\n readonly runWithoutChainBudget: (\n trigger: ProcessTrigger,\n fatal: Deferred.Deferred<Cause.Cause<any>>,\n ) => Effect.Effect<void>\n}): ProcessTriggerChainKernel => {\n const assignTriggerSeq = (trigger: ProcessTrigger): ProcessTrigger => {\n if (!args.shouldRecordChainEvents) {\n return trigger\n }\n return {\n ...trigger,\n triggerSeq: args.nextTriggerSeq(),\n }\n }\n\n const run = (trigger: ProcessTrigger, fatal: Deferred.Deferred<Cause.Cause<any>>): Effect.Effect<void> => {\n if (!args.shouldRecordChainEvents) {\n return args.runWithoutChainBudget(trigger, fatal)\n }\n\n return Effect.gen(function* () {\n const budgetRef = yield* Ref.make(args.makeBudgetState(trigger))\n return yield* Effect.provideService(args.emitTriggerEvent(trigger, 
'info').pipe(Effect.flatMap(() => args.runWithoutChainBudget(trigger, fatal))), currentProcessEventBudget, budgetRef)\n })\n }\n\n const onDrop = (trigger: ProcessTrigger): Effect.Effect<void> => args.emitTriggerEvent(trigger, 'warning')\n\n return {\n assignTriggerSeq,\n run,\n onDrop,\n }\n}\n\nconst shouldNoopDueToSyncTxn = (scope: ProcessScope, kind: string): Effect.Effect<boolean, never, never> => {\n const moduleId = scope.type === 'moduleInstance' ? scope.moduleId : undefined\n const instanceId = scope.type === 'moduleInstance' ? scope.instanceId : undefined\n return Effect.provideService(\n TaskRunner.shouldNoopInSyncTransactionFiber({\n moduleId,\n instanceId,\n code: 'process::invalid_usage',\n severity: 'error',\n message:\n 'ProcessRuntime scheduling is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n \"Trigger/schedule Process outside the transaction window (e.g. in a watcher's run section or a separate fiber); \" +\n 'do not trigger Process directly inside a reducer / synchronous transaction body.',\n kind,\n }),\n TaskRunner.inSyncTransactionFiber,\n false,\n )\n}\n\nconst resolveRuntimeStateSchemaAst = (runtime: unknown): ReturnType<typeof resolveSchemaAst> => {\n try {\n const internals = getRuntimeInternals(runtime as any)\n return resolveSchemaAst(internals.stateSchema)\n } catch {\n return undefined\n }\n}\n\nconst withModuleHint = (error: Error, moduleId: string): Error => {\n const hint = (error as any).hint\n if (typeof hint === 'string' && hint.length > 0) {\n if (!hint.includes('moduleId=')) {\n ;(error as any).hint = `moduleId=${moduleId}\\n${hint}`\n }\n return error\n }\n ;(error as any).hint = `moduleId=${moduleId}`\n return error\n}\n\nconst actionIdFromUnknown = (action: unknown): string | undefined => {\n if (!action || typeof action !== 'object') return undefined\n const anyAction = action as any\n if (typeof anyAction._tag === 'string' && anyAction._tag.length > 0) return 
anyAction._tag\n if (typeof anyAction.type === 'string' && anyAction.type.length > 0) return anyAction.type\n return undefined\n}\n\nconst buildPlatformEventTriggerIndex = (\n definition: ProcessDefinition,\n): ReadonlyMap<string, ReadonlyArray<PlatformEventTriggerSpec>> => {\n const index = new Map<string, PlatformEventTriggerSpec[]>()\n for (const trigger of definition.triggers) {\n if (trigger.kind !== 'platformEvent') continue\n const current = index.get(trigger.platformEvent)\n if (current) {\n current.push(trigger)\n } else {\n index.set(trigger.platformEvent, [trigger])\n }\n }\n return index\n}\n\nconst syncPlatformEventInstallations = (options: {\n installationKey: InstallationKey,\n previousEventNames: ReadonlyArray<string>\n nextTriggerIndex: ReadonlyMap<string, ReadonlyArray<PlatformEventTriggerSpec>>\n installationsByEventName: Map<string, Set<InstallationKey>>\n}): ReadonlyArray<string> => {\n for (const eventName of options.previousEventNames) {\n const current = options.installationsByEventName.get(eventName)\n if (!current) continue\n current.delete(options.installationKey)\n if (current.size === 0) {\n options.installationsByEventName.delete(eventName)\n }\n }\n\n const nextEventNames = Array.from(options.nextTriggerIndex.keys())\n for (const eventName of nextEventNames) {\n const current = options.installationsByEventName.get(eventName)\n if (current) {\n current.add(options.installationKey)\n } else {\n options.installationsByEventName.set(eventName, new Set([options.installationKey]))\n }\n }\n\n return nextEventNames\n}\n\nexport const make = (options?: {\n readonly maxEventHistory?: number\n}): Effect.Effect<ProcessRuntime, never, Scope.Scope> =>\n Effect.gen(function* () {\n const runtimeScope = yield* Effect.scope\n const requestedMaxEventHistory =\n typeof options?.maxEventHistory === 'number' &&\n Number.isFinite(options.maxEventHistory) &&\n options.maxEventHistory >= 0\n ? 
Math.floor(options.maxEventHistory)\n : 500\n const maxEventHistory = Math.min(requestedMaxEventHistory, PROCESS_EVENT_HISTORY_MAX_CAPACITY)\n\n const installations = new Map<InstallationKey, InstallationState>()\n const installationsByPlatformEvent = new Map<string, Set<InstallationKey>>()\n const instances = new Map<ProcessInstanceId, InstanceState>()\n\n const eventHistoryCapacity = maxEventHistory > 0 ? maxEventHistory : 0\n const eventHistoryRing: ProcessEvent[] = []\n let eventHistoryStart = 0\n let eventHistorySize = 0\n const eventsHub = yield* PubSub.sliding<ProcessEvent>(Math.max(1, Math.min(2048, maxEventHistory)))\n\n const appendEventHistory = (event: ProcessEvent): void => {\n if (eventHistoryCapacity <= 0) {\n eventHistorySize = 0\n eventHistoryStart = 0\n return\n }\n\n if (eventHistorySize < eventHistoryCapacity) {\n const writeIndex = (eventHistoryStart + eventHistorySize) % eventHistoryCapacity\n if (writeIndex === eventHistoryRing.length) {\n eventHistoryRing.push(event)\n } else {\n eventHistoryRing[writeIndex] = event\n }\n eventHistorySize += 1\n return\n }\n\n eventHistoryRing[eventHistoryStart] = event\n eventHistoryStart = (eventHistoryStart + 1) % eventHistoryCapacity\n }\n\n const snapshotEventHistory = (): ReadonlyArray<ProcessEvent> => {\n if (eventHistoryCapacity <= 0 || eventHistorySize === 0) {\n return []\n }\n\n const snapshot = new Array<ProcessEvent>(eventHistorySize)\n for (let index = 0; index < eventHistorySize; index += 1) {\n const ringIndex = (eventHistoryStart + index) % eventHistoryCapacity\n snapshot[index] = eventHistoryRing[ringIndex] as ProcessEvent\n }\n return snapshot\n }\n\n const recordDebugEvent = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n const diagnosticsLevel = yield* Effect.service(Debug.currentDiagnosticsLevel).pipe(Effect.orDie)\n\n // diagnostics=off: avoid entering Debug sinks (near-zero cost); error cases are exposed via diagnostic events.\n if (diagnosticsLevel === 
'off') {\n return\n }\n\n const processId = event.identity.identity.processId\n const processInstanceId = Identity.processInstanceIdFromIdentity(event.identity)\n const moduleId = deriveDebugModuleId(processId)\n const { txnSeq, txnId } = deriveTxnAnchor(event)\n\n yield* Debug.record({\n type: event.type,\n moduleId,\n instanceId: processInstanceId,\n identity: event.identity,\n severity: event.severity,\n eventSeq: event.eventSeq,\n timestampMs: event.timestampMs,\n trigger: event.trigger,\n dispatch: event.dispatch,\n error: event.error,\n budgetEnvelope: (event as any).budgetEnvelope,\n degrade: (event as any).degrade,\n txnSeq,\n txnId,\n } as any)\n })\n\n const publishEvent = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n appendEventHistory(event)\n yield* PubSub.publish(eventsHub, event)\n yield* recordDebugEvent(event)\n })\n\n const emit = (event: ProcessEvent): Effect.Effect<void> =>\n Effect.gen(function* () {\n const budgetRef = yield* Effect.service(currentProcessEventBudget).pipe(Effect.orDie)\n if (budgetRef) {\n const decision = yield* Ref.modify(budgetRef, (state) => {\n const [nextDecision, nextState] = ProcessEvents.applyProcessRunEventBudget(state, event)\n return [nextDecision, nextState] as const\n })\n\n if (decision._tag === 'emit' || decision._tag === 'emitSummary') {\n yield* publishEvent(decision.event)\n }\n return\n }\n\n const enforced = ProcessEvents.enforceProcessEventMaxBytes(event)\n yield* publishEvent(enforced.event)\n })\n\n const emitErrorDiagnostic = (\n scope: ProcessScope,\n processId: string,\n code: string,\n message: string,\n hint?: string,\n ): Effect.Effect<void> => {\n if (!isDevEnv()) {\n return Effect.void\n }\n const moduleId = scope.type === 'moduleInstance' ? scope.moduleId : undefined\n const instanceId = scope.type === 'moduleInstance' ? 
scope.instanceId : undefined\n return Debug.record({\n type: 'diagnostic',\n moduleId,\n instanceId,\n code,\n severity: 'error',\n message,\n hint,\n actionTag: processId,\n kind: 'process_runtime',\n })\n }\n\n const resolveMissingDependencies = (installation: InstallationState): ReadonlyArray<string> => {\n const requires = installation.startPlan.dependencyModuleIds\n if (requires.length === 0) return []\n\n const missing: string[] = []\n for (const dep of requires) {\n if (typeof dep !== 'string' || dep.length === 0) continue\n\n const tag = moduleRuntimeTagFromModuleId(dep)\n const found = ServiceMap.getOption(installation.env, tag)\n if (Option.isNone(found)) {\n missing.push(dep)\n }\n }\n return missing\n }\n\n const buildModuleRuntimeRegistry = (\n installation: InstallationState,\n env: ServiceMap.ServiceMap<any>,\n ): ReadonlyMap<string, unknown> => {\n const registry = new Map<string, unknown>()\n for (const moduleId of installation.startPlan.dependencyModuleIds) {\n if (typeof moduleId !== 'string' || moduleId.length === 0 || registry.has(moduleId)) continue\n const tag = moduleRuntimeTagFromModuleId(moduleId)\n const found = ServiceMap.getOption(env, tag)\n if (Option.isSome(found)) {\n registry.set(moduleId, found.value)\n }\n }\n return registry\n }\n\n const stopInstance = (\n instance: InstanceState,\n reason: ProcessInstanceStatus['stoppedReason'],\n ): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (\n instance.status.status === 'stopped' ||\n instance.status.status === 'failed' ||\n instance.status.status === 'stopping'\n ) {\n return\n }\n\n const fiber = instance.fiber\n instance.status = {\n ...instance.status,\n status: 'stopping',\n stoppedReason: reason,\n }\n\n if (fiber) {\n yield* Fiber.interrupt(fiber)\n }\n\n yield* Queue.shutdown(instance.platformTriggersQueue)\n\n instance.status = {\n ...instance.status,\n status: 'stopped',\n stoppedReason: reason,\n }\n instance.fiber = undefined\n\n const evt: ProcessEvent = {\n type: 
'process:stop',\n identity: instance.identity,\n severity: 'info',\n eventSeq: instance.nextEventSeq++,\n timestampMs: Date.now(),\n }\n yield* emit(evt)\n\n const installation = installations.get(instance.installationKey)\n if (installation?.pendingStart) {\n installation.pendingStart = undefined\n yield* startInstallation(instance.installationKey)\n }\n })\n\n const startInstallation: (installationKey: InstallationKey) => Effect.Effect<void, never, never> = (installationKey) =>\n Effect.gen(function* () {\n const installation = installations.get(installationKey)\n if (!installation) return\n installation.pendingStart = undefined\n\n const noop = yield* shouldNoopDueToSyncTxn(installation.identity.scope, 'process_start_in_transaction')\n if (noop) return\n\n // Do not start again if an active instance already exists.\n const currentId = installation.currentInstanceId\n if (currentId) {\n const current = instances.get(currentId)\n if (current && (current.status.status === 'running' || current.status.status === 'starting')) {\n return\n }\n }\n\n const runSeq = installation.nextRunSeq++\n const identity: ProcessInstanceIdentity = {\n identity: installation.identity,\n runSeq,\n }\n const processInstanceId = Identity.processInstanceIdFromIdentity(identity)\n\n const platformTriggersQueue = yield* Queue.sliding<ProcessTrigger>(64)\n\n const instanceState: InstanceState = {\n installationKey,\n processInstanceId,\n identity,\n processId: installation.identity.processId,\n scope: installation.identity.scope,\n forkScope: installation.forkScope,\n platformTriggersQueue,\n status: {\n identity,\n status: 'starting',\n },\n nextEventSeq: 1,\n nextTriggerSeq: 1,\n }\n\n instances.set(processInstanceId, instanceState)\n installation.currentInstanceId = processInstanceId\n\n // When forkScope is disposed (e.g. 
uiSubtree unmount), ensure the instance transitions to stopped and emits a stop event.\n // - Do not rely on unstable \"whether interruption reaches catchAllCause\" behavior.\n // - Do not double-register on runtimeScope; the runtime finalizer already stops all instances.\n if (installation.forkScope !== runtimeScope) {\n yield* Scope.addFinalizer(\n installation.forkScope as Scope.Closeable,\n Effect.suspend(() => {\n const status = instanceState.status.status\n if (status === 'stopped' || status === 'failed' || status === 'stopping') {\n return Effect.void\n }\n return stopInstance(instanceState, 'scopeDisposed')\n }).pipe(Effect.catchCause(() => Effect.void)),\n )\n }\n\n // start event: indicates the instance has entered the start flow (fiber has been forked).\n yield* emit({\n type: 'process:start',\n identity,\n severity: 'info',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n })\n\n const missing = resolveMissingDependencies(installation)\n if (missing.length > 0) {\n const hint = isDevEnv()\n ? [\n 'Strict scope dependency resolution: missing required modules in the current scope.',\n `missing: ${missing.join(', ')}`,\n '',\n 'fix:',\n '- Provide the missing module implementation(s) in the same scope via imports.',\n ` Example: RootModule.implement({ imports: [${missing[0]}.implement(...).impl], processes: [...] 
})`,\n '- Do not rely on cross-scope fallbacks / guessing instances.',\n ].join('\\n')\n : undefined\n\n const error: SerializableErrorSummary = {\n message: `Missing dependencies in scope: ${missing.join(', ')}`,\n code: 'process::missing_dependency',\n hint,\n }\n\n instanceState.status = {\n ...instanceState.status,\n status: 'failed',\n stoppedReason: 'failed',\n lastError: error,\n }\n\n yield* emit({\n type: 'process:error',\n identity,\n severity: 'error',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n\n yield* emitErrorDiagnostic(\n installation.identity.scope,\n installation.identity.processId,\n 'process::missing_dependency',\n error.message,\n hint,\n )\n return\n }\n\n const shouldRecordChainEvents = installation.definition.diagnosticsLevel !== 'off'\n\n const baseEnv = installation.env\n const moduleRuntimeRegistry = buildModuleRuntimeRegistry(installation, baseEnv)\n\n const makeWrappedEnv = (): ServiceMap.ServiceMap<any> => {\n if (!shouldRecordChainEvents) {\n return baseEnv\n }\n\n const requires = installation.startPlan.dispatchTracingModuleIds\n if (requires.length === 0) {\n return baseEnv\n }\n\n let nextEnv = baseEnv\n\n for (const moduleId of requires) {\n if (typeof moduleId !== 'string' || moduleId.length === 0) continue\n const tag = moduleRuntimeTagFromModuleId(moduleId)\n const found = ServiceMap.getOption(baseEnv, tag)\n if (Option.isNone(found)) continue\n const runtime = found.value as any\n\n const recordDispatch = (action: unknown) =>\n Effect.gen(function* () {\n const trigger = yield* Effect.service(currentProcessTrigger).pipe(Effect.orDie)\n if (!trigger) return\n\n const actionId = actionIdFromUnknown(action) ?? 'unknown'\n const dispatchModuleId = typeof runtime.moduleId === 'string' ? runtime.moduleId : moduleId\n const dispatchInstanceId = typeof runtime.instanceId === 'string' ? 
runtime.instanceId : 'unknown'\n\n yield* emit(\n makeDispatchEvent(trigger, {\n moduleId: dispatchModuleId,\n instanceId: dispatchInstanceId,\n actionId,\n }),\n )\n })\n\n const wrapped = {\n ...runtime,\n dispatch: (action: unknown) => runtime.dispatch(action).pipe(Effect.tap(() => recordDispatch(action))),\n dispatchLowPriority: (action: unknown) =>\n runtime.dispatchLowPriority(action).pipe(Effect.tap(() => recordDispatch(action))),\n dispatchBatch: (actions: ReadonlyArray<unknown>) =>\n runtime\n .dispatchBatch(actions)\n .pipe(Effect.tap(() => Effect.forEach(actions, recordDispatch, { discard: true }))),\n }\n\n nextEnv = ServiceMap.add(nextEnv, tag, wrapped)\n }\n\n return nextEnv\n }\n\n const wrappedEnv = makeWrappedEnv()\n const providedProcess = Effect.provide(installation.process, wrappedEnv)\n\n const nextProcessEventMeta = () => ({\n identity,\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n })\n\n const makeDispatchEvent = (\n trigger: ProcessTrigger,\n dispatch: ProcessDispatchPayload,\n ): ProcessEvent => ({\n type: 'process:dispatch',\n trigger,\n dispatch,\n severity: 'info',\n ...nextProcessEventMeta(),\n })\n\n const makeTriggerEvent = (\n trigger: ProcessTrigger,\n severity: ProcessEvent['severity'],\n error?: SerializableErrorSummary,\n ): ProcessEvent => ({\n type: 'process:trigger',\n trigger,\n severity,\n ...(error ? 
{ error } : null),\n ...nextProcessEventMeta(),\n })\n\n const makeTriggerStream = makeNonPlatformTriggerStreamFactory({\n moduleRuntimeRegistry,\n shouldRecordChainEvents,\n actionIdFromUnknown,\n resolveRuntimeStateSchemaAst,\n withModuleHint,\n emitSelectorWarning: (trigger, warning) => emit(makeTriggerEvent(trigger, 'warning', warning)),\n })\n\n const makeRun = (trigger: ProcessTrigger, fatal: Deferred.Deferred<Cause.Cause<any>>): Effect.Effect<void> =>\n Effect.provideService(providedProcess.pipe(\n Effect.catchCause((cause) => {\n if (Cause.hasInterruptsOnly(cause)) {\n return Effect.void\n }\n return Deferred.succeed(fatal, cause).pipe(\n Effect.asVoid,\n Effect.catch(() => Effect.void),\n )\n }),\n ), currentProcessTrigger, trigger)\n\n const emitTriggerEvent = (trigger: ProcessTrigger, severity: ProcessEvent['severity']): Effect.Effect<void> => {\n if (!shouldRecordChainEvents) {\n return Effect.void\n }\n\n return emit(makeTriggerEvent(trigger, severity))\n }\n const triggerChainKernel = makeProcessTriggerChainKernel({\n shouldRecordChainEvents,\n nextTriggerSeq: () => instanceState.nextTriggerSeq++,\n makeBudgetState: (trigger) =>\n ProcessEvents.makeProcessRunEventBudgetState({\n runId: ProcessEvents.makeProcessRunBudgetRunId(identity, trigger),\n }),\n emitTriggerEvent,\n runWithoutChainBudget: makeRun,\n })\n\n const policy = installation.definition.concurrency\n const bootTrigger = installation.startPlan.bootTrigger\n\n const streamReady = yield* Deferred.make<void>()\n const markStreamReady: Effect.Effect<void> = Deferred.succeed(streamReady, undefined).pipe(Effect.asVoid)\n\n const instanceProgram = Effect.gen(function* () {\n const fatal = yield* Deferred.make<Cause.Cause<any>>()\n\n const platformEventStream: Stream.Stream<ProcessTrigger> = Stream.fromQueue(\n instanceState.platformTriggersQueue,\n )\n\n const nonPlatformTriggers = installation.startPlan.nonPlatformTriggers\n\n const streams = yield* Effect.forEach(nonPlatformTriggers, 
makeTriggerStream)\n\n const triggerStream = Stream.mergeAll([platformEventStream, ...streams], {\n concurrency: 'unbounded',\n })\n\n const reportQueueOverflow = (\n info: ProcessConcurrency.ProcessTriggerQueueOverflowInfo,\n ): Effect.Effect<void> => {\n const err = new Error('Process trigger queue overflow (serial maxQueue guard).')\n ;(err as any).code = 'process::serial_queue_overflow'\n ;(err as any).hint = [\n `mode=${info.mode}`,\n `queue: current=${info.currentLength} peak=${info.peak}`,\n `maxQueue: configured=${info.limit.configured} guard=${info.limit.guard}`,\n `policy: ${JSON.stringify(info.policy)}`,\n '',\n 'fix:',\n '- Configure concurrency.maxQueue (serial) to a finite value, or switch to mode=latest/drop to avoid unbounded backlog.',\n ].join('\\n')\n return Deferred.succeed(fatal, Cause.fail(err)).pipe(\n Effect.asVoid,\n Effect.catch(() => Effect.void),\n )\n }\n\n const runnerFiber = yield* Effect.forkScoped(\n ProcessConcurrency.runProcessTriggerStream({\n stream: triggerStream,\n policy,\n assignTriggerSeq: triggerChainKernel.assignTriggerSeq,\n run: (trigger) => triggerChainKernel.run(trigger, fatal),\n onDrop: triggerChainKernel.onDrop,\n onQueueOverflow: reportQueueOverflow,\n }),\n )\n\n // Ensure the trigger stream fiber has started pulling, otherwise moduleAction/moduleStateChange events\n // may be published before any subscriber exists (PubSub streams drop events without subscribers).\n //\n // We rely on the fiber reaching \"Suspended\" (typically blocked on a queue take) as a proxy that the\n // subscription has been established.\n for (let i = 0; i < 64; i++) {\n const exitOpt = yield* Fiber.await(runnerFiber).pipe(Effect.timeoutOption(0))\n if (Option.isSome(exitOpt)) {\n break\n }\n yield* Effect.yieldNow\n }\n yield* markStreamReady\n\n if (bootTrigger) {\n yield* Queue.offer(instanceState.platformTriggersQueue, bootTrigger)\n }\n\n const cause = yield* Deferred.await(fatal)\n yield* Fiber.interrupt(runnerFiber)\n return yield* 
Effect.failCause(cause)\n })\n\n const processFiberEffect = Effect.provideService(\n Effect.scoped(instanceProgram).pipe(\n Effect.ensuring(markStreamReady),\n Effect.catchCause((cause) =>\n Effect.gen(function* () {\n if (Cause.hasInterruptsOnly(cause)) {\n if (instanceState.status.status === 'stopping') {\n return\n }\n\n instanceState.status = {\n ...instanceState.status,\n status: 'stopped',\n stoppedReason: 'scopeDisposed',\n }\n instanceState.fiber = undefined\n\n yield* Effect.uninterruptible(\n emit({\n type: 'process:stop',\n identity,\n severity: 'info',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n }),\n )\n\n const pendingInstallation = installations.get(installationKey)\n if (pendingInstallation?.pendingStart) {\n pendingInstallation.pendingStart = undefined\n yield* startInstallation(installationKey)\n }\n return\n }\n\n const primary = Option.getOrElse(Cause.findErrorOption(cause), () => {\n const defects = cause.reasons.filter(Cause.isDieReason).map((reason) => reason.defect)\n return defects[0] ?? 
cause\n })\n const summary = toSerializableErrorSummary(primary)\n const error: SerializableErrorSummary = summary.errorSummary as any\n\n instanceState.status = {\n ...instanceState.status,\n status: 'failed',\n stoppedReason: 'failed',\n lastError: error,\n }\n\n yield* emit({\n type: 'process:error',\n identity,\n severity: 'error',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n\n const decision = Supervision.onFailure(\n installation.definition.errorPolicy,\n installation.supervision,\n Date.now(),\n )\n installation.supervision = decision.nextState\n\n if (decision.decision === 'restart') {\n yield* emit({\n type: 'process:restart',\n identity,\n severity: 'warning',\n eventSeq: instanceState.nextEventSeq++,\n timestampMs: Date.now(),\n error,\n })\n yield* startInstallation(installationKey)\n } else {\n yield* emitErrorDiagnostic(\n installation.identity.scope,\n installation.identity.processId,\n 'process::failed_stop',\n 'Process failed and stopped (failStop / restart limit reached).',\n `processId=${installation.identity.processId} scopeKey=${installation.scopeKey} failures=${decision.withinWindowFailures} maxRestarts=${decision.maxRestarts}`,\n )\n }\n }),\n ),\n ),\n TaskRunner.inSyncTransactionFiber,\n false,\n )\n\n const fiber = yield* Effect.forkIn(processFiberEffect, installation.forkScope)\n\n instanceState.fiber = fiber as Fiber.Fiber<unknown, unknown>\n instanceState.status = {\n ...instanceState.status,\n status: 'running',\n }\n\n // Hard guarantee: block until trigger subscriptions are acquired (or the instance fiber failed early).\n yield* Deferred.await(streamReady)\n })\n\n const install = <E, R>(\n process: Effect.Effect<void, E, R>,\n options: {\n readonly scope: ProcessScope\n readonly enabled?: boolean\n readonly installedAt?: string\n readonly mode?: ProcessInstallMode\n },\n ): Effect.Effect<ProcessInstallation | undefined, never, R> =>\n Effect.gen(function* () {\n const meta = 
Meta.getMeta(process)\n if (!meta) {\n return undefined\n }\n\n const env = yield* Effect.services<R>()\n const forkScopeOpt = yield* Effect.serviceOption(Scope.Scope)\n const forkScope = Option.isSome(forkScopeOpt) ? forkScopeOpt.value : runtimeScope\n\n const scopeKey = Identity.scopeKeyFromScope(options.scope)\n const identity = {\n processId: meta.definition.processId,\n scope: options.scope,\n } as const\n\n const installationKey = Identity.installationKeyFromIdentity(identity)\n const derived = Effect.suspend(() => process)\n Meta.attachMeta(derived, {\n ...meta,\n installationScope: options.scope,\n })\n\n const nextPlatformEventTriggerIndex = buildPlatformEventTriggerIndex(meta.definition)\n const nextStartPlan = compileProcessTriggerStartPlan(meta.definition)\n const existing = installations.get(installationKey)\n if (existing) {\n const updated: InstallationState = {\n ...existing,\n definition: meta.definition,\n env: env as ServiceMap.ServiceMap<any>,\n forkScope,\n process: derived as unknown as Effect.Effect<void, any, unknown>,\n kind: meta.kind ?? 'process',\n platformEventTriggerIndex: nextPlatformEventTriggerIndex,\n platformEventNames: syncPlatformEventInstallations({\n installationKey,\n previousEventNames: existing.platformEventNames,\n nextTriggerIndex: nextPlatformEventTriggerIndex,\n installationsByEventName: installationsByPlatformEvent,\n }),\n startPlan: nextStartPlan,\n enabled: options.enabled ?? existing.enabled,\n installedAt: options.installedAt ?? existing.installedAt,\n }\n installations.set(installationKey, updated)\n\n if (!updated.enabled) {\n updated.pendingStart = undefined\n return {\n identity,\n enabled: updated.enabled,\n installedAt: updated.installedAt,\n } satisfies ProcessInstallation\n }\n\n const currentId = updated.currentInstanceId\n const current = currentId ? 
instances.get(currentId) : undefined\n const status = current?.status.status\n\n if (status === 'running' || status === 'starting') {\n const mode: ProcessInstallMode = options.mode ?? 'switch'\n if (mode === 'switch' && current && current.forkScope !== forkScope) {\n updated.pendingStart = { forkScope }\n yield* Scope.addFinalizer(\n forkScope,\n Effect.sync(() => {\n const installation = installations.get(installationKey)\n if (!installation) return\n if (installation.pendingStart?.forkScope === forkScope) {\n installation.pendingStart = undefined\n }\n }),\n )\n } else {\n updated.pendingStart = undefined\n }\n return {\n identity,\n enabled: updated.enabled,\n installedAt: updated.installedAt,\n } satisfies ProcessInstallation\n }\n\n if (status === 'stopping') {\n const mode: ProcessInstallMode = options.mode ?? 'switch'\n if (mode === 'switch') {\n updated.pendingStart = { forkScope }\n yield* Scope.addFinalizer(\n forkScope,\n Effect.sync(() => {\n const installation = installations.get(installationKey)\n if (!installation) return\n if (installation.pendingStart?.forkScope === forkScope) {\n installation.pendingStart = undefined\n }\n }),\n )\n } else {\n updated.pendingStart = undefined\n }\n return {\n identity,\n enabled: updated.enabled,\n installedAt: updated.installedAt,\n } satisfies ProcessInstallation\n }\n\n updated.pendingStart = undefined\n yield* startInstallation(installationKey)\n return {\n identity,\n enabled: updated.enabled,\n installedAt: updated.installedAt,\n } satisfies ProcessInstallation\n }\n\n const nextPlatformEventNames = syncPlatformEventInstallations({\n installationKey,\n previousEventNames: [],\n nextTriggerIndex: nextPlatformEventTriggerIndex,\n installationsByEventName: installationsByPlatformEvent,\n })\n\n const installation: InstallationState = {\n identity,\n scopeKey,\n definition: meta.definition,\n env: env as ServiceMap.ServiceMap<any>,\n forkScope,\n process: derived as unknown as Effect.Effect<void, any, 
unknown>,\n kind: meta.kind ?? 'process',\n platformEventTriggerIndex: nextPlatformEventTriggerIndex,\n platformEventNames: nextPlatformEventNames,\n startPlan: nextStartPlan,\n enabled: options.enabled ?? true,\n installedAt: options.installedAt,\n nextRunSeq: 1,\n supervision: Supervision.initialState(),\n pendingStart: undefined,\n }\n\n installations.set(installationKey, installation)\n\n if (installation.enabled) {\n yield* startInstallation(installationKey)\n }\n\n return {\n identity,\n enabled: installation.enabled,\n installedAt: installation.installedAt,\n } satisfies ProcessInstallation\n })\n\n const listInstallations: ProcessRuntime['listInstallations'] = (filter) =>\n Effect.sync(() => {\n const scopeType = filter?.scopeType\n const scopeKey = filter?.scopeKey\n const out: ProcessInstallation[] = []\n for (const installation of installations.values()) {\n if (scopeType && installation.identity.scope.type !== scopeType) continue\n if (scopeKey && installation.scopeKey !== scopeKey) continue\n out.push({\n identity: installation.identity,\n enabled: installation.enabled,\n installedAt: installation.installedAt,\n })\n }\n return out\n })\n\n const getInstanceStatus: ProcessRuntime['getInstanceStatus'] = (processInstanceId) =>\n Effect.sync(() => instances.get(processInstanceId)?.status)\n\n const controlInstance: ProcessRuntime['controlInstance'] = (processInstanceId, request) =>\n Effect.suspend(() => {\n const instance = instances.get(processInstanceId)\n if (!instance) {\n return Effect.void\n }\n\n return shouldNoopDueToSyncTxn(instance.scope, 'process_control_in_transaction').pipe(\n Effect.flatMap((noop) => {\n if (noop) {\n return Effect.void\n }\n\n if (request.action === 'stop') {\n return stopInstance(instance, 'manualStop')\n }\n\n if (request.action === 'restart') {\n return stopInstance(instance, 'manualStop').pipe(\n Effect.flatMap(() => {\n const installation = installations.get(instance.installationKey)\n if (!installation) {\n return 
Effect.void\n }\n installation.currentInstanceId = undefined\n return startInstallation(instance.installationKey)\n }),\n )\n }\n\n // start: only applies to stopped instances; reuses current runSeq without incrementing.\n if (request.action === 'start') {\n if (instance.status.status === 'running' || instance.status.status === 'starting') {\n return Effect.void\n }\n\n const installation = installations.get(instance.installationKey)\n if (!installation) {\n return Effect.void\n }\n installation.currentInstanceId = undefined\n return startInstallation(instance.installationKey)\n }\n\n return Effect.void\n }),\n )\n })\n\n const deliverPlatformEvent: ProcessRuntime['deliverPlatformEvent'] = (event) =>\n Effect.gen(function* () {\n const noop = yield* TaskRunner.shouldNoopInSyncTransactionFiber({\n code: 'process::invalid_usage',\n severity: 'error',\n message:\n 'ProcessRuntime platform events are not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint: 'Deliver platformEvent outside the transaction window.',\n kind: 'process_platform_event_in_transaction',\n })\n if (noop) return\n\n const eventName = event.eventName\n const installationKeys = installationsByPlatformEvent.get(eventName)\n if (!installationKeys || installationKeys.size === 0) {\n return\n }\n\n yield* Effect.forEach(\n installationKeys,\n (installationKey) =>\n Effect.suspend(() => {\n const installation = installations.get(installationKey)\n if (!installation) {\n return Effect.void\n }\n\n const currentInstanceId = installation.currentInstanceId\n if (!currentInstanceId) {\n return Effect.void\n }\n\n const instance = instances.get(currentInstanceId)\n if (!instance) {\n return Effect.void\n }\n\n if (instance.status.status !== 'starting' && instance.status.status !== 'running') {\n return Effect.void\n }\n\n const specs = installation.platformEventTriggerIndex.get(eventName)\n if (!specs || specs.length === 0) {\n return Effect.void\n }\n\n return 
Effect.forEach(\n specs,\n (spec) =>\n Queue.offer(instance.platformTriggersQueue, {\n kind: 'platformEvent',\n name: spec.name,\n platformEvent: spec.platformEvent,\n } satisfies ProcessTrigger),\n { discard: true },\n )\n }),\n { discard: true },\n )\n })\n\n const eventsStream: ProcessRuntime['events'] = Stream.fromPubSub(eventsHub)\n\n const getEventsSnapshot: ProcessRuntime['getEventsSnapshot'] = () => Effect.sync(snapshotEventHistory)\n\n yield* Effect.addFinalizer(() =>\n Effect.gen(function* () {\n for (const installation of installations.values()) {\n installation.pendingStart = undefined\n }\n for (const instance of instances.values()) {\n if (instance.fiber) {\n yield* stopInstance(instance, 'scopeDisposed')\n }\n }\n }).pipe(\n Effect.catchCause((cause) =>\n Effect.sync(() => {\n // Finalizers must not throw; best-effort logging only.\n if (isDevEnv()) {\n // eslint-disable-next-line no-console\n console.warn('[ProcessRuntime] finalizer failed', Cause.pretty(cause))\n }\n })),\n ),\n )\n\n return {\n install,\n listInstallations,\n getInstanceStatus,\n controlInstance,\n deliverPlatformEvent,\n events: eventsStream,\n getEventsSnapshot,\n } satisfies ProcessRuntime\n })\n\nexport const layer = (options?: { readonly maxEventHistory?: number }): Layer.Layer<ProcessRuntimeTag, never, never> =>\n Layer.effect(ProcessRuntimeTag, make(options))\n","import { Effect, Fiber, Option, Ref, Scope, Stream } from 'effect'\nimport * as LatestFiberSlot from '../LatestFiberSlot.js'\nimport type { TaskRunnerMode } from '../TaskRunner.js'\nimport type { ProcessConcurrencyPolicy, ProcessTrigger } from './protocol.js'\n\nexport const DEFAULT_SERIAL_QUEUE_GUARD_LIMIT = 4096\nexport const DEFAULT_PARALLEL_LIMIT = 16\nconst QUEUE_COMPACTION_MIN_CONSUMED = 64\n\nexport const toTaskRunnerMode = (policy: ProcessConcurrencyPolicy): TaskRunnerMode => {\n switch (policy.mode) {\n case 'latest':\n return 'latest'\n case 'serial':\n return 'task'\n case 'drop':\n return 'exhaust'\n 
case 'parallel':\n return 'parallel'\n }\n}\n\nexport type ResolvedQueueLimit = {\n /** User-configured limit; treated as unlimited when omitted (still bounded by the guard). */\n readonly configured: number | 'unbounded'\n /** Runtime-enforced guard limit (prevents unbounded memory growth). */\n readonly guard: number\n}\n\nexport const resolveQueueLimit = (\n maxQueue: unknown,\n options?: {\n readonly defaultGuard?: number\n },\n): ResolvedQueueLimit => {\n const defaultGuard = options?.defaultGuard ?? DEFAULT_SERIAL_QUEUE_GUARD_LIMIT\n\n const configured =\n typeof maxQueue === 'number' && Number.isFinite(maxQueue) && maxQueue >= 0 ? Math.floor(maxQueue) : 'unbounded'\n\n return {\n configured,\n guard: configured === 'unbounded' ? defaultGuard : configured,\n }\n}\n\nexport type ProcessTriggerQueueOverflowInfo = {\n readonly mode: 'serial' | 'parallel'\n readonly currentLength: number\n readonly peak: number\n readonly limit: ResolvedQueueLimit\n readonly policy: ProcessConcurrencyPolicy\n}\n\ntype TriggerQueueState = {\n queue: ProcessTrigger[]\n queueStart: number\n peak: number\n}\n\nconst queueLength = (state: TriggerQueueState): number => state.queue.length - state.queueStart\n\nconst compactQueueIfNeeded = (state: TriggerQueueState): void => {\n if (state.queueStart === 0) return\n\n if (state.queueStart >= state.queue.length) {\n state.queue = []\n state.queueStart = 0\n return\n }\n\n // Keep dequeue O(1) on the hot path and compact only occasionally.\n if (state.queueStart >= QUEUE_COMPACTION_MIN_CONSUMED && state.queueStart * 2 >= state.queue.length) {\n state.queue = state.queue.slice(state.queueStart)\n state.queueStart = 0\n }\n}\n\nconst enqueueTrigger = (state: TriggerQueueState, trigger: ProcessTrigger): number => {\n state.queue.push(trigger)\n const size = queueLength(state)\n if (size > state.peak) {\n state.peak = size\n }\n return size\n}\n\nconst dequeueTrigger = (state: TriggerQueueState): ProcessTrigger | undefined => {\n if 
(state.queueStart >= state.queue.length) {\n state.queue = []\n state.queueStart = 0\n return undefined\n }\n const next = state.queue[state.queueStart]\n state.queueStart += 1\n compactQueueIfNeeded(state)\n return next\n}\n\nexport const runProcessTriggerStream = (args: {\n readonly stream: Stream.Stream<ProcessTrigger>\n readonly policy: ProcessConcurrencyPolicy\n readonly assignTriggerSeq: (trigger: ProcessTrigger) => ProcessTrigger\n /** run a trigger to completion (the caller decides what a \\\"run\\\" means). */\n readonly run: (trigger: ProcessTrigger) => Effect.Effect<void, never, Scope.Scope>\n /** invoked when a trigger is dropped (only for mode=drop). */\n readonly onDrop: (trigger: ProcessTrigger) => Effect.Effect<void>\n /** invoked when internal queue guard is exceeded (fail-stop by default). */\n readonly onQueueOverflow: (info: ProcessTriggerQueueOverflowInfo) => Effect.Effect<void>\n readonly defaultParallelLimit?: number\n readonly defaultQueueGuard?: number\n}): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const policy = args.policy\n const defaultQueueGuard = args.defaultQueueGuard ?? 
DEFAULT_SERIAL_QUEUE_GUARD_LIMIT\n\n if (policy.mode === 'latest') {\n const stateRef = yield* LatestFiberSlot.make()\n\n const onTrigger = (trigger0: ProcessTrigger): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const trigger = args.assignTriggerSeq(trigger0)\n\n const [prevFiber, prevRunningId, runId] = yield* LatestFiberSlot.beginRun(stateRef)\n\n if (prevFiber && prevRunningId !== 0) {\n yield* Fiber.interrupt(prevFiber)\n }\n\n const fiber = yield* args\n .run(trigger)\n .pipe(Effect.ensuring(LatestFiberSlot.clearIfCurrent(stateRef, runId)), Effect.forkScoped)\n\n yield* LatestFiberSlot.setFiberIfCurrent(stateRef, runId, fiber)\n })\n\n return yield* Stream.runForEach(args.stream, onTrigger)\n }\n\n const busyRef = yield* Ref.make(false)\n const serialStateRef = yield* Ref.make({\n running: false,\n queue: [] as ProcessTrigger[],\n queueStart: 0,\n peak: 0,\n })\n const parallelStateRef = yield* Ref.make({\n active: 0,\n queue: [] as ProcessTrigger[],\n queueStart: 0,\n peak: 0,\n })\n\n const serialQueueLimit = resolveQueueLimit(policy.maxQueue, { defaultGuard: defaultQueueGuard })\n const parallelQueueLimit = resolveQueueLimit(undefined, { defaultGuard: defaultQueueGuard })\n const parallelLimit =\n typeof policy.maxParallel === 'number' && Number.isFinite(policy.maxParallel) && policy.maxParallel >= 1\n ? Math.floor(policy.maxParallel)\n : (args.defaultParallelLimit ?? 
DEFAULT_PARALLEL_LIMIT)\n\n const drainSerial = (): Effect.Effect<void, never, Scope.Scope> =>\n Effect.suspend(() =>\n Ref.modify(serialStateRef, (state) => {\n if (state.running || queueLength(state) === 0) {\n return [Option.none(), state] as const\n }\n const next = dequeueTrigger(state)\n if (next === undefined) {\n return [Option.none(), state] as const\n }\n state.running = true\n return [Option.some(next), state] as const\n }).pipe(\n Effect.flatMap((next) =>\n Option.match(next, {\n onNone: () => Effect.void,\n onSome: (trigger) =>\n Effect.forkScoped(\n args\n .run(trigger)\n .pipe(\n Effect.ensuring(\n Ref.update(serialStateRef, (s) => {\n s.running = false\n return s\n }),\n ),\n Effect.flatMap(() => drainSerial()),\n ),\n ).pipe(Effect.asVoid),\n }),\n ),\n ),\n )\n\n const drainParallel = (): Effect.Effect<void, never, Scope.Scope> =>\n Effect.suspend(() =>\n Ref.modify(parallelStateRef, (state) => {\n if (state.active >= parallelLimit || queueLength(state) === 0) {\n return [Option.none(), state] as const\n }\n const next = dequeueTrigger(state)\n if (next === undefined) {\n return [Option.none(), state] as const\n }\n state.active += 1\n return [Option.some(next), state] as const\n }).pipe(\n Effect.flatMap((next) =>\n Option.match(next, {\n onNone: () => Effect.void,\n onSome: (trigger) =>\n Effect.forkScoped(\n args.run(trigger).pipe(\n Effect.ensuring(\n Ref.update(parallelStateRef, (s) => {\n s.active = Math.max(0, s.active - 1)\n return s\n }),\n ),\n Effect.flatMap(() => drainParallel()),\n ),\n ).pipe(Effect.asVoid, Effect.flatMap(() => drainParallel())),\n }),\n ),\n ),\n )\n\n const onTrigger = (trigger0: ProcessTrigger): Effect.Effect<void, never, Scope.Scope> =>\n Effect.gen(function* () {\n const trigger = args.assignTriggerSeq(trigger0)\n\n if (policy.mode === 'drop') {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n yield* args.onDrop(trigger)\n return\n }\n\n yield* Effect.forkScoped(args.run(trigger).pipe(Effect.ensuring(Ref.set(busyRef, false))))\n return\n }\n\n if (policy.mode === 'parallel') {\n const [nextSize, peak] = yield* Ref.modify(parallelStateRef, (state) => {\n const size = enqueueTrigger(state, trigger)\n return [[size, state.peak] as const, state] as const\n })\n\n if (nextSize > parallelQueueLimit.guard) {\n yield* args.onQueueOverflow({\n mode: 'parallel',\n currentLength: nextSize,\n peak,\n limit: parallelQueueLimit,\n policy,\n })\n return\n }\n\n yield* drainParallel()\n return\n }\n\n // serial\n const [nextSize, peak] = yield* Ref.modify(serialStateRef, (state) => {\n const size = enqueueTrigger(state, trigger)\n return [[size, state.peak] as const, state] as const\n })\n\n if (nextSize > serialQueueLimit.guard) {\n yield* args.onQueueOverflow({\n mode: 'serial',\n currentLength: nextSize,\n peak,\n limit: serialQueueLimit,\n policy,\n })\n return\n }\n\n yield* drainSerial()\n })\n\n return yield* Stream.runForEach(args.stream, onTrigger)\n })\n","import type { ProcessEvent } from './protocol.js'\nimport {\n makeRunBudgetEnvelopeV1,\n makeRunDegradeMarkerV1,\n type RunDegradeReasonV1,\n} from '../diagnosticsBudget.js'\n\nexport const PROCESS_EVENT_MAX_BYTES = 4 * 1024\nexport const PROCESS_EVENT_MAX_EVENTS_PER_RUN = 50\nexport const PROCESS_EVENT_RESERVED_EVENTS_FOR_SUMMARY = 1\n\nexport type ProcessRunEventBudgetState = {\n readonly runId: string\n readonly maxEvents: number\n readonly maxBytes: number\n readonly emitted: number\n readonly dropped: number\n readonly downgraded: number\n readonly summaryEmitted: boolean\n}\n\nconst PROCESS_RUN_BUDGET_RUN_ID_UNKNOWN = 'process:run:unknown'\n\nconst scopeKeyFromProcessEventIdentity = (eventIdentity: ProcessEvent['identity']['identity']['scope']): string => {\n switch (eventIdentity.type) {\n case 'app':\n return 
`app:${eventIdentity.appId}`\n case 'moduleInstance':\n return `module:${eventIdentity.moduleId}:${eventIdentity.instanceId}`\n case 'uiSubtree':\n return `subtree:${eventIdentity.subtreeId}`\n }\n}\n\nexport const makeProcessRunBudgetRunId = (\n identity: ProcessEvent['identity'],\n trigger?: ProcessEvent['trigger'],\n): string => {\n const processId = identity.identity.processId\n const scopeKey = scopeKeyFromProcessEventIdentity(identity.identity.scope)\n const runSeq = identity.runSeq\n const triggerSeq =\n trigger && typeof trigger.triggerSeq === 'number' && Number.isFinite(trigger.triggerSeq) && trigger.triggerSeq >= 1\n ? Math.floor(trigger.triggerSeq)\n : undefined\n return triggerSeq != null\n ? `${processId}@${scopeKey}::r${runSeq}::g${triggerSeq}`\n : `${processId}@${scopeKey}::r${runSeq}`\n}\n\nexport const makeProcessRunEventBudgetState = (options?: {\n readonly runId?: string\n readonly maxEvents?: number\n readonly maxBytes?: number\n}): ProcessRunEventBudgetState => ({\n runId:\n typeof options?.runId === 'string' && options.runId.length > 0 ? options.runId : PROCESS_RUN_BUDGET_RUN_ID_UNKNOWN,\n maxEvents:\n typeof options?.maxEvents === 'number' && Number.isFinite(options.maxEvents) && options.maxEvents >= 0\n ? Math.floor(options.maxEvents)\n : PROCESS_EVENT_MAX_EVENTS_PER_RUN,\n maxBytes:\n typeof options?.maxBytes === 'number' && Number.isFinite(options.maxBytes) && options.maxBytes >= 0\n ? 
Math.floor(options.maxBytes)\n : PROCESS_EVENT_MAX_BYTES,\n emitted: 0,\n dropped: 0,\n downgraded: 0,\n summaryEmitted: false,\n})\n\nexport type ProcessRunEventBudgetDecision =\n | {\n readonly _tag: 'emit'\n readonly event: ProcessEvent\n }\n | {\n readonly _tag: 'emitSummary'\n readonly event: ProcessEvent\n }\n | {\n readonly _tag: 'drop'\n }\n\nconst attachProcessRunBudgetEnvelope = (\n event: ProcessEvent,\n state: ProcessRunEventBudgetState,\n degradeReason?: RunDegradeReasonV1,\n): ProcessEvent => ({\n ...event,\n budgetEnvelope: makeRunBudgetEnvelopeV1({\n domain: 'process',\n runId: state.runId,\n limits: {\n maxEvents: state.maxEvents,\n maxBytes: state.maxBytes,\n },\n usage: {\n emitted: state.emitted,\n dropped: state.dropped,\n downgraded: state.downgraded,\n },\n }),\n degrade: makeRunDegradeMarkerV1(Boolean(degradeReason), degradeReason),\n})\n\nconst finalizeBudgetedProcessEvent = (args: {\n readonly event: ProcessEvent\n readonly state: ProcessRunEventBudgetState\n readonly maxBytes: number\n readonly degradeReason?: RunDegradeReasonV1\n}): { readonly event: ProcessEvent; readonly state: ProcessRunEventBudgetState } => {\n const withEnvelope = attachProcessRunBudgetEnvelope(args.event, args.state, args.degradeReason)\n if (estimateEventBytes(withEnvelope) <= args.maxBytes) {\n return { event: withEnvelope, state: args.state }\n }\n\n // Envelope itself may push the event over the byte budget.\n // In this case we degrade to a slimmer event (keep degrade marker, drop envelope).\n let nextState: ProcessRunEventBudgetState = {\n ...args.state,\n downgraded: args.state.downgraded + 1,\n }\n\n const compact: ProcessEvent = {\n ...args.event,\n degrade: makeRunDegradeMarkerV1(true, args.degradeReason ?? 
'payload_oversized'),\n }\n\n const enforcedCompact = enforceProcessEventMaxBytes(compact, { maxBytes: args.maxBytes })\n if (enforcedCompact.downgraded) {\n nextState = {\n ...nextState,\n downgraded: nextState.downgraded + 1,\n }\n }\n\n return {\n event: enforcedCompact.event,\n state: nextState,\n }\n}\n\nconst makeBudgetSummaryEvent = (args: {\n readonly sourceEvent: ProcessEvent\n readonly maxEvents: number\n readonly maxBytes: number\n readonly emitted: number\n readonly dropped: number\n readonly downgraded: number\n}): ProcessEvent => ({\n type: 'process:trigger',\n identity: args.sourceEvent.identity,\n trigger: args.sourceEvent.trigger,\n severity: 'warning',\n eventSeq: args.sourceEvent.eventSeq,\n timestampMs: args.sourceEvent.timestampMs,\n error: {\n message: 'Process run event budget exceeded; further trigger/dispatch events are suppressed.',\n code: 'process::event_budget_exceeded',\n hint: `maxEvents=${args.maxEvents} maxBytes=${args.maxBytes} emitted=${args.emitted} dropped=${args.dropped} downgraded=${args.downgraded}`,\n },\n})\n\nexport const applyProcessRunEventBudget = (\n state: ProcessRunEventBudgetState,\n event: ProcessEvent,\n): readonly [ProcessRunEventBudgetDecision, ProcessRunEventBudgetState] => {\n const maxEvents = Math.max(0, state.maxEvents)\n const maxBytes = Math.max(0, state.maxBytes)\n\n if (state.summaryEmitted) {\n return [\n { _tag: 'drop' },\n {\n ...state,\n dropped: state.dropped + 1,\n },\n ]\n }\n\n const reserve = PROCESS_EVENT_RESERVED_EVENTS_FOR_SUMMARY\n const allowedRegular = Math.max(0, maxEvents - reserve)\n\n if (state.emitted < allowedRegular) {\n const enforced = enforceProcessEventMaxBytes(event, { maxBytes })\n const baseState: ProcessRunEventBudgetState = {\n ...state,\n emitted: state.emitted + 1,\n downgraded: state.downgraded + (enforced.downgraded ? 
1 : 0),\n }\n const finalized = finalizeBudgetedProcessEvent({\n event: enforced.event,\n state: baseState,\n maxBytes,\n degradeReason: enforced.downgraded ? 'payload_oversized' : undefined,\n })\n return [\n { _tag: 'emit', event: finalized.event },\n finalized.state,\n ]\n }\n\n const dropped = state.dropped + 1\n const summary = makeBudgetSummaryEvent({\n sourceEvent: event,\n maxEvents,\n maxBytes,\n emitted: state.emitted,\n dropped,\n downgraded: state.downgraded,\n })\n const enforcedSummary = enforceProcessEventMaxBytes(summary, { maxBytes })\n const baseState: ProcessRunEventBudgetState = {\n ...state,\n emitted: Math.min(maxEvents, state.emitted + 1),\n dropped,\n downgraded: state.downgraded + (enforcedSummary.downgraded ? 1 : 0),\n summaryEmitted: true,\n }\n const finalized = finalizeBudgetedProcessEvent({\n event: enforcedSummary.event,\n state: baseState,\n maxBytes,\n degradeReason: 'budget_exceeded',\n })\n\n return [\n { _tag: 'emitSummary', event: finalized.event },\n finalized.state,\n ]\n}\n\nexport const estimateEventBytes = (event: ProcessEvent): number => {\n const json = JSON.stringify(event)\n return typeof Buffer !== 'undefined' ? Buffer.byteLength(json, 'utf8') : new TextEncoder().encode(json).length\n}\n\nconst truncateChars = (value: string, maxLen: number): string =>\n value.length <= maxLen ? value : value.slice(0, maxLen)\n\nconst normalizeErrorSummary = (error: NonNullable<ProcessEvent['error']>): NonNullable<ProcessEvent['error']> => {\n const message = typeof error.message === 'string' && error.message.length > 0 ? error.message : 'Error'\n\n const hint = typeof error.hint === 'string' && error.hint.length > 0 ? truncateChars(error.hint, 1024) : undefined\n\n return {\n name: typeof error.name === 'string' && error.name.length > 0 ? error.name : undefined,\n message: truncateChars(message, 256),\n code: typeof error.code === 'string' && error.code.length > 0 ? 
error.code : undefined,\n hint,\n }\n}\n\nexport const enforceProcessEventMaxBytes = (\n event: ProcessEvent,\n options?: {\n readonly maxBytes?: number\n },\n): { readonly event: ProcessEvent; readonly downgraded: boolean } => {\n const maxBytes = options?.maxBytes ?? PROCESS_EVENT_MAX_BYTES\n\n let downgraded = false\n let next: ProcessEvent = event\n\n if (event.error) {\n const normalized = normalizeErrorSummary(event.error)\n if (\n normalized.message !== event.error.message ||\n normalized.hint !== event.error.hint ||\n normalized.code !== event.error.code ||\n normalized.name !== event.error.name\n ) {\n downgraded = true\n next = { ...event, error: normalized }\n }\n }\n\n if (estimateEventBytes(next) <= maxBytes) {\n return { event: next, downgraded }\n }\n\n // Further trimming is applied to error.hint only (common trigger: multi-line hints).\n if (!next.error?.hint) {\n // Nothing left to trim; best-effort return.\n return { event: next, downgraded: true }\n }\n\n const hint = next.error.hint\n const steps = [512, 256, 128, 64, 32, 0]\n\n for (const maxLen of steps) {\n const trimmed = maxLen === 0 ? undefined : truncateChars(hint, maxLen)\n const candidate: ProcessEvent = {\n ...next,\n error: {\n ...next.error,\n hint: trimmed,\n },\n }\n if (estimateEventBytes(candidate) <= maxBytes) {\n return { event: candidate, downgraded: true }\n }\n }\n\n // Fallback: remove hint and shorten message (process:error must still have a message).\n const fallback: ProcessEvent = next.error\n ? 
({\n ...next,\n error: {\n ...next.error,\n message: truncateChars(next.error.message, 96),\n hint: undefined,\n },\n } satisfies ProcessEvent)\n : next\n\n return { event: fallback, downgraded: true }\n}\n","import type { Effect } from 'effect'\nimport type { ProcessDefinition, ProcessScope } from './protocol.js'\n\nexport type ProcessMeta = {\n readonly definition: ProcessDefinition\n /**\n * installationScope: part of Static IR.\n * - For instance-scope / subtree-scope scenarios, the runtime overwrites this field during installation with a derived effect.\n * - This field is for export/diagnostics only; it does not participate in stable identity derivation (see identity.ts).\n */\n readonly installationScope?: ProcessScope\n readonly kind?: 'process' | 'link' | 'declarativeLink'\n}\n\nexport const PROCESS_META = Symbol.for('@logixjs/core/processMeta')\n\nexport type ProcessEffect<E = never, R = never> = Effect.Effect<void, E, R> & {\n readonly [PROCESS_META]?: ProcessMeta\n}\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nexport const getMeta = (effect: Effect.Effect<void, any, any>): ProcessMeta | undefined =>\n (effect as ProcessEffect)[PROCESS_META]\n\nexport const getDefinition = (effect: Effect.Effect<void, any, any>): ProcessDefinition | undefined =>\n getMeta(effect)?.definition\n\nexport const attachMeta = <E, R>(effect: Effect.Effect<void, E, R>, meta: ProcessMeta): ProcessEffect<E, R> => {\n defineHidden(effect as any, PROCESS_META, meta)\n return effect as ProcessEffect<E, R>\n}\n","import * as SchemaAST from 'effect/SchemaAST'\n\nexport type DotPathSegment = string | number\n\ntype DotPathParseResult =\n | { readonly ok: true; readonly segments: ReadonlyArray<DotPathSegment> }\n | { readonly ok: false; readonly error: Error }\n\nexport type DotPathSelectorResult =\n | {\n readonly ok: true\n 
readonly selector: (state: unknown) => unknown\n readonly segments: ReadonlyArray<DotPathSegment>\n }\n | { readonly ok: false; readonly error: Error }\n\nconst isRecord = (value: unknown): value is Record<string, unknown> => typeof value === 'object' && value !== null\n\nexport const resolveSchemaAst = (schema: unknown): SchemaAST.AST | undefined => {\n if (!isRecord(schema)) return undefined\n const ast = (schema as any).ast as SchemaAST.AST | undefined\n if (!ast) return undefined\n if (typeof ast !== 'object' && typeof ast !== 'function') return undefined\n return ast\n}\n\nconst makeDotPathError = (path: string, message: string, hint?: string): Error => {\n const err = new Error(message)\n ;(err as any).code = 'process::invalid_dot_path'\n ;(err as any).hint =\n hint ??\n [\n \"Expected dot-path syntax: segments separated by '.', numeric segments represent array indices.\",\n `path: ${path}`,\n '',\n 'examples:',\n '- count',\n '- user.name',\n '- items.0.id',\n ].join('\\n')\n return err\n}\n\nconst makeSchemaMismatchError = (path: string): Error =>\n makeDotPathError(\n path,\n 'Invalid dot-path: path does not match the state schema.',\n [\n 'The module state schema does not contain the requested dot-path.',\n `path: ${path}`,\n '',\n 'fix:',\n '- Ensure the path exists in the state schema.',\n '- Use numeric segments for array indices (e.g. 
items.0.id).',\n ].join('\\n'),\n )\n\nconst parseDotPath = (path: string): DotPathParseResult => {\n if (typeof path !== 'string' || path.length === 0) {\n return { ok: false, error: makeDotPathError(String(path), 'dot-path must be a non-empty string') }\n }\n\n const raw = path.split('.')\n if (raw.length === 0) {\n return { ok: false, error: makeDotPathError(path, 'dot-path must contain at least one segment') }\n }\n\n const segments: DotPathSegment[] = []\n for (let i = 0; i < raw.length; i++) {\n const seg = raw[i]!\n if (seg.length === 0) {\n return {\n ok: false,\n error: makeDotPathError(\n path,\n `Invalid dot-path: empty segment at index ${i}.`,\n [\n \"Expected dot-path syntax: segments separated by '.', numeric segments represent array indices.\",\n `path: ${path}`,\n '',\n 'examples:',\n '- count',\n '- user.name',\n '- items.0.id',\n '',\n 'fix:',\n '- Remove consecutive dots or trailing dots.',\n ].join('\\n'),\n ),\n }\n }\n\n if (/^[0-9]+$/.test(seg)) {\n const n = Number(seg)\n if (!Number.isFinite(n) || n < 0) {\n return {\n ok: false,\n error: makeDotPathError(path, `Invalid array index segment \"${seg}\" at index ${i}.`),\n }\n }\n segments.push(Math.floor(n))\n continue\n }\n\n segments.push(seg)\n }\n\n return { ok: true, segments }\n}\n\nconst resolveAstForPath = (\n ast: SchemaAST.AST,\n segments: ReadonlyArray<DotPathSegment>,\n seen: Set<SchemaAST.AST>,\n): SchemaAST.AST | undefined => {\n let current = SchemaAST.toType(ast)\n\n while (SchemaAST.isSuspend(current)) {\n if (seen.has(current)) return undefined\n seen.add(current)\n current = SchemaAST.toType(current.thunk())\n }\n\n if (segments.length === 0) return current\n\n if (SchemaAST.isUnion(current)) {\n for (const node of current.types) {\n const resolved = resolveAstForPath(node, segments, seen)\n if (resolved) return resolved\n }\n return undefined\n }\n\n if (SchemaAST.isArrays(current)) {\n const [head, ...tail] = segments\n if (typeof head !== 'number') return undefined\n 
const element =\n head < current.elements.length ? current.elements[head] : current.rest.length > 0 ? current.rest[0] : undefined\n return element ? resolveAstForPath(element, tail, seen) : undefined\n }\n\n if (SchemaAST.isObjects(current)) {\n const [head, ...tail] = segments\n if (head === undefined) return undefined\n\n if (typeof head === 'string') {\n for (const ps of current.propertySignatures) {\n if (String(ps.name) !== head) continue\n return resolveAstForPath(ps.type, tail, seen)\n }\n }\n\n for (const sig of current.indexSignatures) {\n const param = SchemaAST.toType(sig.parameter as unknown as SchemaAST.AST)\n const tag = (param as any)?._tag\n const acceptsString = tag === 'String' || tag === 'TemplateLiteral'\n const acceptsNumber = tag === 'Number'\n if (typeof head === 'string' && acceptsString) {\n return resolveAstForPath(sig.type, tail, seen)\n }\n if (typeof head === 'number' && acceptsNumber) {\n return resolveAstForPath(sig.type, tail, seen)\n }\n }\n }\n\n const tag = (current as any)?._tag\n if (tag === 'AnyKeyword' || tag === 'UnknownKeyword' || tag === 'ObjectKeyword' || tag === 'Declaration') {\n return current\n }\n\n return undefined\n}\n\nconst selectBySegments =\n (segments: ReadonlyArray<DotPathSegment>) =>\n (state: unknown): unknown => {\n let current: unknown = state\n for (const seg of segments) {\n if (current == null) return undefined\n if (typeof seg === 'number') {\n if (Array.isArray(current)) {\n current = current[seg]\n continue\n }\n if (isRecord(current)) {\n current = current[String(seg)]\n continue\n }\n return undefined\n }\n if (isRecord(current)) {\n current = current[seg]\n continue\n }\n return undefined\n }\n return current\n }\n\nexport const makeSchemaSelector = (path: string, schemaAst?: SchemaAST.AST): DotPathSelectorResult => {\n const parsed = parseDotPath(path)\n if (!parsed.ok) return { ok: false, error: parsed.error }\n\n if (schemaAst) {\n const resolved = resolveAstForPath(schemaAst, parsed.segments, 
new Set())\n if (!resolved) {\n return { ok: false, error: makeSchemaMismatchError(path) }\n }\n }\n\n return {\n ok: true,\n selector: selectBySegments(parsed.segments),\n segments: parsed.segments,\n }\n}\n","import { Duration, Effect, Option, Ref, Stream } from 'effect'\nimport { isDevEnv } from '../env.js'\nimport * as ReadQuery from '../ReadQuery.js'\nimport { makeSchemaSelector } from './selectorSchema.js'\nimport {\n buildSelectorWarningHint,\n evaluateSelectorWarning,\n initialSelectorDiagnosticsState,\n makeSelectorDiagnosticsConfig,\n makeSelectorSamplingTracker,\n} from './selectorDiagnostics.js'\nimport type { ProcessTrigger, ProcessTriggerSpec, SerializableErrorSummary } from './protocol.js'\n\nexport type NonPlatformTriggerSpec = Exclude<ProcessTriggerSpec, { readonly kind: 'platformEvent' }>\ntype TimerTriggerSpec = Extract<NonPlatformTriggerSpec, { readonly kind: 'timer' }>\ntype ModuleActionTriggerSpec = Extract<NonPlatformTriggerSpec, { readonly kind: 'moduleAction' }>\ntype ModuleStateChangeTriggerSpec = Extract<NonPlatformTriggerSpec, { readonly kind: 'moduleStateChange' }>\ntype SchemaAstLike = Parameters<typeof makeSchemaSelector>[1]\ntype CachedSchemaAstEntry = { readonly ast: SchemaAstLike }\n\ntype TriggerStreamFactoryOptions = {\n readonly moduleRuntimeRegistry: ReadonlyMap<string, unknown>\n readonly shouldRecordChainEvents: boolean\n readonly actionIdFromUnknown: (action: unknown) => string | undefined\n readonly resolveRuntimeStateSchemaAst: (runtime: unknown) => SchemaAstLike\n readonly withModuleHint: (error: Error, moduleId: string) => Error\n readonly emitSelectorWarning: (trigger: ProcessTrigger, warning: SerializableErrorSummary) => Effect.Effect<void>\n}\n\nconst makeInvalidTriggerKindError = (spec: never): Error =>\n Object.assign(\n new Error(`[ProcessRuntime] unreachable non-platform trigger kind: ${String((spec as any)?.kind ?? 
'unknown')}`),\n { code: 'process::invalid_trigger_kind' },\n )\n\nconst makeInvalidTimerIdError = (timerId: string): Error => {\n const err = new Error(`[ProcessRuntime] invalid timerId (expected DurationInput): ${timerId}`)\n ;(err as any).code = 'process::invalid_timer_id'\n ;(err as any).hint =\n \"timerId must be a valid DurationInput string, e.g. '10 millis', '1 seconds', '5 minutes'.\"\n return err\n}\n\nconst makeMissingActionStreamError = (moduleId: string): Error => {\n const err = new Error('ModuleRuntime does not provide actions$ (required for moduleAction trigger).')\n ;(err as any).code = 'process::missing_action_stream'\n ;(err as any).hint = `moduleId=${moduleId}`\n return err\n}\n\nconst makeMissingActionMetaStreamError = (moduleId: string): Error => {\n const err = new Error('ModuleRuntime does not provide actionsWithMeta$ (required for moduleAction trigger).')\n ;(err as any).code = 'process::missing_action_meta_stream'\n ;(err as any).hint = `moduleId=${moduleId}`\n return err\n}\n\nconst makeMissingChangesStreamError = (moduleId: string): Error => {\n const err = new Error('ModuleRuntime does not provide changesWithMeta (required for moduleStateChange trigger).')\n ;(err as any).code = 'process::missing_changes_stream'\n ;(err as any).hint = `moduleId=${moduleId}`\n return err\n}\n\nconst nowMs = (): number => {\n if (typeof performance !== 'undefined' && typeof performance.now === 'function') {\n return performance.now()\n }\n return Date.now()\n}\n\nconst isWeakMapKey = (value: unknown): value is object =>\n (typeof value === 'object' && value !== null) || typeof value === 'function'\n\nconst makeModuleStateChangeReadQuery = (args: {\n readonly moduleId: string\n readonly path: string\n readonly selector: (state: unknown) => unknown\n}): ReadQuery.ReadQuery<unknown, unknown> =>\n ReadQuery.make({\n selectorId: `process:moduleStateChange:${args.moduleId}:${args.path}`,\n debugKey: `process.moduleStateChange:${args.moduleId}.${args.path}`,\n 
reads: [args.path],\n select: args.selector,\n equalsKind: 'objectIs',\n })\n\nexport const makeNonPlatformTriggerStreamFactory = (options: TriggerStreamFactoryOptions) => {\n const moduleRuntimeCache = new Map<string, any>()\n const runtimeSchemaAstCache = new WeakMap<object, CachedSchemaAstEntry>()\n\n const resolveModuleRuntime = (moduleId: string): Effect.Effect<any, Error> =>\n Effect.gen(function* () {\n if (moduleRuntimeCache.has(moduleId)) {\n return moduleRuntimeCache.get(moduleId)\n }\n\n const runtime = options.moduleRuntimeRegistry.get(moduleId)\n if (runtime === undefined) {\n return yield* Effect.fail(new Error(`Missing module runtime in scope: ${moduleId}`))\n }\n\n moduleRuntimeCache.set(moduleId, runtime as any)\n return runtime\n })\n\n const resolveRuntimeStateSchemaAst = (runtime: unknown): SchemaAstLike => {\n if (!isWeakMapKey(runtime)) {\n return options.resolveRuntimeStateSchemaAst(runtime)\n }\n\n const cached = runtimeSchemaAstCache.get(runtime)\n if (cached) {\n return cached.ast\n }\n\n const ast = options.resolveRuntimeStateSchemaAst(runtime)\n runtimeSchemaAstCache.set(runtime, { ast })\n return ast\n }\n\n const makeTimerTriggerStream = (spec: TimerTriggerSpec): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n const interval = Duration.fromInput(spec.timerId as Duration.Input)\n if (!interval) {\n return yield* Effect.fail(makeInvalidTimerIdError(spec.timerId))\n }\n\n return Stream.tick(interval).pipe(\n Stream.map(\n () =>\n ({\n kind: 'timer',\n name: spec.name,\n timerId: spec.timerId,\n }) satisfies ProcessTrigger,\n ),\n )\n })\n\n const makeModuleActionTriggerStream = (\n spec: ModuleActionTriggerSpec,\n ): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n const runtime = yield* resolveModuleRuntime(spec.moduleId)\n const buildModuleActionTrigger = (txnSeq: number): ProcessTrigger => ({\n kind: 'moduleAction',\n name: spec.name,\n moduleId: spec.moduleId,\n 
instanceId: runtime.instanceId as string,\n actionId: spec.actionId,\n txnSeq,\n })\n\n if (!options.shouldRecordChainEvents) {\n const stream = runtime.actions$ as Stream.Stream<any> | undefined\n if (!stream) {\n return yield* Effect.fail(makeMissingActionStreamError(spec.moduleId))\n }\n\n return stream.pipe(\n Stream.filter((action: any) => options.actionIdFromUnknown(action) === spec.actionId),\n Stream.map(() => buildModuleActionTrigger(1)),\n )\n }\n\n const stream = runtime.actionsWithMeta$ as Stream.Stream<any> | undefined\n if (!stream) {\n return yield* Effect.fail(makeMissingActionMetaStreamError(spec.moduleId))\n }\n\n return stream.pipe(\n Stream.filter((evt: any) => options.actionIdFromUnknown(evt.value) === spec.actionId),\n Stream.map((evt: any) => {\n const txnSeq = evt?.meta?.txnSeq\n return buildModuleActionTrigger(typeof txnSeq === 'number' ? txnSeq : 1)\n }),\n )\n })\n\n const makeModuleStateChangeTriggerStream = (\n spec: ModuleStateChangeTriggerSpec,\n ): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n const runtime = yield* resolveModuleRuntime(spec.moduleId)\n const schemaAst = resolveRuntimeStateSchemaAst(runtime)\n const selectorResult = makeSchemaSelector(spec.path, schemaAst)\n if (!selectorResult.ok) {\n return yield* Effect.fail(options.withModuleHint(selectorResult.error, spec.moduleId))\n }\n\n const selectorBase = selectorResult.selector\n const buildModuleStateChangeTrigger = (txnSeq: unknown): ProcessTrigger => ({\n kind: 'moduleStateChange',\n name: spec.name,\n moduleId: spec.moduleId,\n instanceId: runtime.instanceId as string,\n path: spec.path,\n txnSeq: typeof txnSeq === 'number' ? 
txnSeq : 1,\n })\n\n const buildModuleStateChangeBaseStream = (\n selector: (state: unknown) => unknown,\n ): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n const readQuery = makeModuleStateChangeReadQuery({\n moduleId: spec.moduleId,\n path: spec.path,\n selector,\n })\n const changesReadQueryWithMeta = runtime.changesReadQueryWithMeta as\n | ((input: ReadQuery.ReadQueryInput<unknown, unknown>) => Stream.Stream<any>)\n | undefined\n\n if (typeof changesReadQueryWithMeta === 'function') {\n return changesReadQueryWithMeta(readQuery).pipe(\n Stream.map((evt: any) => buildModuleStateChangeTrigger(evt?.meta?.txnSeq)),\n )\n }\n\n const changesWithMeta = runtime.changesWithMeta as ((selector: (state: unknown) => unknown) => Stream.Stream<any>) | undefined\n if (typeof changesWithMeta !== 'function') {\n return yield* Effect.fail(makeMissingChangesStreamError(spec.moduleId))\n }\n\n let hasPrevValue = false\n let prevValue: unknown\n\n return changesWithMeta(selector).pipe(\n Stream.filter((evt: any) => {\n if (hasPrevValue && Object.is(prevValue, evt.value)) {\n return false\n }\n hasPrevValue = true\n prevValue = evt.value\n return true\n }),\n Stream.map((evt: any) => buildModuleStateChangeTrigger(evt?.meta?.txnSeq)),\n )\n })\n\n if (!options.shouldRecordChainEvents) {\n return yield* buildModuleStateChangeBaseStream(selectorBase)\n }\n\n const selectorDiagnosticsConfig = makeSelectorDiagnosticsConfig(isDevEnv())\n const selectorDiagnosticsRef = yield* Ref.make(initialSelectorDiagnosticsState(Date.now()))\n const selectorSampling = makeSelectorSamplingTracker(selectorDiagnosticsConfig)\n\n const selector = (state: unknown): unknown => {\n if (!selectorSampling.onSelectorCall()) {\n return selectorBase(state)\n }\n\n const t0 = nowMs()\n const value = selectorBase(state)\n const dt = nowMs() - t0\n\n selectorSampling.recordSample(dt)\n return value\n }\n\n const maybeWarnSelector = (trigger: ProcessTrigger): Effect.Effect<void> 
=> {\n return Effect.gen(function* () {\n const now = Date.now()\n const sampling = selectorSampling.snapshot()\n\n const decision = yield* Ref.modify(selectorDiagnosticsRef, (state) =>\n evaluateSelectorWarning(state, now, {\n config: selectorDiagnosticsConfig,\n sampling: {\n sampled: sampling.sampled,\n maxSampleMs: sampling.maxSampleMs,\n },\n }),\n )\n\n if (!decision.shouldWarn) {\n return\n }\n\n const code = decision.tooFrequent ? 'process::selector_high_frequency' : 'process::selector_slow'\n const hint = buildSelectorWarningHint({\n moduleId: spec.moduleId,\n path: spec.path,\n decision,\n config: selectorDiagnosticsConfig,\n sampling,\n })\n selectorSampling.resetSampling()\n\n yield* options.emitSelectorWarning(trigger, {\n message: 'moduleStateChange selector diagnostics warning',\n code,\n hint,\n })\n })\n }\n\n const baseStream = yield* buildModuleStateChangeBaseStream(selector)\n\n return baseStream.pipe(Stream.tap(maybeWarnSelector))\n })\n\n return (spec: NonPlatformTriggerSpec): Effect.Effect<Stream.Stream<ProcessTrigger>, Error> =>\n Effect.gen(function* () {\n switch (spec.kind) {\n case 'timer':\n return yield* makeTimerTriggerStream(spec)\n case 'moduleAction':\n return yield* makeModuleActionTriggerStream(spec)\n case 'moduleStateChange':\n return yield* makeModuleStateChangeTriggerStream(spec)\n default:\n return yield* Effect.fail(makeInvalidTriggerKindError(spec))\n }\n })\n}\n","import { fnv1a32, stableStringify } from '../../digest.js'\nimport type * as DebugSink from './DebugSink.js'\n\nexport type ReadLane = 'static' | 'dynamic'\n\nexport type ReadProducer = 'aot' | 'jit' | 'manual' | 'dynamic'\n\nexport type ReadQueryFallbackReason = 'missingDeps' | 'unsupportedSyntax' | 'unstableSelectorId' | 'missingBuildGrade'\n\nexport type EqualsKind = 'objectIs' | 'shallowStruct' | 'custom'\n\nexport type ReadQueryStrictGateRule =\n | 'denyFallbackReason'\n | 'requireStatic:global'\n | 'requireStatic:selectorId'\n | 
'requireStatic:module'\n\nexport type ReadQueryQualitySource = 'build' | 'runtime_jit' | 'runtime_dynamic_fallback'\n\nexport interface ReadQueryStrictGateGrade {\n readonly evaluatedAt: 'build' | 'runtime'\n readonly verdict: 'PASS' | 'WARN' | 'FAIL'\n readonly rule?: ReadQueryStrictGateRule\n readonly fallbackReason?: ReadQueryFallbackReason\n}\n\nexport interface ReadQueryQualityMeta {\n readonly source: ReadQueryQualitySource\n readonly strictGate?: ReadQueryStrictGateGrade\n readonly reportId?: string\n readonly missingBuildGrade?: boolean\n}\n\nexport interface ReadsDigest {\n readonly count: number\n readonly hash: number\n}\n\nexport interface ReadQueryStaticIr {\n readonly selectorId: string\n readonly debugKey?: string\n readonly lane: ReadLane\n readonly producer: ReadProducer\n readonly reads?: ReadonlyArray<string | number>\n readonly readsDigest?: ReadsDigest\n readonly fallbackReason?: ReadQueryFallbackReason\n readonly equalsKind: EqualsKind\n}\n\nexport interface ReadQuery<S, V> {\n readonly selectorId: string\n readonly debugKey?: string\n readonly reads: ReadonlyArray<string | number>\n readonly select: (state: S) => V\n readonly equalsKind: EqualsKind\n readonly equals?: (previous: V, next: V) => boolean\n}\n\nexport interface ReadQueryCompiled<S, V> extends ReadQuery<S, V> {\n readonly lane: ReadLane\n readonly producer: ReadProducer\n readonly readsDigest?: ReadsDigest\n readonly fallbackReason?: ReadQueryFallbackReason\n readonly staticIr: ReadQueryStaticIr\n readonly quality?: ReadQueryQualityMeta\n}\n\nexport type ReadQueryInput<S, V> = ((state: S) => V) | ReadQuery<S, V>\n\nexport function isReadQuery<S, V>(input: ReadQueryInput<S, V>): input is ReadQuery<S, V>\nexport function isReadQuery(input: unknown): input is ReadQuery<any, any>\nexport function isReadQuery(input: unknown): input is ReadQuery<any, any> {\n if (!input || (typeof input !== 'object' && typeof input !== 'function')) return false\n const maybe = input as any\n return 
typeof maybe.selectorId === 'string' && typeof maybe.select === 'function' && Array.isArray(maybe.reads)\n}\n\nexport function isReadQueryCompiled<S, V>(input: ReadQueryInput<S, V> | ReadQueryCompiled<S, V>): input is ReadQueryCompiled<S, V>\nexport function isReadQueryCompiled(input: unknown): input is ReadQueryCompiled<any, any>\nexport function isReadQueryCompiled(input: unknown): input is ReadQueryCompiled<any, any> {\n if (!input || typeof input !== 'object') return false\n const maybe = input as any\n return (\n typeof maybe.selectorId === 'string' &&\n typeof maybe.select === 'function' &&\n Array.isArray(maybe.reads) &&\n maybe.staticIr != null &&\n typeof maybe.lane === 'string' &&\n typeof maybe.producer === 'string'\n )\n}\n\nexport const hasBuildQualityGrade = (compiled: ReadQueryCompiled<any, any>): boolean =>\n compiled.quality?.source === 'build' && compiled.quality.strictGate?.evaluatedAt === 'build'\n\nexport const shouldEvaluateStrictGateAtRuntime = (compiled: ReadQueryCompiled<any, any>): boolean =>\n compiled.lane === 'dynamic' && !hasBuildQualityGrade(compiled)\n\nexport const markRuntimeMissingBuildGrade = <S, V>(compiled: ReadQueryCompiled<S, V>): ReadQueryCompiled<S, V> => {\n if (compiled.lane !== 'dynamic' || hasBuildQualityGrade(compiled)) return compiled\n if (compiled.quality?.missingBuildGrade === true && compiled.fallbackReason === 'missingBuildGrade') return compiled\n return {\n ...compiled,\n fallbackReason: 'missingBuildGrade',\n staticIr: {\n ...compiled.staticIr,\n fallbackReason: 'missingBuildGrade',\n },\n quality: {\n ...(compiled.quality ?? {}),\n source: 'runtime_dynamic_fallback',\n missingBuildGrade: true,\n },\n }\n}\n\nconst normalizeReads = (reads: ReadonlyArray<string | number>): ReadonlyArray<string | number> => {\n const unique: Array<string | number> = []\n const seen = new Set<string>()\n for (const r of reads) {\n const key = typeof r === 'number' ? 
`n:${r}` : `s:${r}`\n if (seen.has(key)) continue\n seen.add(key)\n unique.push(r)\n }\n return unique.slice().sort()\n}\n\nconst toHash32Number = (hex: string): number => Number.parseInt(hex, 16)\n\nconst makeReadsDigest = (reads: ReadonlyArray<string | number>): ReadsDigest => {\n const normalized = normalizeReads(reads)\n return {\n count: normalized.length,\n hash: toHash32Number(fnv1a32(stableStringify(normalized))),\n }\n}\n\nconst computeSelectorId = (value: unknown): string => `rq_${fnv1a32(stableStringify(value))}`\n\nlet nextUnstableSelectorSeq = 0\nconst unstableSelectorIdByFn = new WeakMap<Function, string>()\n\nconst computeUnstableSelectorId = (selector: Function): string => {\n const existing = unstableSelectorIdByFn.get(selector)\n if (existing) return existing\n nextUnstableSelectorSeq += 1\n const selectorId = `rq_u${nextUnstableSelectorSeq}`\n unstableSelectorIdByFn.set(selector, selectorId)\n return selectorId\n}\n\nconst unwrapParens = (input: string): string => {\n const trimmed = input.trim()\n if (trimmed.startsWith('(') && trimmed.endsWith(')')) {\n const inner = trimmed.slice(1, -1).trim()\n // Only unwrap the outermost pair of parentheses to avoid accidentally breaking syntax.\n if (!inner.startsWith('(') || !inner.endsWith(')')) {\n return inner\n }\n }\n return trimmed\n}\n\nconst extractArrow = (source: string): { readonly param: string; readonly body: string } | undefined => {\n const idx = source.indexOf('=>')\n if (idx < 0) return undefined\n const left = source.slice(0, idx).trim()\n const right = source.slice(idx + 2).trim()\n\n const paramRaw = unwrapParens(left)\n if (!/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(paramRaw)) return undefined\n\n // block body: { return expr }\n if (right.startsWith('{')) {\n const m = right.match(/^\\{\\s*return\\s+(.+?);?\\s*\\}\\s*$/s)\n if (!m) return undefined\n return { param: paramRaw, body: m[1]?.trim() ?? 
'' }\n }\n\n return { param: paramRaw, body: right }\n}\n\nconst extractFunctionReturn = (source: string): { readonly param: string; readonly body: string } | undefined => {\n const trimmed = source.trim()\n const m = trimmed.match(\n /^function\\s*(?:[A-Za-z_$][A-Za-z0-9_$]*\\s*)?\\(\\s*([A-Za-z_$][A-Za-z0-9_$]*)\\s*\\)\\s*\\{\\s*return\\s+(.+?);?\\s*\\}\\s*$/s,\n )\n if (!m) return undefined\n\n const param = m[1]?.trim() ?? ''\n if (!/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(param)) return undefined\n\n const body = m[2]?.trim() ?? ''\n if (body.length === 0) return undefined\n\n return { param, body }\n}\n\ntype ParsedSelector =\n | { readonly kind: 'path'; readonly path: string }\n | { readonly kind: 'struct'; readonly entries: ReadonlyArray<readonly [string, string]> }\n\nconst tryParseSelectorSource = (source: string): ParsedSelector | undefined => {\n const arrowOrFn = extractArrow(source) ?? extractFunctionReturn(source)\n if (!arrowOrFn) return undefined\n\n const expr = unwrapParens(arrowOrFn.body).trim().replace(/;$/, '').trim()\n\n // path: s.a.b\n {\n const re = new RegExp(`^${arrowOrFn.param}\\\\.([A-Za-z0-9_$]+(?:\\\\.[A-Za-z0-9_$]+)*)$`)\n const m = expr.match(re)\n if (m) {\n return { kind: 'path', path: m[1] }\n }\n }\n\n // struct: ({ a: s.a, b: s.b })\n if (expr.startsWith('{') && expr.endsWith('}')) {\n const inner = expr.slice(1, -1).trim()\n if (inner.length === 0) return { kind: 'struct', entries: [] }\n\n const parts = inner\n .split(',')\n .map((p) => p.trim())\n .filter((p) => p.length > 0)\n\n const entries: Array<readonly [string, string]> = []\n\n for (const part of parts) {\n const idx = part.indexOf(':')\n if (idx < 0) return undefined\n const key = part.slice(0, idx).trim()\n const value = part.slice(idx + 1).trim()\n\n if (!/^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key)) return undefined\n\n const re = new RegExp(`^${arrowOrFn.param}\\\\.([A-Za-z0-9_$]+(?:\\\\.[A-Za-z0-9_$]+)*)$`)\n const m = value.match(re)\n if (!m) return undefined\n 
entries.push([key, m[1]] as const)\n }\n\n // Sort entries to ensure a stable selectorId.\n entries.sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0))\n return { kind: 'struct', entries }\n }\n\n return undefined\n}\n\ntype ReadQueryStaticTemplate = {\n readonly selectorId: string\n readonly reads: ReadonlyArray<string | number>\n readonly readsDigest: ReadsDigest\n readonly equalsKind: EqualsKind\n}\n\nconst READ_QUERY_TEMPLATE_CACHE_MAX = 2048\nconst readQueryTemplateByFn = new WeakMap<Function, ReadQueryStaticTemplate>()\nconst readQueryTemplateBySource = new Map<string, ReadQueryStaticTemplate>()\n\nconst lruGet = <K, V>(map: Map<K, V>, key: K): V | undefined => {\n const value = map.get(key)\n if (value === undefined) return undefined\n map.delete(key)\n map.set(key, value)\n return value\n}\n\nconst lruSet = <K, V>(map: Map<K, V>, key: K, value: V, maxSize: number) => {\n if (map.has(key)) map.delete(key)\n map.set(key, value)\n if (map.size <= maxSize) return\n const oldestKey = map.keys().next().value as K | undefined\n if (oldestKey !== undefined) {\n map.delete(oldestKey)\n }\n}\n\nconst safeToString = (fn: Function): string => {\n try {\n return fn.toString()\n } catch {\n return ''\n }\n}\n\nexport const make = <S, V>(spec: ReadQuery<S, V>): ReadQuery<S, V> => spec\n\nexport const compile = <S, V>(input: ReadQueryInput<S, V>): ReadQueryCompiled<S, V> => {\n if (isReadQuery(input)) {\n const reads = normalizeReads(input.reads)\n const readsDigest = reads.length > 0 ? 
makeReadsDigest(reads) : undefined\n\n const equalsKind = input.equalsKind\n const staticIr: ReadQueryStaticIr = {\n selectorId: input.selectorId,\n debugKey: input.debugKey,\n lane: 'static',\n producer: 'manual',\n reads,\n readsDigest,\n equalsKind,\n }\n\n return {\n ...input,\n reads,\n lane: 'static',\n producer: 'manual',\n readsDigest,\n staticIr,\n }\n }\n\n const selector = input\n const debugKey =\n (typeof (selector as any)?.debugKey === 'string' && (selector as any).debugKey.length > 0\n ? (selector as any).debugKey\n : undefined) ??\n (typeof (selector as any).name === 'string' && (selector as any).name.length > 0\n ? (selector as any).name\n : undefined)\n\n const declaredReads: ReadonlyArray<string> | undefined = Array.isArray((selector as any)?.fieldPaths)\n ? ((selector as any).fieldPaths as ReadonlyArray<unknown>).filter((x): x is string => typeof x === 'string')\n : undefined\n\n if (declaredReads && declaredReads.length > 0) {\n const reads = normalizeReads(declaredReads)\n const readsDigest = makeReadsDigest(reads)\n const selectorId = computeSelectorId({ kind: 'reads', reads })\n\n const staticIr: ReadQueryStaticIr = {\n selectorId,\n debugKey,\n lane: 'static',\n producer: 'jit',\n reads,\n readsDigest,\n equalsKind: 'objectIs',\n }\n\n return {\n selectorId,\n debugKey,\n reads,\n select: selector as any,\n equalsKind: 'objectIs',\n lane: 'static',\n producer: 'jit',\n readsDigest,\n staticIr,\n }\n }\n\n const cachedByFn = readQueryTemplateByFn.get(selector as unknown as Function)\n if (cachedByFn) {\n const staticIr: ReadQueryStaticIr = {\n selectorId: cachedByFn.selectorId,\n debugKey,\n lane: 'static',\n producer: 'jit',\n reads: cachedByFn.reads,\n readsDigest: cachedByFn.readsDigest,\n equalsKind: cachedByFn.equalsKind,\n }\n\n return {\n selectorId: cachedByFn.selectorId,\n debugKey,\n reads: cachedByFn.reads,\n select: selector as any,\n equalsKind: cachedByFn.equalsKind,\n lane: 'static',\n producer: 'jit',\n readsDigest: 
cachedByFn.readsDigest,\n staticIr,\n }\n }\n\n const srcTrimmed = safeToString(selector as unknown as Function).trim()\n const cachedBySource = srcTrimmed.length > 0 ? lruGet(readQueryTemplateBySource, srcTrimmed) : undefined\n if (cachedBySource) {\n readQueryTemplateByFn.set(selector as unknown as Function, cachedBySource)\n const staticIr: ReadQueryStaticIr = {\n selectorId: cachedBySource.selectorId,\n debugKey,\n lane: 'static',\n producer: 'jit',\n reads: cachedBySource.reads,\n readsDigest: cachedBySource.readsDigest,\n equalsKind: cachedBySource.equalsKind,\n }\n\n return {\n selectorId: cachedBySource.selectorId,\n debugKey,\n reads: cachedBySource.reads,\n select: selector as any,\n equalsKind: cachedBySource.equalsKind,\n lane: 'static',\n producer: 'jit',\n readsDigest: cachedBySource.readsDigest,\n staticIr,\n }\n }\n\n const parsed = srcTrimmed.length > 0 ? tryParseSelectorSource(srcTrimmed) : undefined\n\n if (parsed?.kind === 'path') {\n const reads = [parsed.path]\n const readsDigest = makeReadsDigest(reads)\n const selectorId = computeSelectorId({ kind: 'path', path: parsed.path })\n const template: ReadQueryStaticTemplate = { selectorId, reads, readsDigest, equalsKind: 'objectIs' }\n readQueryTemplateByFn.set(selector as unknown as Function, template)\n lruSet(readQueryTemplateBySource, srcTrimmed, template, READ_QUERY_TEMPLATE_CACHE_MAX)\n\n const staticIr: ReadQueryStaticIr = {\n selectorId,\n debugKey,\n lane: 'static',\n producer: 'jit',\n reads,\n readsDigest,\n equalsKind: 'objectIs',\n }\n\n return {\n selectorId,\n debugKey,\n reads,\n select: selector as any,\n equalsKind: 'objectIs',\n lane: 'static',\n producer: 'jit',\n readsDigest,\n staticIr,\n }\n }\n\n if (parsed?.kind === 'struct') {\n const reads = normalizeReads(parsed.entries.map(([, path]) => path))\n const readsDigest = makeReadsDigest(reads)\n const selectorId = computeSelectorId({ kind: 'struct', entries: parsed.entries })\n const template: ReadQueryStaticTemplate = { 
selectorId, reads, readsDigest, equalsKind: 'shallowStruct' }\n readQueryTemplateByFn.set(selector as unknown as Function, template)\n lruSet(readQueryTemplateBySource, srcTrimmed, template, READ_QUERY_TEMPLATE_CACHE_MAX)\n\n const staticIr: ReadQueryStaticIr = {\n selectorId,\n debugKey,\n lane: 'static',\n producer: 'jit',\n reads,\n readsDigest,\n equalsKind: 'shallowStruct',\n }\n\n return {\n selectorId,\n debugKey,\n reads,\n select: selector as any,\n equalsKind: 'shallowStruct',\n lane: 'static',\n producer: 'jit',\n readsDigest,\n staticIr,\n }\n }\n\n // Dynamic lane fallback (no stable reads / unsupported subset)\n const baseFallbackReason: ReadQueryFallbackReason =\n srcTrimmed.includes('=>') || srcTrimmed.startsWith('function') ? 'unsupportedSyntax' : 'missingDeps'\n\n const lowDiscriminabilitySource = srcTrimmed.length === 0 || srcTrimmed.includes('[native code]')\n const fallbackReason: ReadQueryFallbackReason =\n !debugKey || lowDiscriminabilitySource ? 'unstableSelectorId' : baseFallbackReason\n\n const selectorId =\n fallbackReason === 'unstableSelectorId'\n ? 
computeUnstableSelectorId(selector as unknown as Function)\n : computeSelectorId({ kind: 'dynamic', debugKey, src: srcTrimmed })\n const staticIr: ReadQueryStaticIr = {\n selectorId,\n debugKey,\n lane: 'dynamic',\n producer: 'dynamic',\n fallbackReason,\n equalsKind: 'objectIs',\n }\n\n return {\n selectorId,\n debugKey,\n reads: [],\n select: selector as any,\n equalsKind: 'objectIs',\n lane: 'dynamic',\n producer: 'dynamic',\n fallbackReason,\n staticIr,\n }\n}\n\nexport interface ReadQueryStrictGateConfig {\n readonly mode: 'off' | 'warn' | 'error'\n readonly requireStatic?: {\n readonly selectorIds?: ReadonlyArray<string>\n readonly modules?: ReadonlyArray<string>\n }\n readonly denyFallbackReasons?: ReadonlyArray<ReadQueryFallbackReason>\n}\n\nexport interface ReadQueryStrictGateViolationDetails {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly selectorId: string\n readonly debugKey?: string\n readonly fallbackReason: ReadQueryFallbackReason\n readonly rule: ReadQueryStrictGateRule\n}\n\nexport interface ReadQueryStrictGateError extends Error {\n readonly _tag: 'ReadQueryStrictGateError'\n readonly details: ReadQueryStrictGateViolationDetails\n}\n\nexport type ReadQueryStrictGateDiagnosticEvent = Extract<DebugSink.Event, { readonly type: 'diagnostic' }>\n\nexport type ReadQueryStrictGateDecision =\n | { readonly verdict: 'PASS' }\n | {\n readonly verdict: 'WARN'\n readonly diagnostic: ReadQueryStrictGateDiagnosticEvent\n readonly details: ReadQueryStrictGateViolationDetails\n }\n | {\n readonly verdict: 'FAIL'\n readonly diagnostic: ReadQueryStrictGateDiagnosticEvent\n readonly error: ReadQueryStrictGateError\n readonly details: ReadQueryStrictGateViolationDetails\n }\n\nconst filterNonEmpty = (values: ReadonlyArray<string> | undefined): ReadonlyArray<string> | undefined => {\n if (!values || values.length === 0) return undefined\n const next = values.map((v) => v.trim()).filter((v) => v.length > 0)\n return 
next.length > 0 ? next : undefined\n}\n\nconst makeStrictGateDiagnostic = (args: {\n readonly config: ReadQueryStrictGateConfig\n readonly details: ReadQueryStrictGateViolationDetails\n}): ReadQueryStrictGateDiagnosticEvent => {\n const severity = args.config.mode === 'warn' ? ('warning' as const) : ('error' as const)\n\n return {\n type: 'diagnostic',\n moduleId: args.details.moduleId,\n instanceId: args.details.instanceId,\n txnSeq: args.details.txnSeq,\n code: 'read_query::strict_gate',\n severity,\n message: `ReadQuery strict gate violated: selector entered dynamic lane (selectorId=${args.details.selectorId}, reason=${args.details.fallbackReason}).`,\n hint:\n 'Fix: make the selector statically compilable (AOT/JIT) or pass an explicit ReadQuery; ' +\n 'or disable/narrow the gate via RuntimeOptions.readQuery.strictGate.requireStatic / denyFallbackReasons.',\n kind: 'read_query:strict_gate',\n trigger: {\n kind: 'read_query',\n name: 'strict_gate',\n details: args.details,\n },\n }\n}\n\nconst makeStrictGateError = (args: {\n readonly details: ReadQueryStrictGateViolationDetails\n}): ReadQueryStrictGateError =>\n Object.assign(\n new Error(\n `[ReadQueryStrictGateError] selector entered dynamic lane (selectorId=${args.details.selectorId}, reason=${args.details.fallbackReason}).`,\n ),\n {\n _tag: 'ReadQueryStrictGateError' as const,\n details: args.details,\n },\n ) as ReadQueryStrictGateError\n\nexport const resolveBuildGradeStrictGateDecision = (args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly compiled: ReadQueryCompiled<any, any>\n}): ReadQueryStrictGateDecision | undefined => {\n if (args.compiled.quality?.source !== 'build') return undefined\n const grade = args.compiled.quality.strictGate\n if (!grade || grade.evaluatedAt !== 'build') return undefined\n if (grade.verdict === 'PASS') return undefined\n\n const fallbackReason = (grade.fallbackReason ?? args.compiled.fallbackReason ?? 
'missingDeps') as ReadQueryFallbackReason\n const details: ReadQueryStrictGateViolationDetails = {\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n txnSeq: args.txnSeq,\n selectorId: args.compiled.selectorId,\n debugKey: args.compiled.debugKey,\n fallbackReason,\n rule: grade.rule ?? 'requireStatic:global',\n }\n\n const config: ReadQueryStrictGateConfig = {\n mode: grade.verdict === 'WARN' ? 'warn' : 'error',\n }\n const diagnostic = makeStrictGateDiagnostic({ config, details })\n\n if (grade.verdict === 'WARN') {\n return { verdict: 'WARN', diagnostic, details }\n }\n\n return {\n verdict: 'FAIL',\n diagnostic,\n error: makeStrictGateError({ details }),\n details,\n }\n}\n\nexport const evaluateStrictGate = (args: {\n readonly config: ReadQueryStrictGateConfig\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly compiled: ReadQueryCompiled<any, any>\n}): ReadQueryStrictGateDecision => {\n if (args.config.mode === 'off') {\n return { verdict: 'PASS' }\n }\n\n if (args.compiled.lane !== 'dynamic') {\n return { verdict: 'PASS' }\n }\n\n const fallbackReason = (args.compiled.fallbackReason ?? 'missingDeps') as ReadQueryFallbackReason\n\n const denyFallbackReasons = args.config.denyFallbackReasons\n const denied = !!denyFallbackReasons?.includes(fallbackReason)\n\n const selectorIds = filterNonEmpty(args.config.requireStatic?.selectorIds)\n const modules = filterNonEmpty(args.config.requireStatic?.modules)\n const hasCoverageFilter = !!selectorIds || !!modules\n\n const matchedSelectorId = !!selectorIds?.includes(args.compiled.selectorId)\n const matchedModule = !!modules?.includes(args.moduleId)\n const coveredByRequireStatic = !hasCoverageFilter || matchedSelectorId || matchedModule\n\n if (!denied && !coveredByRequireStatic) {\n return { verdict: 'PASS' }\n }\n\n const rule: ReadQueryStrictGateRule = denied\n ? 'denyFallbackReason'\n : !hasCoverageFilter\n ? 'requireStatic:global'\n : matchedSelectorId\n ? 
'requireStatic:selectorId'\n : 'requireStatic:module'\n\n const details: ReadQueryStrictGateViolationDetails = {\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n txnSeq: args.txnSeq,\n selectorId: args.compiled.selectorId,\n debugKey: args.compiled.debugKey,\n fallbackReason,\n rule,\n }\n\n const diagnostic = makeStrictGateDiagnostic({ config: args.config, details })\n\n if (args.config.mode === 'warn') {\n return { verdict: 'WARN', diagnostic, details }\n }\n\n const error = makeStrictGateError({ details })\n return { verdict: 'FAIL', diagnostic, error, details }\n}\n","import { ServiceMap } from 'effect'\n\n/**\n * ServiceId contract (specs/078-module-service-manifest/contracts/service-id.md):\n * - Stable string derived from Context.Tag by priority: tag.key ?? tag.id ?? tag._id\n * - tag.toString() is forbidden as an identity source (diagnostics only).\n */\n\nexport type ServiceId = string\n\nconst asNonEmptyString = (value: unknown): string | undefined =>\n typeof value === 'string' && value.length > 0 ? value : undefined\n\nexport const fromTag = (tag: ServiceMap.Key<any, any>): ServiceId | undefined => {\n const anyTag = tag as any\n return asNonEmptyString(anyTag.key) ?? asNonEmptyString(anyTag.id) ?? asNonEmptyString(anyTag._id)\n}\n\nexport const requireFromTag = (tag: ServiceMap.Key<any, any>, options?: { readonly hint?: string }): ServiceId => {\n const id = fromTag(tag)\n if (id) return id\n\n const hint =\n options?.hint ??\n 'Define the Tag with a stable string key, e.g. 
`class X extends Context.Tag(\"my-svc/x\")<X, Service>() {}`.'\n\n throw new Error(`[InvalidServiceId] Tag is missing a stable id (tag.key/tag.id/tag._id).\\n${hint}`)\n}\n\n\nconst tagCache = new Map<string, ServiceMap.Key<any, any>>()\n\nexport const tagFromServiceId = <S = unknown>(serviceId: ServiceId): ServiceMap.Key<any, S> => {\n const cached = tagCache.get(serviceId)\n if (cached) return cached as ServiceMap.Key<any, S>\n const created = ServiceMap.Service<any, S>(serviceId) as ServiceMap.Key<any, S>\n tagCache.set(serviceId, created)\n return created\n}\n\nexport const moduleRuntimeTagFromModuleId = <S = unknown>(moduleId: string): ServiceMap.Key<any, S> =>\n tagFromServiceId<S>(`@logixjs/Module/${moduleId}`)\n","import { Effect, PubSub, Scope } from 'effect'\nimport { isPrefixOf, normalizeFieldPath, type FieldPath, type FieldPathIdRegistry } from '../../field-path.js'\nimport type { ReadQueryCompiled } from './ReadQuery.js'\nimport type { TxnDirtyEvidenceSnapshot } from './StateTransaction.js'\nimport type { StateChangeWithMeta, StateCommitMeta } from './module.js'\nimport * as Debug from './DebugSink.js'\n\ntype ReadRootKey = string\n\ntype IndexedRootCandidate<S> = {\n readonly selectorId: string\n readonly entry: SelectorEntry<S, any>\n readonly readsForRoot: ReadonlyArray<FieldPath>\n}\n\ntype SelectorEvalEventPolicy = 'always' | 'sampled'\n\ntype SelectorEntry<S, V> = {\n readonly selectorId: string\n readonly readQuery: ReadQueryCompiled<S, V>\n readonly reads: ReadonlyArray<FieldPath>\n readonly readsByRootKey: ReadonlyMap<ReadRootKey, ReadonlyArray<FieldPath>>\n readonly readRootKeys: ReadonlyArray<ReadRootKey>\n readonly hub: PubSub.PubSub<StateChangeWithMeta<V>>\n subscriberCount: number\n lastScheduledTxnSeq: number\n cachedAtTxnSeq: number\n hasValue: boolean\n cachedValue: V | undefined\n}\n\nexport interface SelectorGraph<S> {\n readonly ensureEntry: <V>(\n readQuery: ReadQueryCompiled<S, V>,\n ) => Effect.Effect<SelectorEntry<S, V>, never, 
Scope.Scope>\n readonly releaseEntry: (selectorId: string) => void\n /**\n * O(1) check: whether any selector entries exist.\n *\n * Important perf contract:\n * - Avoid triggering DirtySet construction on commit when there are no selectors at all.\n */\n readonly hasAnyEntries: () => boolean\n readonly onCommit: (\n state: S,\n meta: StateCommitMeta,\n dirty: TxnDirtyEvidenceSnapshot,\n diagnosticsLevel: Debug.DiagnosticsLevel,\n onSelectorChanged?: (selectorId: string) => void,\n ) => Effect.Effect<void, never, never>\n}\n\nconst getReadRootKeyFromPath = (path: FieldPath): ReadRootKey => path[0] ?? ''\n\nconst overlaps = (a: FieldPath, b: FieldPath): boolean => isPrefixOf(a, b) || isPrefixOf(b, a)\n\nconst isRedundantDirtyRoot = (existingDirtyRoots: ReadonlyArray<FieldPath>, dirtyRoot: FieldPath): boolean => {\n for (const existing of existingDirtyRoots) {\n if (isPrefixOf(existing, dirtyRoot)) {\n return true\n }\n }\n return false\n}\n\nconst upsertDirtyRoot = (existingDirtyRoots: Array<FieldPath>, dirtyRoot: FieldPath): boolean => {\n if (isRedundantDirtyRoot(existingDirtyRoots, dirtyRoot)) {\n return false\n }\n\n let nextLength = 0\n for (let i = 0; i < existingDirtyRoots.length; i++) {\n const existing = existingDirtyRoots[i]!\n if (isPrefixOf(dirtyRoot, existing)) {\n continue\n }\n existingDirtyRoots[nextLength] = existing\n nextLength += 1\n }\n existingDirtyRoots.length = nextLength\n existingDirtyRoots.push(dirtyRoot)\n return true\n}\n\nconst equalsShallowStruct = (a: unknown, b: unknown): boolean => {\n if (Object.is(a, b)) return true\n if (!a || !b) return false\n if (typeof a !== 'object' || typeof b !== 'object') return false\n if (Array.isArray(a) || Array.isArray(b)) return false\n\n const aObj = a as Record<string, unknown>\n const bObj = b as Record<string, unknown>\n const aKeys = Object.keys(aObj)\n const bKeys = Object.keys(bObj)\n if (aKeys.length !== bKeys.length) return false\n for (const k of aKeys) {\n if 
(!Object.prototype.hasOwnProperty.call(bObj, k)) return false\n if (!Object.is(aObj[k], bObj[k])) return false\n }\n return true\n}\n\nconst equalsValue = <V>(query: ReadQueryCompiled<any, V>, a: V, b: V): boolean => {\n if (query.equalsKind === 'custom' && typeof query.equals === 'function') {\n return query.equals(a, b)\n }\n if (query.equalsKind === 'shallowStruct') {\n return equalsShallowStruct(a, b)\n }\n return Object.is(a, b)\n}\n\nconst nowMs = (): number => {\n const perf = (globalThis as any).performance as { now?: () => number } | undefined\n if (perf && typeof perf.now === 'function') {\n return perf.now()\n }\n return Date.now()\n}\n\nconst SAMPLED_SELECTOR_EVAL_SLOW_THRESHOLD_MS = 4\n\nconst shouldEvaluateEntryForDirtyRoots = <S>(args: {\n readonly entry: SelectorEntry<S, any>\n readonly dirty: TxnDirtyEvidenceSnapshot\n readonly getDirtyPath: (id: number) => FieldPath | undefined\n readonly hasRegistry: boolean\n}): boolean => {\n if (args.dirty.dirtyAll) return true\n if (args.entry.reads.length === 0) return true\n if (!args.hasRegistry) return true\n\n for (const dirtyPathId of args.dirty.dirtyPathIds) {\n const dirtyPath = args.getDirtyPath(dirtyPathId)\n if (!dirtyPath) return true\n\n const dirtyRootKey = getReadRootKeyFromPath(dirtyPath)\n const readsForRoot = args.entry.readsByRootKey.get(dirtyRootKey)\n if (!readsForRoot || readsForRoot.length === 0) {\n continue\n }\n\n for (const read of readsForRoot) {\n if (overlaps(dirtyPath, read)) {\n return true\n }\n }\n }\n\n return false\n}\n\nconst evaluateEntry = <S>(args: {\n readonly entry: SelectorEntry<S, any>\n readonly selectorId: string\n readonly state: S\n readonly meta: StateCommitMeta\n readonly emitEvalEvent: boolean\n readonly evalEventPolicy: SelectorEvalEventPolicy\n readonly moduleId: string\n readonly instanceId: string\n readonly onSelectorChanged?: (selectorId: string) => void\n}): Effect.Effect<void, never, never> =>\n Effect.gen(function* () {\n let next: any\n const 
evalStartedAt = args.emitEvalEvent ? nowMs() : undefined\n\n try {\n next = args.entry.readQuery.select(args.state)\n } catch {\n if (args.emitEvalEvent) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n txnSeq: args.meta.txnSeq,\n txnId: args.meta.txnId,\n code: 'read_query::eval_error',\n severity: 'error',\n message: 'ReadQuery selector threw during evaluation.',\n hint: 'Selectors must be pure and not throw; check the selector implementation and inputs.',\n kind: 'read_query_eval_error',\n trigger: { kind: 'read_query', name: 'selector:eval', details: { selectorId: args.selectorId } },\n })\n }\n return\n }\n\n const evalMs = args.emitEvalEvent && evalStartedAt != null ? Math.max(0, nowMs() - evalStartedAt) : undefined\n const hadValue = args.entry.hasValue\n const prev = args.entry.cachedValue as any\n const equal = hadValue ? equalsValue(args.entry.readQuery as any, prev, next) : false\n const changed = !hadValue || !equal\n\n if (changed) {\n args.entry.cachedValue = next\n args.entry.hasValue = true\n args.entry.cachedAtTxnSeq = args.meta.txnSeq\n args.onSelectorChanged?.(args.selectorId)\n\n yield* PubSub.publish(args.entry.hub as any, {\n value: args.entry.cachedValue,\n meta: args.meta,\n } satisfies StateChangeWithMeta<any>)\n }\n\n const shouldEmitEvalTrace =\n args.emitEvalEvent &&\n (args.evalEventPolicy === 'always' || changed || (evalMs != null && evalMs >= SAMPLED_SELECTOR_EVAL_SLOW_THRESHOLD_MS))\n\n if (!shouldEmitEvalTrace) {\n return\n }\n\n yield* Debug.record({\n type: 'trace:selector:eval',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n txnSeq: args.meta.txnSeq,\n txnId: args.meta.txnId,\n data: {\n selectorId: args.selectorId,\n lane: args.entry.readQuery.lane,\n producer: args.entry.readQuery.producer,\n fallbackReason: args.entry.readQuery.fallbackReason,\n readsDigest: args.entry.readQuery.readsDigest,\n equalsKind: args.entry.readQuery.equalsKind,\n changed,\n 
evalMs,\n },\n })\n })\n\nexport const make = <S>(args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly getFieldPathIdRegistry?: () => FieldPathIdRegistry | undefined\n}): SelectorGraph<S> => {\n const { moduleId, instanceId, getFieldPathIdRegistry } = args\n\n const selectorsById = new Map<string, SelectorEntry<S, any>>()\n const indexByReadRoot = new Map<ReadRootKey, Set<string>>()\n const selectorsWithoutReads = new Set<string>()\n\n const hasAnyEntries: SelectorGraph<S>['hasAnyEntries'] = () => selectorsById.size > 0\n\n const ensureEntry: SelectorGraph<S>['ensureEntry'] = (readQuery) => {\n const existing = selectorsById.get(readQuery.selectorId)\n if (existing) {\n return Effect.succeed(existing as any)\n }\n\n return Effect.gen(function* () {\n const hub = yield* PubSub.unbounded<StateChangeWithMeta<any>>()\n\n const reads: Array<FieldPath> = []\n const readsByRootKey = new Map<ReadRootKey, FieldPath[]>()\n const readRootKeys: Array<ReadRootKey> = []\n for (const rawRead of readQuery.reads) {\n if (typeof rawRead !== 'string') continue\n const read = normalizeFieldPath(rawRead)\n if (read == null) continue\n\n reads.push(read)\n const rootKey = getReadRootKeyFromPath(read)\n const bucket = readsByRootKey.get(rootKey)\n if (bucket) {\n bucket.push(read)\n } else {\n readsByRootKey.set(rootKey, [read])\n readRootKeys.push(rootKey)\n }\n }\n if (readRootKeys.length === 0) {\n selectorsWithoutReads.add(readQuery.selectorId)\n } else {\n for (const rootKey of readRootKeys) {\n const set = indexByReadRoot.get(rootKey)\n if (set) {\n set.add(readQuery.selectorId)\n } else {\n indexByReadRoot.set(rootKey, new Set([readQuery.selectorId]))\n }\n }\n }\n\n const entry: SelectorEntry<S, any> = {\n selectorId: readQuery.selectorId,\n readQuery: readQuery as any,\n reads,\n readsByRootKey,\n readRootKeys,\n hub,\n subscriberCount: 0,\n lastScheduledTxnSeq: -1,\n cachedAtTxnSeq: 0,\n hasValue: false,\n cachedValue: undefined,\n }\n 
selectorsById.set(readQuery.selectorId, entry)\n return entry as any\n })\n }\n\n const releaseEntry: SelectorGraph<S>['releaseEntry'] = (selectorId) => {\n const entry = selectorsById.get(selectorId)\n if (!entry) return\n entry.subscriberCount = Math.max(0, entry.subscriberCount - 1)\n if (entry.subscriberCount > 0) return\n\n selectorsById.delete(selectorId)\n selectorsWithoutReads.delete(selectorId)\n for (const rootKey of entry.readRootKeys) {\n const set = indexByReadRoot.get(rootKey)\n if (!set) continue\n set.delete(selectorId)\n if (set.size === 0) {\n indexByReadRoot.delete(rootKey)\n }\n }\n }\n\n const onCommit: SelectorGraph<S>['onCommit'] = (state, meta, dirty, diagnosticsLevel, onSelectorChanged) =>\n Effect.gen(function* () {\n if (selectorsById.size === 0) return\n\n const emitEvalEvent =\n diagnosticsLevel === 'light' || diagnosticsLevel === 'full' || diagnosticsLevel === 'sampled'\n const evalEventPolicy: SelectorEvalEventPolicy = diagnosticsLevel === 'sampled' ? 'sampled' : 'always'\n\n const registry: FieldPathIdRegistry | undefined =\n dirty.dirtyAll || dirty.dirtyPathIds.length === 0 ? undefined : getFieldPathIdRegistry?.()\n\n const getDirtyPath = (id: number): FieldPath | undefined => {\n if (!registry) return undefined\n if (!Number.isFinite(id)) return undefined\n const idx = Math.floor(id)\n if (idx < 0) return undefined\n const path = registry.fieldPaths[idx]\n return path && Array.isArray(path) ? 
path : undefined\n }\n\n const evaluateSubscribedEntry = (entry: SelectorEntry<S, any>, selectorId: string): Effect.Effect<void> => {\n if (entry.subscriberCount === 0) return Effect.void\n if (entry.lastScheduledTxnSeq === meta.txnSeq) return Effect.void\n\n // evaluateEntry is total (E=never): mark before execution to dedupe multi-root scans in the same txn.\n entry.lastScheduledTxnSeq = meta.txnSeq\n return evaluateEntry({\n entry,\n selectorId,\n state,\n meta,\n emitEvalEvent,\n evalEventPolicy,\n moduleId,\n instanceId,\n onSelectorChanged,\n })\n }\n\n const evaluateAllSubscribedSelectors = (): Effect.Effect<void, never, never> =>\n Effect.gen(function* () {\n for (const [selectorId, entry] of selectorsById.entries()) {\n yield* evaluateSubscribedEntry(entry, selectorId)\n }\n })\n\n const indexedCandidatesByRoot = new Map<ReadRootKey, ReadonlyArray<IndexedRootCandidate<S>>>()\n const getIndexedCandidatesForRoot = (rootKey: ReadRootKey): ReadonlyArray<IndexedRootCandidate<S>> => {\n const cached = indexedCandidatesByRoot.get(rootKey)\n if (cached) {\n return cached\n }\n\n const selectorIds = indexByReadRoot.get(rootKey)\n if (!selectorIds || selectorIds.size === 0) {\n indexedCandidatesByRoot.set(rootKey, [])\n return []\n }\n\n const indexed: Array<IndexedRootCandidate<S>> = []\n for (const selectorId of selectorIds) {\n const entry = selectorsById.get(selectorId)\n if (!entry) continue\n const readsForRoot = entry.readsByRootKey.get(rootKey)\n if (!readsForRoot || readsForRoot.length === 0) continue\n indexed.push({\n selectorId,\n entry,\n readsForRoot,\n })\n }\n\n indexedCandidatesByRoot.set(rootKey, indexed)\n return indexed\n }\n\n if (selectorsById.size === 1) {\n const entry = selectorsById.values().next().value\n if (!entry) return\n\n if (\n !shouldEvaluateEntryForDirtyRoots({\n entry,\n dirty,\n getDirtyPath,\n hasRegistry: registry != null,\n })\n ) {\n return\n }\n\n yield* evaluateSubscribedEntry(entry, entry.selectorId)\n return\n }\n\n if 
(dirty.dirtyAll) {\n yield* evaluateAllSubscribedSelectors()\n return\n }\n\n if (!registry) {\n yield* evaluateAllSubscribedSelectors()\n return\n }\n\n for (const selectorId of selectorsWithoutReads) {\n const entry = selectorsById.get(selectorId)\n if (!entry) continue\n yield* evaluateSubscribedEntry(entry, selectorId)\n }\n\n const dirtyRootsToProcessByRoot = new Map<ReadRootKey, Array<FieldPath>>()\n for (const dirtyPathId of dirty.dirtyPathIds) {\n const dirtyPath = getDirtyPath(dirtyPathId)\n if (!dirtyPath) {\n yield* evaluateAllSubscribedSelectors()\n return\n }\n\n const rootKey = getReadRootKeyFromPath(dirtyPath)\n\n const existingDirtyRoots = dirtyRootsToProcessByRoot.get(rootKey)\n if (existingDirtyRoots) {\n upsertDirtyRoot(existingDirtyRoots, dirtyPath)\n continue\n }\n dirtyRootsToProcessByRoot.set(rootKey, [dirtyPath])\n }\n\n for (const [rootKey, dirtyRootsForRoot] of dirtyRootsToProcessByRoot) {\n const candidates = getIndexedCandidatesForRoot(rootKey)\n if (candidates.length === 0) {\n continue\n }\n\n const hasRootLevelDirty = dirtyRootsForRoot.some((path) => path.length <= 1)\n\n for (const candidate of candidates) {\n const { entry, selectorId, readsForRoot } = candidate\n if (entry.subscriberCount === 0 || entry.lastScheduledTxnSeq === meta.txnSeq) continue\n\n if (hasRootLevelDirty) {\n yield* evaluateSubscribedEntry(entry, selectorId)\n continue\n }\n\n let overlapsAnyDirtyRoot = false\n for (const dirtyRootPath of dirtyRootsForRoot) {\n for (const read of readsForRoot) {\n if (overlaps(dirtyRootPath, read)) {\n overlapsAnyDirtyRoot = true\n break\n }\n }\n if (overlapsAnyDirtyRoot) {\n break\n }\n }\n if (!overlapsAnyDirtyRoot) continue\n\n yield* evaluateSubscribedEntry(entry, selectorId)\n }\n }\n })\n\n return { ensureEntry, releaseEntry, hasAnyEntries, onCommit }\n}\n","import { Cause, Deferred, Effect, Option, ServiceMap } from 'effect'\nimport type { LogicPlan, ModuleRuntime as PublicModuleRuntime } from './module.js'\nimport * as 
Lifecycle from './Lifecycle.js'\nimport * as ReducerDiagnostics from './ReducerDiagnostics.js'\nimport * as LifecycleDiagnostics from './LifecycleDiagnostics.js'\nimport * as LogicDiagnostics from './LogicDiagnostics.js'\nimport * as LogicUnitMeta from './LogicUnitMeta.js'\nimport * as Platform from './Platform.js'\nimport * as LogicPlanMarker from './LogicPlanMarker.js'\nimport { RootContextTag, type RootContext } from './RootContext.js'\n\ntype PhaseRef = LogicPlanMarker.PhaseRef\n\nconst createPhaseRef = (): PhaseRef => ({ current: 'run' })\n\nexport const runModuleLogics = <S, A, R>(args: {\n readonly tag: ServiceMap.Key<any, PublicModuleRuntime<S, A>>\n readonly logics: ReadonlyArray<Effect.Effect<any, any, R> | LogicPlan<any, R, any>>\n readonly runtime: PublicModuleRuntime<S, A>\n readonly lifecycle: Lifecycle.LifecycleManager\n readonly moduleId: string\n readonly instanceId: string\n}): Effect.Effect<void, unknown, any> => {\n const { tag, logics, runtime, lifecycle, moduleId, instanceId } = args\n const moduleIdForLogs = moduleId\n\n return Effect.gen(function* () {\n const withRuntimeAndLifecycle = <R2, E2, A2>(\n eff: Effect.Effect<A2, E2, R2>,\n phaseRef?: PhaseRef,\n logicUnit?: LogicDiagnostics.LogicUnitService,\n ) => {\n const withServices = Effect.provideService(\n Effect.provideService(eff, Lifecycle.LifecycleContext, lifecycle),\n tag,\n runtime,\n )\n\n // Annotate logs produced inside Logic effects (moduleId, etc.) so the Logger layer can correlate them to a Module.\n const annotated = Effect.annotateLogs({\n 'logix.moduleId': moduleIdForLogs,\n })(withServices as Effect.Effect<A2, E2, any>) as Effect.Effect<A2, E2, R2>\n\n const withLogicUnit = logicUnit\n ? 
Effect.provideService(annotated, LogicDiagnostics.LogicUnitServiceTag, logicUnit)\n : annotated\n\n if (!phaseRef) {\n return withLogicUnit\n }\n\n const phaseService: LogicDiagnostics.LogicPhaseService = {\n get current() {\n return phaseRef.current\n },\n }\n\n return Effect.provideService(withLogicUnit, LogicDiagnostics.LogicPhaseServiceTag, phaseService)\n }\n\n const withRootEnvIfAvailable = <A2, E2, R2>(eff: Effect.Effect<A2, E2, R2>): Effect.Effect<A2, E2, R2> =>\n Effect.gen(function* () {\n const rootOpt = yield* Effect.serviceOption(RootContextTag)\n if (Option.isNone(rootOpt)) {\n return yield* eff\n }\n const root = rootOpt.value as RootContext\n const rootEnv = root.context ?? (yield* Deferred.await(root.ready))\n\n // IMPORTANT:\n // - rootEnv contains the fully-assembled app Env (all modules/services), preventing \"missing service due to early Env capture\".\n // - currentEnv contains Provider overlays (e.g. React RuntimeProvider.layer / useRuntime layers) and module-local overrides.\n // Merge order: currentEnv overrides rootEnv for overlapping tags.\n const currentEnv = (yield* Effect.services<R2>()) as ServiceMap.ServiceMap<any>\n const mergedEnv = ServiceMap.merge(rootEnv as ServiceMap.ServiceMap<any>, currentEnv)\n\n return yield* Effect.provideServices(eff as any, mergedEnv as any)\n }) as any\n\n const formatSource = (source?: {\n readonly file: string\n readonly line: number\n readonly column: number\n }): string | undefined => (source ? `${source.file}:${source.line}:${source.column}` : undefined)\n\n const resolveLogicUnitService = (rawLogic: unknown, index: number): LogicDiagnostics.LogicUnitService => {\n const meta = LogicUnitMeta.getLogicUnitMeta(rawLogic)\n\n const logicUnitId = meta?.resolvedId ?? meta?.id ?? `logic#${index + 1}`\n\n const logicUnitIdKind = meta?.resolvedIdKind ?? (meta?.id ? 'explicit' : 'derived')\n\n const labelBase = meta?.resolvedName ?? meta?.name ?? logicUnitId\n\n const kind = meta?.resolvedKind ?? 
meta?.kind\n const kindPrefix = kind && kind.length > 0 ? `${kind}:` : ''\n\n const source = meta?.resolvedSource ?? meta?.source\n\n return {\n logicUnitId,\n logicUnitIdKind,\n logicUnitLabel: `logicUnit:${kindPrefix}${labelBase}`,\n path: formatSource(source),\n }\n }\n\n const handleLogicFailure = (cause: any) => {\n if (Cause.hasInterruptsOnly(cause)) {\n return Effect.failCause(cause)\n }\n\n const phaseErrorMarker = cause.reasons\n .filter((reason: any) => Cause.isFailReason(reason) || Cause.isDieReason(reason))\n .map((reason: any) => (Cause.isFailReason(reason) ? reason.error : reason.defect))\n .some((err: any) => err?._tag === 'LogicPhaseError')\n\n const base = lifecycle\n .notifyError(cause, {\n phase: 'run',\n hook: 'unknown',\n moduleId,\n instanceId,\n origin: 'logic.fork',\n })\n .pipe(\n Effect.tap(() => LifecycleDiagnostics.emitMissingOnErrorDiagnosticIfNeeded(lifecycle, moduleId)),\n Effect.tap(() => LifecycleDiagnostics.emitAssemblyFailureDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => ReducerDiagnostics.emitDiagnosticsFromCause(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitEnvServiceNotFoundDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitInvalidPhaseDiagnosticIfNeeded(cause, moduleId)),\n )\n\n // For LogicPhaseError: emit diagnostics only and avoid failing ModuleRuntime construction,\n // so runSync paths are not interrupted by AsyncFiberException.\n if (phaseErrorMarker) {\n return base\n }\n\n return base.pipe(Effect.flatMap(() => Effect.failCause(cause)))\n }\n\n const handleInitFailure = (cause: Cause.Cause<unknown>) =>\n Cause.hasInterruptsOnly(cause)\n ? 
Effect.failCause(cause)\n : Effect.void.pipe(\n Effect.tap(() => LifecycleDiagnostics.emitMissingOnErrorDiagnosticIfNeeded(lifecycle, moduleId)),\n Effect.tap(() => LifecycleDiagnostics.emitAssemblyFailureDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => ReducerDiagnostics.emitDiagnosticsFromCause(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitEnvServiceNotFoundDiagnosticIfNeeded(cause, moduleId)),\n Effect.tap(() => LogicDiagnostics.emitInvalidPhaseDiagnosticIfNeeded(cause, moduleId)),\n Effect.flatMap(() => Effect.failCause(cause)),\n )\n\n const isLogicPlan = (value: unknown): value is LogicPlan<any, any, any> =>\n Boolean(value && typeof value === 'object' && 'run' in (value as any) && 'setup' in (value as any))\n\n const hasLogicPhaseError = (cause: Cause.Cause<unknown>): boolean =>\n cause.reasons\n .filter((reason: any) => Cause.isFailReason(reason) || Cause.isDieReason(reason))\n .map((reason: any) => (Cause.isFailReason(reason) ? reason.error : reason.defect))\n .some((err: any) => err?._tag === 'LogicPhaseError')\n\n const normalizeToPlan = (value: unknown, defaultPhaseRef?: PhaseRef): LogicPlan<any, any, any> => {\n const phaseRef = LogicPlanMarker.getPhaseRef(value) ?? defaultPhaseRef ?? 
createPhaseRef()\n\n if (isLogicPlan(value)) {\n const plan = value as LogicPlan<any, any, any>\n if (!LogicPlanMarker.getPhaseRef(plan)) {\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n }\n return plan\n }\n\n const plan: LogicPlan<any, any, any> = {\n setup: Effect.void,\n run: value as Effect.Effect<any, any, any>,\n }\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n return plan\n }\n\n const makeNoopPlan = (phaseRef: PhaseRef): LogicPlan<any, any, any> => {\n const plan: LogicPlan<any, any, any> = {\n setup: Effect.void,\n run: Effect.void,\n }\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n LogicPlanMarker.markSkipRun(plan as any)\n return plan\n }\n\n const executeResolvedPlanImmediately = (\n plan: LogicPlan<any, any, any>,\n logicUnit: LogicDiagnostics.LogicUnitService,\n defaultPhaseRef?: PhaseRef,\n ): Effect.Effect<void, unknown, any> => {\n const hadPhaseRef = Boolean(LogicPlanMarker.getPhaseRef(plan))\n const phaseRef = LogicPlanMarker.getPhaseRef(plan) ?? defaultPhaseRef ?? createPhaseRef()\n if (!hadPhaseRef) {\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n }\n\n const setupPhase = withRuntimeAndLifecycle(plan.setup, phaseRef, logicUnit)\n const runPhase = withRootEnvIfAvailable(withRuntimeAndLifecycle(plan.run, phaseRef, logicUnit))\n\n phaseRef.current = 'setup'\n return setupPhase.pipe(\n Effect.catchCause(handleLogicFailure),\n Effect.flatMap(() =>\n LogicPlanMarker.isSkipRun(plan)\n ? 
Effect.void\n : Effect.sync(() => {\n phaseRef.current = 'run'\n }).pipe(\n Effect.flatMap(() => Effect.forkScoped(runPhase.pipe(Effect.catchCause(handleLogicFailure)))),\n Effect.asVoid,\n ),\n ),\n )\n }\n\n const resolveLogicPlanEffectToPlan = (\n value: unknown,\n logicUnit: LogicDiagnostics.LogicUnitService,\n phaseRef: PhaseRef,\n depth: number,\n ): Effect.Effect<LogicPlan<any, any, any>, unknown, any> => {\n if (!LogicPlanMarker.isLogicPlanEffect(value)) {\n return Effect.succeed(normalizeToPlan(value, phaseRef))\n }\n\n if (depth > 8) {\n return Effect.fail(new Error('Too many nested LogicPlanEffect resolutions (possible cyclic logic return).'))\n }\n\n // Keep phase guard active while resolving nested LogicPlanEffect so setup/run boundaries remain enforced.\n return withRuntimeAndLifecycle(value as Effect.Effect<any, any, any>, phaseRef, logicUnit).pipe(\n Effect.matchCauseEffect({\n onSuccess: (next) => resolveLogicPlanEffectToPlan(next, logicUnit, phaseRef, depth + 1),\n onFailure: (cause) => {\n if (hasLogicPhaseError(cause)) {\n return handleLogicFailure(cause).pipe(Effect.as(makeNoopPlan(phaseRef)))\n }\n return Effect.gen(function* () {\n yield* handleLogicFailure(cause)\n return yield* Effect.failCause(cause)\n })\n },\n }),\n )\n }\n\n const resolveCanonicalPlan = (\n rawLogic: Effect.Effect<any, any, R> | LogicPlan<any, R, any>,\n logicUnit: LogicDiagnostics.LogicUnitService,\n ): Effect.Effect<LogicPlan<any, any, any>, unknown, any> => {\n if (!LogicPlanMarker.isLogicPlanEffect(rawLogic)) {\n if (isLogicPlan(rawLogic)) {\n return Effect.succeed(normalizeToPlan(rawLogic))\n }\n\n const phaseRef = LogicPlanMarker.getPhaseRef(rawLogic) ?? 
createPhaseRef()\n const runEffect = rawLogic as Effect.Effect<any, any, any>\n\n // Canonical semantics keeps single-phase logic as run-only.\n // Compatibility: if run returns a marked LogicPlanEffect (async-built ModuleImpl pattern),\n // resolve and execute that nested plan immediately to preserve existing user behavior.\n const plan: LogicPlan<any, any, any> = {\n setup: Effect.void,\n run: runEffect.pipe(\n Effect.flatMap((value) =>\n !LogicPlanMarker.isLogicPlanEffect(value)\n ? Effect.void\n : resolveLogicPlanEffectToPlan(value, logicUnit, phaseRef, 0).pipe(\n Effect.flatMap((resolvedPlan) => executeResolvedPlanImmediately(resolvedPlan, logicUnit, phaseRef)),\n ),\n ),\n ),\n }\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n return Effect.succeed(plan)\n }\n\n // LogicPlanEffect: run once in setup phase to resolve the canonical plan.\n const phaseRef = LogicPlanMarker.getPhaseRef(rawLogic) ?? createPhaseRef()\n phaseRef.current = 'setup'\n\n return withRuntimeAndLifecycle(rawLogic as Effect.Effect<any, any, any>, phaseRef, logicUnit).pipe(\n Effect.matchCauseEffect({\n onSuccess: (value) => resolveLogicPlanEffectToPlan(value, logicUnit, phaseRef, 0),\n onFailure: (cause) => {\n if (hasLogicPhaseError(cause)) {\n return handleLogicFailure(cause).pipe(Effect.as(makeNoopPlan(phaseRef)))\n }\n return Effect.gen(function* () {\n yield* handleLogicFailure(cause)\n return yield* Effect.failCause(cause)\n })\n },\n }),\n )\n }\n\n const runCanonicalPlan = (\n plan: LogicPlan<any, any, any>,\n logicUnit: LogicDiagnostics.LogicUnitService,\n ): Effect.Effect<void, unknown, any> =>\n Effect.gen(function* () {\n const hadPhaseRef = Boolean(LogicPlanMarker.getPhaseRef(plan))\n const phaseRef = LogicPlanMarker.getPhaseRef(plan) ?? 
createPhaseRef()\n if (!hadPhaseRef) {\n LogicPlanMarker.attachPhaseRef(plan as any, phaseRef)\n }\n\n const setupPhase = withRuntimeAndLifecycle(plan.setup, phaseRef, logicUnit)\n const runPhase = withRootEnvIfAvailable(withRuntimeAndLifecycle(plan.run, phaseRef, logicUnit))\n\n phaseRef.current = 'setup'\n yield* setupPhase.pipe(Effect.catchCause(handleLogicFailure))\n\n if (LogicPlanMarker.isSkipRun(plan)) {\n return\n }\n\n pendingRunForks.push(\n Effect.sync(() => {\n phaseRef.current = 'run'\n }).pipe(\n Effect.flatMap(() => Effect.forkScoped(runPhase.pipe(Effect.catchCause(handleLogicFailure)))),\n Effect.asVoid,\n ),\n )\n })\n\n const pendingRunForks: Array<Effect.Effect<void, never, any>> = []\n\n let logicIndex = 0\n for (const rawLogic of logics) {\n const logicUnit = resolveLogicUnitService(rawLogic, logicIndex)\n logicIndex += 1\n\n const canonicalPlan = yield* resolveCanonicalPlan(rawLogic, logicUnit)\n yield* runCanonicalPlan(canonicalPlan, logicUnit)\n }\n\n // lifecycle initRequired: blocking gate (must complete before forking run fibers).\n yield* lifecycle.runInitRequired.pipe(Effect.catchCause(handleInitFailure))\n\n // platform signals: read Platform only after initRequired succeeds (avoid reading Env during setup).\n const platformOpt = yield* Effect.serviceOption(Platform.Tag)\n if (Option.isSome(platformOpt)) {\n const platform = platformOpt.value\n const snapshot = yield* lifecycle.getTaskSnapshot\n\n const platformPhaseRef: PhaseRef = { current: 'run' }\n const phaseService: LogicDiagnostics.LogicPhaseService = {\n get current() {\n return platformPhaseRef.current\n },\n }\n\n const providePlatformEnv = <A2, E2, R2>(eff: Effect.Effect<A2, E2, R2>): Effect.Effect<A2, E2, any> =>\n Effect.provideService(\n Effect.provideService(\n Effect.provideService(\n Effect.provideService(eff as Effect.Effect<A2, E2, any>, Platform.Tag, platform),\n Lifecycle.LifecycleContext,\n lifecycle,\n ),\n tag,\n runtime,\n ),\n 
LogicDiagnostics.LogicPhaseServiceTag,\n phaseService,\n )\n\n const register = (\n label: Lifecycle.Hook,\n subscribe: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>,\n ) =>\n Effect.forkScoped(\n subscribe(\n providePlatformEnv(\n label === 'suspend'\n ? lifecycle.runPlatformSuspend\n : label === 'resume'\n ? lifecycle.runPlatformResume\n : lifecycle.runPlatformReset,\n ).pipe(Effect.asVoid),\n ).pipe(\n Effect.catchCause((cause) =>\n lifecycle.notifyError(cause, {\n phase: 'platform',\n hook: label,\n moduleId,\n instanceId,\n origin: 'platform.subscribe',\n })),\n ),\n ).pipe(Effect.asVoid)\n\n if (snapshot.platformSuspend.length > 0) {\n yield* register('suspend', platform.lifecycle.onSuspend)\n }\n if (snapshot.platformResume.length > 0) {\n yield* register('resume', platform.lifecycle.onResume)\n }\n if (snapshot.platformReset.length > 0 && typeof platform.lifecycle.onReset === 'function') {\n yield* register('reset', platform.lifecycle.onReset)\n }\n }\n\n // Fork run fibers (start after init completes).\n yield* Effect.forEach(pendingRunForks, (eff) => eff, { discard: true })\n\n // lifecycle start: non-blocking (start after ready).\n yield* lifecycle.runStart\n\n // Give forked logics a scheduling chance so upper layers (e.g. Root processes) don't dispatch actions before logics are ready.\n yield* Effect.yieldNow\n })\n}\n","import { Cause, Chunk, Effect } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport type { LifecycleManager } from './Lifecycle.js'\n\nexport type UnhandledErrorKind = 'interrupt' | 'diagnostic' | 'assembly' | 'defect'\n\nexport const classifyUnhandledCause = (cause: Cause.Cause<unknown>): UnhandledErrorKind => {\n if (Cause.hasInterruptsOnly(cause)) {\n return 'interrupt'\n }\n\n const all = cause.reasons\n .filter((reason) => Cause.isFailReason(reason) || Cause.isDieReason(reason))\n .map((reason) => (Cause.isFailReason(reason) ? 
reason.error : reason.defect)) as ReadonlyArray<any>\n\n if (all.some((err) => err && typeof err === 'object' && err._tag === 'LogicPhaseError')) {\n return 'diagnostic'\n }\n\n if (all.some((err) => err && typeof err === 'object' && err.name === 'MissingModuleRuntimeError')) {\n return 'assembly'\n }\n\n return 'defect'\n}\n\n/**\n * When a Module hits a lifecycle error during Logic execution and no onError handler is registered,\n * emit a warning diagnostic suggesting adding $.lifecycle.onError at the beginning of the module logic.\n */\nexport const emitMissingOnErrorDiagnosticIfNeeded = (\n lifecycle: LifecycleManager,\n moduleId?: string,\n): Effect.Effect<void, never, any> =>\n lifecycle.hasOnErrorHandlers.pipe(\n Effect.flatMap((has) =>\n has || !moduleId\n ? Effect.void\n : Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'lifecycle::missing_on_error',\n severity: 'warning',\n message: `Module \"${moduleId}\" received a lifecycle error but has no $.lifecycle.onError handler registered.`,\n hint: \"Add $.lifecycle.onError((cause, context) => ...) at the beginning of this Module's logic to handle logic errors consistently.\",\n }),\n ),\n )\n\n/**\n * When a lifecycle error originates from \"assembly failure\" (e.g. missing Module runtime provider),\n * emit an error diagnostic with actionable fix suggestions.\n *\n * Notes:\n * - This diagnostic explains the error classification and does not change the original error semantics.\n * - If higher layers (e.g. 
React RuntimeProvider.onError) listen to both lifecycle:error and diagnostic(error),\n * they should de-duplicate or report based on context/phase to avoid duplicate alerts.\n */\nexport const emitAssemblyFailureDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void, never, any> =>\n Effect.sync(() => {\n const defects = cause.reasons.filter(Cause.isDieReason).map((reason) => reason.defect)\n const missing = defects.find(\n (e) => e && typeof e === 'object' && (e as any).name === 'MissingModuleRuntimeError',\n ) as any\n\n if (!missing) {\n return Effect.void\n }\n\n const tokenId = typeof missing.tokenId === 'string' ? missing.tokenId : '<unknown module id>'\n const fix =\n Array.isArray(missing.fix) && missing.fix.every((l: unknown) => typeof l === 'string')\n ? (missing.fix as ReadonlyArray<string>).join('\\n')\n : undefined\n\n return Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'assembly::missing_module_runtime',\n severity: 'error',\n message: `Missing Module runtime provider for \"${tokenId}\".`,\n hint:\n fix ?? 
'Provide the child implementation in the same scope (imports), or provide a root singleton at app root.',\n kind: 'assembly_failure',\n })\n }).pipe(Effect.flatten)\n","import { Cause, Effect, ServiceMap } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\n\nconst phaseDiagnosticsEnabled = (): boolean => isDevEnv()\n\n/**\n * Logic diagnostics:\n * - Currently focuses on initialization noise caused by missing Env services (\"Service not found\").\n *\n * Design intent:\n * - In recommended usage, Runtime / React layers provide Env correctly.\n * - In some startup timing windows, Logic may try to read services before Env is fully provided.\n * - Such errors often occur once, do not change final semantics, but pollute logs.\n *\n * Therefore we emit a warning diagnostic via Debug, explaining likely causes and investigation paths.\n * The real error semantics are still handled by lifecycle.onError / AppRuntime.onError.\n */\n\nconst SERVICE_NOT_FOUND_PREFIX = 'Service not found:'\n\n/**\n * If the Cause contains a `Service not found: ...` error, emit a warning diagnostic:\n * - code: logic::env_service_not_found\n * - message: the original error message\n * - hint: explains this is known startup timing noise and suggests what to check\n */\nexport const emitEnvServiceNotFoundDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n let pretty: string\n try {\n pretty = Cause.pretty(cause)\n } catch {\n return\n }\n\n if (!pretty.includes(SERVICE_NOT_FOUND_PREFIX)) {\n return\n }\n\n // 1) Warning diagnostic for the missing Env service itself\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'logic::env_service_not_found',\n severity: 'warning',\n message: pretty,\n hint:\n 'Logic attempted to access an Env service before it was provided. This is a known initialization timing noise in Runtime/React integration. 
' +\n \"If it happens once during early startup and everything works afterward, it's likely harmless; \" +\n 'if it persists or correlates with app issues, verify Runtime.make / RuntimeProvider.layer provides the service.',\n })\n\n // 2) In some cases (e.g. accessing Env too early during Logic setup), we also want to surface\n // logic::invalid_phase to suggest moving Env access to the run section.\n //\n // Because we cannot reliably determine the phase at this point, this is only a supplemental signal.\n // The real phase guard is still handled by LogicPhaseError + emitInvalidPhaseDiagnosticIfNeeded.\n yield* Debug.record({\n type: 'diagnostic',\n moduleId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: '$.use is not allowed before Env is fully ready.',\n hint:\n 'Avoid reading services during setup or before Env is ready; ' +\n 'move Env access to the Logic run section, or wrap init via $.lifecycle.onInitRequired.',\n kind: 'env_service_not_ready',\n })\n })\n\nexport interface LogicPhaseError extends Error {\n readonly _tag: 'LogicPhaseError'\n readonly kind: string\n readonly api?: string\n readonly phase: 'setup' | 'run'\n readonly moduleId?: string\n}\n\nexport interface LogicPhaseService {\n readonly current: 'setup' | 'run'\n}\n\nexport class LogicPhaseServiceTag extends ServiceMap.Service<\n LogicPhaseServiceTag,\n LogicPhaseService\n>()('@logixjs/LogicPhaseService') {}\n\n/**\n * LogicUnitService:\n * - Injected while executing each mounted logic unit (scope = the logic unit's setup/run fiber).\n * - Used for trait provenance and other \"bound to the current logic unit\" information (aligned with 022-module logicUnitId).\n *\n * Constraints:\n * - Read-only (must not mutate runtime state); only a provenance/diagnostics anchor.\n */\nexport interface LogicUnitService {\n readonly logicUnitId: string\n readonly logicUnitIdKind: 'explicit' | 'derived'\n readonly logicUnitLabel: string\n readonly path?: string\n}\n\nexport class 
LogicUnitServiceTag extends ServiceMap.Service<\n LogicUnitServiceTag,\n LogicUnitService\n>()('@logixjs/LogicUnitService') {}\n\nexport const makeLogicPhaseError = (\n kind: string,\n api: string,\n phase: 'setup' | 'run',\n moduleId?: string,\n): LogicPhaseError =>\n Object.assign(new Error(`[LogicPhaseError] ${api} is not allowed in ${phase} phase (kind=${kind}).`), {\n _tag: 'LogicPhaseError',\n kind,\n api,\n phase,\n moduleId,\n }) as LogicPhaseError\n\n/**\n * Extracts LogicPhaseError from a Cause and emits it as a diagnostic:\n * - code: logic::invalid_phase\n * - kind: concrete violation kind (e.g. use_in_setup)\n */\nexport const emitInvalidPhaseDiagnosticIfNeeded = (\n cause: Cause.Cause<unknown>,\n moduleId?: string,\n): Effect.Effect<void> =>\n Effect.gen(function* () {\n if (!phaseDiagnosticsEnabled()) {\n return\n }\n\n const allErrors = cause.reasons\n .filter((reason) => Cause.isFailReason(reason) || Cause.isDieReason(reason))\n .map((reason) => (Cause.isFailReason(reason) ? reason.error : reason.defect))\n\n for (const err of allErrors) {\n const logicErr = err as any\n if (logicErr && logicErr._tag === 'LogicPhaseError') {\n const phaseErr = logicErr as LogicPhaseError\n const hint =\n phaseErr.kind === 'use_in_setup' || phaseErr.kind === 'lifecycle_in_setup'\n ? 'The setup phase must not read Env/services or run long-lived logic; move the relevant calls to the run phase.'\n : phaseErr.kind === 'lifecycle_in_run'\n ? 'Do not register $.lifecycle.* in the run phase (setup-only). Move lifecycle registrations to the synchronous part of Module.logic builder (before return).'\n : phaseErr.kind === 'traits_in_run' || phaseErr.kind === 'traits_declare_in_run'\n ? 'Traits are frozen after setup; move $.traits.declare to LogicPlan.setup or the setup registration phase of Module.logic builder.'\n : 'Move logic to the run phase; keep setup for registrations only.'\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: phaseErr.moduleId ?? 
moduleId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: `${phaseErr.api ?? phaseErr.kind} is not allowed in ${phaseErr.phase} phase.`,\n hint,\n kind: phaseErr.kind,\n })\n\n // Return after the first LogicPhaseError match.\n return\n }\n }\n })\n","import { Effect, ServiceMap } from 'effect'\n\nexport interface Service {\n readonly lifecycle: {\n readonly onSuspend: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n readonly onResume: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n readonly onReset?: (eff: Effect.Effect<void, never, any>) => Effect.Effect<void, never, any>\n }\n\n /**\n * Platform signal broadcaster (for host integration and tests): triggers registered lifecycle handlers.\n *\n * Notes:\n * - The default implementation should be a safe no-op.\n * - Failure policy is decided by the platform implementation; the runtime should ensure \"do not terminate the instance by default\".\n */\n readonly emitSuspend: () => Effect.Effect<void, never, any>\n readonly emitResume: () => Effect.Effect<void, never, any>\n readonly emitReset: () => Effect.Effect<void, never, any>\n}\n\nexport class Tag extends ServiceMap.Service<Tag, Service>()('@logixjs/Platform') {}\n","import { Effect, Ref } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\n\ntype PressureKey = string\n\ntype PressureCooldownState = {\n readonly lastEmittedAtMs: number\n readonly suppressedCount: number\n}\n\nconst keyOf = (trigger: Debug.TriggerRef): PressureKey => {\n const name = typeof trigger.name === 'string' ? trigger.name : ''\n const details = trigger.details\n if (!details || typeof details !== 'object') {\n return `${trigger.kind}::${name}`\n }\n\n const source = details as Record<string, unknown>\n const dispatchEntry = typeof source.dispatchEntry === 'string' ? 
source.dispatchEntry : ''\n const channel = typeof source.channel === 'string' ? source.channel : ''\n const topicTag = typeof source.topicTag === 'string' ? source.topicTag : ''\n const actionTag = typeof source.actionTag === 'string' ? source.actionTag : ''\n\n return `${trigger.kind}::${name}::${dispatchEntry}::${channel}::${topicTag}::${actionTag}`\n}\n\nconst nowMs = Effect.clockWith((clock) => clock.currentTimeMillis)\n\nexport interface ConcurrencyDiagnostics {\n readonly emitPressureIfNeeded: (args: {\n readonly policy: RuntimeInternalsResolvedConcurrencyPolicy\n readonly trigger: Debug.TriggerRef\n readonly backlogCount?: number\n readonly inFlight?: number\n readonly saturatedDurationMs?: number\n }) => Effect.Effect<void>\n readonly emitUnboundedPolicyIfNeeded: (args: {\n readonly policy: RuntimeInternalsResolvedConcurrencyPolicy\n readonly trigger: Debug.TriggerRef\n }) => Effect.Effect<void>\n}\n\nexport const make = (args: {\n readonly moduleId: string | undefined\n readonly instanceId: string\n}): Effect.Effect<ConcurrencyDiagnostics> =>\n Effect.gen(function* () {\n const pressureCooldownByKeyRef = yield* Ref.make<Readonly<Record<PressureKey, PressureCooldownState>>>({})\n\n const unboundedEnabledEmittedRef = yield* Ref.make(false)\n const unboundedBlockedEmittedRef = yield* Ref.make(false)\n\n const emitPressureIfNeeded: ConcurrencyDiagnostics['emitPressureIfNeeded'] = (inArgs) =>\n Effect.gen(function* () {\n const policy = inArgs.policy\n\n const backlogCount = inArgs.backlogCount ?? 0\n const saturatedDurationMs = inArgs.saturatedDurationMs ?? 
0\n\n const threshold = policy.pressureWarningThreshold\n const meetsThreshold =\n backlogCount >= threshold.backlogCount || saturatedDurationMs >= threshold.backlogDurationMs\n\n if (!meetsThreshold) {\n return\n }\n\n const cooldownMs = policy.warningCooldownMs\n const now = yield* nowMs\n const key = keyOf(inArgs.trigger)\n\n const decision = yield* Ref.modify(\n pressureCooldownByKeyRef,\n (\n byKey,\n ): readonly [\n { readonly _tag: 'emit'; readonly suppressedCount: number } | { readonly _tag: 'suppress' },\n Readonly<Record<PressureKey, PressureCooldownState>>,\n ] => {\n const prev = byKey[key]\n if (prev && now - prev.lastEmittedAtMs < cooldownMs) {\n return [\n { _tag: 'suppress' },\n {\n ...byKey,\n [key]: {\n lastEmittedAtMs: prev.lastEmittedAtMs,\n suppressedCount: prev.suppressedCount + 1,\n },\n },\n ] as const\n }\n\n const suppressedCount = prev?.suppressedCount ?? 0\n return [\n { _tag: 'emit', suppressedCount },\n {\n ...byKey,\n [key]: {\n lastEmittedAtMs: now,\n suppressedCount: 0,\n },\n },\n ] as const\n },\n )\n\n if (decision._tag === 'suppress') {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.configScope,\n limit: policy.concurrencyLimit,\n backlogCount,\n saturatedDurationMs,\n threshold: {\n backlogCount: threshold.backlogCount,\n backlogDurationMs: threshold.backlogDurationMs,\n },\n cooldownMs,\n degradeStrategy: decision.suppressedCount > 0 ? 
('cooldown' as const) : ('none' as const),\n suppressedCount: decision.suppressedCount,\n sampleRate: 1,\n droppedCount: 0,\n }\n if (typeof inArgs.inFlight === 'number' && Number.isFinite(inArgs.inFlight)) {\n details.inFlight = inArgs.inFlight\n }\n if (inArgs.trigger.details !== undefined) {\n const projected = (() => {\n try {\n return JSON.parse(JSON.stringify(inArgs.trigger.details))\n } catch {\n return undefined\n }\n })()\n if (projected && typeof projected === 'object') {\n details.source = projected\n }\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::pressure',\n severity: 'warning',\n message: 'Concurrency pressure detected (backpressure / saturation).',\n hint: 'Reduce trigger frequency, split work, switch to runLatest or batch processing; or tune concurrency/backpressure limits via concurrencyPolicy.',\n kind: 'concurrency:pressure',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n })\n\n const emitUnboundedPolicyIfNeeded: ConcurrencyDiagnostics['emitUnboundedPolicyIfNeeded'] = (inArgs) =>\n Effect.gen(function* () {\n const policy = inArgs.policy\n\n // 1) effective unbounded: emit only once (SC-004 / FR-004)\n if (policy.concurrencyLimit === 'unbounded' && policy.allowUnbounded === true) {\n const shouldEmit = yield* Ref.modify(unboundedEnabledEmittedRef, (emitted) =>\n emitted ? 
([false, true] as const) : ([true, true] as const),\n )\n if (!shouldEmit) {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.configScope,\n limit: policy.concurrencyLimit,\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::unbounded_enabled',\n severity: 'error',\n message: 'Unbounded concurrency is enabled (risk: resource exhaustion).',\n hint:\n 'Enable only for short-lived, controlled, cancelable fan-out; prefer bounded concurrency and increase gradually; ' +\n 'avoid piling up long-running or never-ending tasks under unbounded.',\n kind: 'concurrency:unbounded_enabled',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n return\n }\n\n // 2) requested unbounded without explicit allow: fall back to bounded + diagnostic (T023)\n const requestedUnbounded =\n policy.requestedConcurrencyLimit === 'unbounded' && policy.concurrencyLimit !== 'unbounded'\n\n if (!requestedUnbounded) {\n return\n }\n\n const shouldEmit = yield* Ref.modify(unboundedBlockedEmittedRef, (emitted) =>\n emitted ? 
([false, true] as const) : ([true, true] as const),\n )\n if (!shouldEmit) {\n return\n }\n\n const details: Record<string, unknown> = {\n configScope: policy.configScope,\n limit: policy.requestedConcurrencyLimit,\n }\n\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: args.moduleId,\n instanceId: args.instanceId,\n code: 'concurrency::unbounded_requires_opt_in',\n severity: 'error',\n message: 'Unbounded concurrency was requested but is not allowed; falling back to bounded concurrency.',\n hint:\n 'If you really need unbounded, explicitly set concurrencyPolicy.allowUnbounded = true; ' +\n 'otherwise set concurrencyPolicy.concurrencyLimit to a positive integer limit.',\n kind: 'concurrency:unbounded_blocked',\n trigger: {\n kind: inArgs.trigger.kind,\n name: inArgs.trigger.name,\n details,\n },\n })\n })\n\n return { emitPressureIfNeeded, emitUnboundedPolicyIfNeeded }\n })\n","import { Deferred, Effect, Exit, Option, Schema, ServiceMap, Stream } from 'effect'\nimport { create } from 'mutative'\nimport type * as Logix from './module.js'\nimport * as Logic from './LogicMiddleware.js'\nimport * as Action from '../../action.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport { mutateWithPatchPaths } from './mutativePatches.js'\nimport * as FlowRuntime from './FlowRuntime.js'\nimport * as MatchBuilder from './MatchBuilder.js'\nimport * as Platform from './Platform.js'\nimport * as Lifecycle from './Lifecycle.js'\nimport * as Debug from './DebugSink.js'\nimport * as LogicDiagnostics from './LogicDiagnostics.js'\nimport { isDevEnv } from './env.js'\nimport type { JsonValue } from '../../observability/jsonValue.js'\nimport { RunSessionTag, type RunSession } from '../../observability/runSession.js'\nimport * as Root from '../../root.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport type * as ModuleTraits from './ModuleTraits.js'\nimport { getRuntimeInternals, setBoundInternals } from './runtimeInternalsAccessor.js'\nimport type { 
AnyModuleShape, ModuleRuntime, StateOf, ActionOf } from './module.js'\n\nconst DIRECT_STATE_WRITE_EFFECT = Symbol.for('logix.directStateWriteEffect')\n\ntype DirectStateWriteMetadata<Sh extends AnyModuleShape> =\n | { readonly kind: 'update'; readonly run: (prev: StateOf<Sh>) => StateOf<Sh> }\n | { readonly kind: 'mutate'; readonly run: (draft: Logic.Draft<StateOf<Sh>>) => void }\n\ntype DirectStateWriteEffect<Sh extends AnyModuleShape> = Effect.Effect<void, never, any> & {\n [DIRECT_STATE_WRITE_EFFECT]?: DirectStateWriteMetadata<Sh>\n}\n\nconst markDirectStateWriteEffect = <Sh extends AnyModuleShape, A extends Effect.Effect<void, never, any>>(\n effect: A,\n metadata: DirectStateWriteMetadata<Sh>,\n): A => {\n ;(effect as DirectStateWriteEffect<Sh>)[DIRECT_STATE_WRITE_EFFECT] = metadata\n return effect\n}\n\nconst getDirectStateWriteMetadata = <Sh extends AnyModuleShape>(\n value: unknown,\n): DirectStateWriteMetadata<Sh> | undefined => {\n if (!value || (typeof value !== 'object' && typeof value !== 'function')) return undefined\n return (value as DirectStateWriteEffect<Sh>)[DIRECT_STATE_WRITE_EFFECT]\n}\n\n// Local IntentBuilder factory; equivalent to the old internal/dsl/LogicBuilder.makeIntentBuilderFactory.\nconst LogicBuilderFactory = <Sh extends AnyModuleShape, R = never>(\n runtime: ModuleRuntime<StateOf<Sh>, ActionOf<Sh>>,\n runtimeInternals: RuntimeInternals,\n) => {\n const flowApi = FlowRuntime.make<Sh, R>(runtime, runtimeInternals)\n\n return <T>(stream: Stream.Stream<T>, triggerName?: string): Logic.IntentBuilder<T, Sh, R> => {\n const runWithStateTransaction: TaskRunner.TaskRunnerRuntime['runWithStateTransaction'] = (origin, body) =>\n runtimeInternals.txn.runWithStateTransaction(origin as any, body)\n\n const taskRunnerRuntime: TaskRunner.TaskRunnerRuntime = {\n moduleId: runtime.moduleId,\n instanceId: runtimeInternals.instanceId,\n runWithStateTransaction,\n resolveConcurrencyPolicy: runtimeInternals.concurrency.resolveConcurrencyPolicy,\n }\n\n 
const builder = {\n debounce: (ms: number) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.debounce<T>(ms)(stream), triggerName),\n throttle: (ms: number) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.throttle<T>(ms)(stream), triggerName),\n filter: (predicate: (value: T) => boolean) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(flowApi.filter(predicate)(stream), triggerName),\n map: <U>(f: (value: T) => U) =>\n LogicBuilderFactory<Sh, R>(runtime, runtimeInternals)(stream.pipe(Stream.map(f)), triggerName),\n run<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.run<T, A, E, R2>(eff, options)(stream)\n },\n runLatest<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runLatest<T, A, E, R2>(eff, options)(stream)\n },\n runExhaust<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runExhaust<T, A, E, R2>(eff, options)(stream)\n },\n runParallel<A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): Logic.Of<Sh, R & R2, void, E> {\n return flowApi.runParallel<T, A, E, R2>(eff, options)(stream)\n },\n runFork: <A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n ): Logic.Of<Sh, R & R2, void, E> => {\n if (runtimeInternals && triggerName && typeof eff !== 'function' && getDirectStateWriteMetadata<Sh>(eff) != null) {\n return Effect.sync(() => {\n const metadata = getDirectStateWriteMetadata<Sh>(eff)!\n 
runtimeInternals.txn.registerActionStateWriteback(\n triggerName,\n metadata.kind === 'update'\n ? ({ kind: 'update', run: metadata.run } as any)\n : ({ kind: 'mutate', run: metadata.run } as any),\n )\n }) as Logic.Of<Sh, R & R2, void, E>\n }\n return Effect.forkScoped(flowApi.run<T, A, E, R2>(eff)(stream)).pipe(Effect.asVoid) as Logic.Of<\n Sh,\n R & R2,\n void,\n E\n >\n },\n runParallelFork: <A = void, E = never, R2 = unknown>(\n eff: Logic.Of<Sh, R & R2, A, E> | ((p: T) => Logic.Of<Sh, R & R2, A, E>),\n ): Logic.Of<Sh, R & R2, void, E> => {\n if (runtimeInternals && triggerName && typeof eff !== 'function' && getDirectStateWriteMetadata<Sh>(eff) != null) {\n return Effect.sync(() => {\n const metadata = getDirectStateWriteMetadata<Sh>(eff)!\n runtimeInternals.txn.registerActionStateWriteback(\n triggerName,\n metadata.kind === 'update'\n ? ({ kind: 'update', run: metadata.run } as any)\n : ({ kind: 'mutate', run: metadata.run } as any),\n )\n }) as Logic.Of<Sh, R & R2, void, E>\n }\n return Effect.forkScoped(flowApi.runParallel<T, A, E, R2>(eff)(stream)).pipe(Effect.asVoid) as Logic.Of<\n Sh,\n R & R2,\n void,\n E\n >\n },\n runTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'task', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runParallelTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'parallel', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? 
triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runLatestTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'latest', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n runExhaustTask: <A = void, E = never, R2 = unknown>(\n config: TaskRunner.TaskRunnerConfig<T, Sh, R & R2, A, E>,\n ): Logic.Of<Sh, R & R2, void, never> =>\n TaskRunner.makeTaskRunner<T, Sh, R & R2, A, E>(stream, 'exhaust', taskRunnerRuntime, {\n ...config,\n triggerName: config.triggerName ?? triggerName,\n }) as Logic.Of<Sh, R & R2, void, never>,\n toStream: () => stream,\n update: (\n reducer: (prev: StateOf<Sh>, payload: T) => StateOf<Sh> | Effect.Effect<StateOf<Sh>, any, any>,\n ): Logic.Of<Sh, R, void, never> =>\n Stream.runForEach(stream, (payload) =>\n taskRunnerRuntime.runWithStateTransaction(\n {\n kind: 'watcher:update',\n name: triggerName,\n },\n () =>\n Effect.gen(function* () {\n const prev = (yield* runtime.getState) as StateOf<Sh>\n const next = reducer(prev, payload)\n if (Effect.isEffect(next)) {\n const exit = yield* Effect.exit(next as Effect.Effect<StateOf<Sh>, any, any>)\n if (exit._tag === 'Failure') {\n yield* Effect.logError('Flow error', exit.cause)\n return\n }\n yield* runtime.setState(exit.value as StateOf<Sh>)\n return\n }\n yield* runtime.setState(next as StateOf<Sh>)\n }),\n ),\n ).pipe(Effect.catchCause((cause) => Effect.logError('Flow error', cause))) as Logic.Of<Sh, R, void, never>,\n mutate: (reducer: (draft: Logic.Draft<StateOf<Sh>>, payload: T) => void): Logic.Of<Sh, R, void, never> =>\n Stream.runForEach(stream, (payload) =>\n taskRunnerRuntime.runWithStateTransaction(\n {\n kind: 'watcher:mutate',\n name: triggerName,\n },\n () =>\n Effect.gen(function* () {\n const prev = (yield* runtime.getState) as StateOf<Sh>\n const 
recordPatch = runtimeInternals.txn.recordStatePatch\n const updateDraft = runtimeInternals.txn.updateDraft\n\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as StateOf<Sh>, (draft) => {\n reducer(draft as Logic.Draft<StateOf<Sh>>, payload)\n })\n\n for (const path of patchPaths) {\n recordPatch(path, 'unknown')\n }\n\n updateDraft(nextState)\n }),\n ),\n ).pipe(Effect.catchCause((cause) => Effect.logError('Flow error', cause))) as Logic.Of<Sh, R, void, never>,\n } as Omit<Logic.IntentBuilder<T, Sh, R>, 'pipe'>\n\n const pipe: Logic.IntentBuilder<T, Sh, R>['pipe'] = function (this: unknown) {\n // eslint-disable-next-line prefer-rest-params\n const fns = arguments as unknown as ReadonlyArray<\n (self: Logic.IntentBuilder<T, Sh, R>) => Logic.IntentBuilder<T, Sh, R>\n >\n let acc: Logic.IntentBuilder<T, Sh, R> = builder as Logic.IntentBuilder<T, Sh, R>\n for (let i = 0; i < fns.length; i++) {\n acc = fns[i](acc)\n }\n return acc\n }\n\n return Object.assign(builder, { pipe }) as Logic.IntentBuilder<T, Sh, R>\n }\n}\nimport type { BoundApi } from './module.js'\n\n/**\n * BoundApi implementation: creates a pre-bound `$` for a given Store shape + runtime.\n *\n * Note: public types and entrypoint signatures live in api/BoundApi.ts; this file only hosts the implementation.\n */\nexport function make<Sh extends Logix.AnyModuleShape, R = never>(\n shape: Sh,\n runtime: Logix.ModuleRuntime<Logix.StateOf<Sh>, Logix.ActionOf<Sh>>,\n options?: {\n readonly getPhase?: () => 'setup' | 'run'\n readonly phaseService?: LogicDiagnostics.LogicPhaseService\n readonly moduleId?: string\n readonly logicUnit?: LogicDiagnostics.LogicUnitService\n },\n): BoundApi<Sh, R> {\n const runtimeInternals = getRuntimeInternals(runtime as any)\n\n const getPhase = options?.getPhase ?? (() => 'run')\n const getCurrentPhase = (): 'setup' | 'run' => {\n const phaseService = options?.phaseService\n const phase = phaseService?.current ?? getPhase()\n return phase === 'setup' ? 
'setup' : 'run'\n }\n const guardRunOnly = (kind: string, api: string) => {\n const phaseService = options?.phaseService\n const phase = phaseService?.current ?? getPhase()\n if (phase === 'setup') {\n throw LogicDiagnostics.makeLogicPhaseError(kind, api, 'setup', options?.moduleId)\n }\n }\n const flowApi = FlowRuntime.make<Sh, R>(runtime, runtimeInternals)\n\n const makeIntentBuilder = (runtime_: Logix.ModuleRuntime<any, any>) =>\n LogicBuilderFactory<Sh, R>(runtime_, runtimeInternals)\n const withLifecycle = <A>(\n available: (manager: Lifecycle.LifecycleManager) => Effect.Effect<A, never, any>,\n missing: () => Effect.Effect<A, never, any>,\n ) =>\n Effect.serviceOption(Lifecycle.LifecycleContext as ServiceMap.Key<any, Lifecycle.LifecycleManager>).pipe(\n Effect.flatMap((maybe) =>\n Option.match(maybe, {\n onSome: available,\n onNone: missing,\n })),\n )\n const withPlatform = (invoke: (platform: Platform.Service) => Effect.Effect<void, never, any>) =>\n Effect.serviceOption(Platform.Tag as ServiceMap.Key<any, Platform.Service>).pipe(\n Effect.flatMap((maybe) =>\n Option.match(maybe, {\n onSome: invoke,\n onNone: () => Effect.void,\n }),\n ),\n )\n\n const emitSetupOnlyViolation = (api: string): Effect.Effect<void> =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_phase',\n severity: 'error',\n message: `${api} is setup-only and is not allowed in run phase.`,\n hint:\n 'Move $.lifecycle.* calls to the synchronous part of Module.logic builder (before return) for registration; ' +\n 'for dynamic resource cleanup in the run phase, use Effect.acquireRelease / Scope finalizer instead of registering onDestroy late.',\n kind: 'lifecycle_in_run',\n }).pipe(Effect.orDie)\n\n const createIntentBuilder = <T>(stream: Stream.Stream<T>, triggerName?: string) =>\n makeIntentBuilder(runtime)(stream, triggerName)\n\n const actionMatchesTag = (action: unknown, tag: string): boolean => {\n const 
actionTag = (action as any)?._tag\n if (actionTag === tag) {\n return true\n }\n const actionType = (action as any)?.type\n return actionType === tag\n }\n\n const actionStreamByTag = (tag: string): Stream.Stream<Logix.ActionOf<Sh>> => {\n const topicSelector = runtime.actionsByTag$\n if (typeof topicSelector === 'function') {\n return topicSelector(tag)\n }\n return runtime.actions$.pipe(Stream.filter((action: unknown) => actionMatchesTag(action, tag)))\n }\n\n const onceInRunSession = (key: string): Effect.Effect<boolean, never, any> =>\n Effect.serviceOption(RunSessionTag as unknown as ServiceMap.Key<any, RunSession>).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? maybe.value.local.once(key) : true)),\n )\n\n let cachedDiagnosticsLevel: Debug.DiagnosticsLevel | undefined\n\n const isModuleLike = (\n value: unknown,\n ): value is {\n readonly _kind: 'ModuleDef' | 'Module'\n readonly id: string\n readonly tag: ServiceMap.Key<any, Logix.ModuleRuntime<any, any>>\n readonly schemas?: Record<string, unknown>\n readonly meta?: Record<string, JsonValue>\n readonly dev?: { readonly source?: { readonly file: string; readonly line: number; readonly column: number } }\n } =>\n Boolean(\n value &&\n typeof value === 'object' &&\n ((value as any)._kind === 'ModuleDef' || (value as any)._kind === 'Module') &&\n 'tag' in (value as object) &&\n ServiceMap.isKey((value as any).tag),\n )\n\n const buildModuleHandle = (\n tag: ServiceMap.Key<any, Logix.ModuleRuntime<any, any>>,\n rt: Logix.ModuleRuntime<any, any>,\n ): unknown => {\n const actionsProxy: Logix.ModuleHandle<any>['actions'] = new Proxy(\n {},\n {\n get: (_target, prop) => (payload: unknown) =>\n rt.dispatch({\n _tag: prop as string,\n payload,\n }),\n },\n ) as Logix.ModuleHandle<any>['actions']\n\n const handle: Logix.ModuleHandle<any> = {\n read: (selector) => Effect.map(rt.getState, selector),\n changes: rt.changes,\n dispatch: rt.dispatch,\n actions$: rt.actions$,\n actions: actionsProxy,\n }\n\n const 
EXTEND_HANDLE = Symbol.for('logix.module.handle.extend')\n const extend = (tag as any)?.[EXTEND_HANDLE] as\n | ((runtime: Logix.ModuleRuntime<any, any>, base: Logix.ModuleHandle<any>) => unknown)\n | undefined\n\n return typeof extend === 'function' ? (extend(rt, handle) ?? handle) : handle\n }\n\n const emitModuleDescriptorOnce = (\n module: {\n readonly id: string\n readonly tag: any\n readonly schemas?: Record<string, unknown>\n readonly meta?: Record<string, JsonValue>\n readonly dev?: { readonly source?: { readonly file: string; readonly line: number; readonly column: number } }\n },\n rt: Logix.ModuleRuntime<any, any>,\n ): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n // Hot-path guard: never emit events when diagnostics are off.\n if (cachedDiagnosticsLevel === 'off') return\n\n const key = `module_descriptor:${String(rt.instanceId ?? 'unknown')}`\n const shouldEmit = yield* onceInRunSession(key)\n if (!shouldEmit) return\n\n const actionKeys = Object.keys((module.tag as any)?.shape?.actionMap ?? {})\n\n const internalSymbol = Symbol.for('logix.module.internal')\n const internal = (module as any)[internalSymbol] as { readonly mounted?: ReadonlyArray<any> } | undefined\n\n const logicUnits = (internal?.mounted ?? []).map((u: any) => ({\n kind: String(u?.kind ?? 'user'),\n id: String(u?.id ?? ''),\n derived: u?.derived ? true : undefined,\n name: typeof u?.name === 'string' ? u.name : undefined,\n }))\n\n const schemaKeys = module.schemas && typeof module.schemas === 'object' ? Object.keys(module.schemas) : undefined\n\n const meta = module.meta && typeof module.meta === 'object' ? module.meta : undefined\n\n const source = module.dev?.source\n\n const traitsSnapshot = runtimeInternals.traits.getModuleTraitsSnapshot()\n const traits = traitsSnapshot\n ? 
{\n digest: traitsSnapshot.digest,\n count: traitsSnapshot.traits.length,\n }\n : undefined\n\n const data = {\n id: module.id,\n moduleId: String(rt.moduleId),\n instanceId: String(rt.instanceId),\n actionKeys,\n logicUnits,\n schemaKeys,\n meta,\n source,\n traits,\n }\n\n yield* Debug.record({\n type: 'trace:module:descriptor',\n moduleId: rt.moduleId,\n instanceId: rt.instanceId,\n data,\n } as any)\n })\n\n /**\n * strict: resolve a Module runtime only from the current Effect environment.\n *\n * Notes:\n * - With multiple roots / instances, any process-wide registry cannot express the correct semantics.\n * - A missing provider is a wiring error: fail deterministically and provide actionable hints (more details in dev/test).\n */\n const resolveModuleRuntime = (\n tag: ServiceMap.Key<any, Logix.ModuleRuntime<any, any>>,\n ): Effect.Effect<Logix.ModuleRuntime<any, any>, never, any> =>\n Effect.gen(function* () {\n const requestedModuleId = typeof (tag as any)?.id === 'string' ? ((tag as any).id as string) : undefined\n const fromModuleId = typeof options?.moduleId === 'string' ? 
options.moduleId : runtime.moduleId\n\n // self: always allow resolving the current ModuleRuntime (both Bound.make and runtime injection paths).\n if (requestedModuleId && requestedModuleId === runtime.moduleId) {\n return runtime as unknown as Logix.ModuleRuntime<any, any>\n }\n\n const fromImports = runtimeInternals.imports.get(tag as unknown as ServiceMap.Key<any, any>)\n if (fromImports) {\n return fromImports as unknown as Logix.ModuleRuntime<any, any>\n }\n\n // Bound.make (no moduleId context): allow resolving from the current Effect env (useful for tests/scaffolding).\n if (typeof options?.moduleId !== 'string') {\n const fromEnv = yield* Effect.serviceOption(tag as any)\n if (Option.isSome(fromEnv)) {\n return fromEnv.value as unknown as Logix.ModuleRuntime<any, any>\n }\n }\n\n // 2) Not found: die immediately — this is a wiring error; guide the caller to fix the composition.\n const tokenId = requestedModuleId ?? '<unknown module id>'\n const fix: string[] = isDevEnv()\n ? [\n '- Provide the child implementation in the same scope (imports).',\n ` Example: ${fromModuleId ?? 'ParentModule'}.implement({ imports: [${requestedModuleId ?? 'ChildModule'}.impl], ... })`,\n '- If you intentionally want a root singleton, provide it at app root (Runtime.make(...,{ layer }) / root imports),',\n ' and use Root.resolve(ModuleTag) (instead of $.use) at the callsite.',\n ]\n : []\n\n const err = new Error(\n isDevEnv()\n ? [\n '[MissingModuleRuntimeError] Cannot resolve ModuleRuntime for ModuleTag.',\n '',\n `tokenId: ${tokenId}`,\n 'entrypoint: logic.$.use',\n 'mode: strict',\n `from: ${fromModuleId ?? '<unknown module id>'}`,\n `startScope: moduleId=${fromModuleId ?? '<unknown>'}, instanceId=${String(runtime.instanceId ?? 
'<unknown>')}`,\n '',\n 'fix:',\n ...fix,\n ].join('\\n')\n : '[MissingModuleRuntimeError] module runtime not found',\n )\n\n ;(err as any).tokenId = tokenId\n ;(err as any).entrypoint = 'logic.$.use'\n ;(err as any).mode = 'strict'\n ;(err as any).from = fromModuleId\n ;(err as any).startScope = {\n moduleId: fromModuleId,\n instanceId: String(runtime.instanceId ?? '<unknown>'),\n }\n ;(err as any).fix = fix\n\n err.name = 'MissingModuleRuntimeError'\n return yield* Effect.die(err)\n })\n\n type BatchedStateWritebackOutcome =\n | { readonly _tag: 'ok' }\n | { readonly _tag: 'failure'; readonly cause: unknown }\n\n type BatchedStateWritebackRequest =\n | {\n readonly kind: 'update'\n readonly update: (prev: Logix.StateOf<Sh>) => Logix.StateOf<Sh>\n readonly done: Deferred.Deferred<BatchedStateWritebackOutcome>\n }\n | {\n readonly kind: 'mutate'\n readonly mutate: (draft: Logic.Draft<Logix.StateOf<Sh>>) => void\n readonly done: Deferred.Deferred<BatchedStateWritebackOutcome>\n }\n\n type BatchedStateWritebackCoordinator = {\n readonly enqueueUpdate: (update: (prev: Logix.StateOf<Sh>) => Logix.StateOf<Sh>) => Effect.Effect<void, never, any>\n readonly enqueueMutate: (mutate: (draft: Logic.Draft<Logix.StateOf<Sh>>) => void) => Effect.Effect<void, never, any>\n }\n\n // Perf-first batching for `$.state.update/$.state.mutate` called *outside* a transaction:\n // - Many watchers may write back on the same tick; running N transactions is dominated by fixed txn cost.\n // - Batch them into a single StateTransaction (still sequentially applies reducers), reducing queue/commit overhead.\n //\n // Notes:\n // - Enabled only when runtimeInternals exists and NODE_ENV=production (perf mode).\n // - Semantics change: multiple state writebacks may share the same txnSeq/txnId (forward-only evolution).\n let batchedStateWritebackCoordinator: BatchedStateWritebackCoordinator | undefined\n\n const getOrCreateBatchedStateWritebackCoordinator = (): BatchedStateWritebackCoordinator => 
{\n if (batchedStateWritebackCoordinator) return batchedStateWritebackCoordinator\n if (!runtimeInternals) {\n throw new Error('[BatchedStateWritebackCoordinator] Missing runtimeInternals (expected in ModuleRuntime-backed $).')\n }\n\n let inFlight = false\n const pending: Array<BatchedStateWritebackRequest> = []\n\n const drain = (): ReadonlyArray<BatchedStateWritebackRequest> => {\n if (pending.length === 0) return []\n return pending.splice(0, pending.length)\n }\n\n const ok: BatchedStateWritebackOutcome = { _tag: 'ok' }\n const fail = (cause: unknown): BatchedStateWritebackOutcome => ({ _tag: 'failure', cause })\n\n const applyBatch = (batch: ReadonlyArray<BatchedStateWritebackRequest>): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (batch.length === 0) return\n\n let current = (yield* runtime.getState) as Logix.StateOf<Sh>\n\n for (let i = 0; i < batch.length; i++) {\n const req = batch[i]!\n if (req.kind === 'update') {\n const next = req.update(current)\n current = next\n yield* runtime.setState(next)\n continue\n }\n\n const { nextState, patchPaths } = mutateWithPatchPaths(current as Logix.StateOf<Sh>, (draft) => {\n req.mutate(draft as Logic.Draft<Logix.StateOf<Sh>>)\n })\n\n for (const path of patchPaths) {\n runtimeInternals.txn.recordStatePatch(path, 'unknown')\n }\n\n runtimeInternals.txn.updateDraft(nextState)\n current = nextState as Logix.StateOf<Sh>\n }\n })\n\n const flushInFlight = (): Effect.Effect<void, never, any> =>\n Effect.uninterruptible(\n Effect.gen(function* () {\n if (inFlight) return\n inFlight = true\n try {\n while (true) {\n const batch = drain()\n if (batch.length === 0) {\n // Release the inFlight lock, then re-check pending to avoid the \"enqueue while exiting\" race.\n inFlight = false\n if (pending.length === 0) return\n inFlight = true\n continue\n }\n\n const originName =\n batch.length === 1 ? (batch[0]!.kind === 'update' ? 
'update' : 'mutate') : 'writeback:batched'\n\n const exit = yield* Effect.exit(\n runtimeInternals.txn.runWithStateTransaction(\n {\n kind: 'state',\n name: originName,\n details: { batched: true, count: batch.length },\n } as any,\n () => applyBatch(batch).pipe(Effect.asVoid),\n ),\n )\n\n const outcome: BatchedStateWritebackOutcome = exit._tag === 'Success' ? ok : fail(exit.cause)\n\n for (let i = 0; i < batch.length; i++) {\n yield* Deferred.succeed(batch[i]!.done, outcome)\n }\n\n // Unexpected failures are treated as fatal; unblock waiters and stop the drain loop.\n if (outcome._tag === 'failure') {\n return\n }\n }\n } finally {\n inFlight = false\n }\n }),\n )\n\n const waitForMicrotask = (): Effect.Effect<void, never, never> =>\n Effect.promise(\n () =>\n new Promise<void>((resolve) => {\n if (typeof queueMicrotask === 'function') {\n queueMicrotask(resolve)\n return\n }\n Promise.resolve().then(resolve)\n }),\n )\n\n const enqueueAndAwait = (req: BatchedStateWritebackRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n pending.push(req)\n if (!inFlight) {\n yield* waitForMicrotask()\n }\n yield* flushInFlight()\n const outcome = yield* Deferred.await(req.done)\n if (outcome._tag === 'failure') {\n return yield* Effect.die(outcome.cause)\n }\n })\n\n const coordinator: BatchedStateWritebackCoordinator = {\n enqueueUpdate: (update) =>\n Effect.gen(function* () {\n const done = yield* Deferred.make<BatchedStateWritebackOutcome>()\n yield* enqueueAndAwait({ kind: 'update', update, done })\n }),\n enqueueMutate: (mutate) =>\n Effect.gen(function* () {\n const done = yield* Deferred.make<BatchedStateWritebackOutcome>()\n yield* enqueueAndAwait({ kind: 'mutate', mutate, done })\n }),\n }\n\n batchedStateWritebackCoordinator = coordinator\n return coordinator\n }\n\n const stateApi: BoundApi<Sh, R>['state'] = {\n read: runtime.getState,\n update: (f) =>\n markDirectStateWriteEffect<Sh, Effect.Effect<void, never, any>>(\n Effect.gen(function* 
() {\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n if (inTxn) {\n const prev = yield* runtime.getState\n return yield* runtime.setState(f(prev))\n }\n\n const body = () => Effect.flatMap(runtime.getState, (prev) => runtime.setState(f(prev)))\n\n if (runtimeInternals && !isDevEnv()) {\n return yield* getOrCreateBatchedStateWritebackCoordinator().enqueueUpdate(f as any)\n }\n\n return yield* runtimeInternals\n ? runtimeInternals.txn.runWithStateTransaction({ kind: 'state', name: 'update' } as any, body)\n : body()\n }),\n { kind: 'update', run: f as any },\n ),\n mutate: (f) =>\n markDirectStateWriteEffect<Sh, Effect.Effect<void, never, any>>(\n Effect.gen(function* () {\n const recordPatch = runtimeInternals?.txn.recordStatePatch\n const updateDraft = runtimeInternals?.txn.updateDraft\n\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n if (inTxn) {\n const prev = yield* runtime.getState\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as Logix.StateOf<Sh>, (draft) => {\n f(draft as Logic.Draft<Logix.StateOf<Sh>>)\n })\n\n for (const path of patchPaths) {\n recordPatch?.(path, 'unknown')\n }\n\n updateDraft?.(nextState)\n return\n }\n\n const body = () =>\n Effect.gen(function* () {\n const prev = yield* runtime.getState\n const { nextState, patchPaths } = mutateWithPatchPaths(prev as Logix.StateOf<Sh>, (draft) => {\n f(draft as Logic.Draft<Logix.StateOf<Sh>>)\n })\n\n for (const path of patchPaths) {\n recordPatch?.(path, 'unknown')\n }\n\n updateDraft?.(nextState)\n })\n\n if (runtimeInternals && !isDevEnv()) {\n return yield* getOrCreateBatchedStateWritebackCoordinator().enqueueMutate(f as any)\n }\n\n return yield* runtimeInternals\n ? 
runtimeInternals.txn.runWithStateTransaction({ kind: 'state', name: 'mutate' } as any, body)\n : body()\n }),\n { kind: 'mutate', run: f as any },\n ),\n ref: runtime.ref,\n }\n\n\n const actions = shape.actionMap as BoundApi<Sh, R>['actions']\n\n const dispatcherCache = new Map<string, (...args: any[]) => Effect.Effect<void, any, any>>()\n\n const hasAction = (key: string): boolean => Object.prototype.hasOwnProperty.call(actions as any, key)\n\n const dispatchers: BoundApi<Sh, R>['dispatchers'] = new Proxy({} as any, {\n get: (_target, prop) => {\n if (typeof prop !== 'string') return undefined\n if (!hasAction(prop)) return undefined\n\n const cached = dispatcherCache.get(prop)\n if (cached) return cached\n\n const token = (actions as any)[prop] as Action.AnyActionToken\n const fn = (...args: any[]) => runtime.dispatch((token as any)(...args))\n\n dispatcherCache.set(prop, fn)\n return fn\n },\n has: (_target, prop) => typeof prop === 'string' && hasAction(prop),\n ownKeys: () => Object.keys(actions as any),\n getOwnPropertyDescriptor: (_target, prop) => {\n if (typeof prop !== 'string') return undefined\n if (!hasAction(prop)) return undefined\n return { enumerable: true, configurable: true }\n },\n }) as unknown as BoundApi<Sh, R>['dispatchers']\n\n const dispatch: BoundApi<Sh, R>['dispatch'] = (...args: any[]) => {\n const [first, second] = args\n\n if (typeof first === 'string') {\n return runtime.dispatch({ _tag: first, payload: second } as Logix.ActionOf<Sh>)\n }\n\n if (Action.isActionToken(first)) {\n return runtime.dispatch((first as any)(second))\n }\n\n return runtime.dispatch(first as Logix.ActionOf<Sh>)\n }\n\n const matchApi = <V>(value: V): Logic.FluentMatch<V> => MatchBuilder.makeMatch(value)\n\n const matchTagApi = <V extends { _tag: string }>(value: V): Logic.FluentMatchTag<V> =>\n MatchBuilder.makeMatchTag(value)\n\n // Primary reducer registration: write into the reducer map via the runtime's internal registrar.\n const reducer: BoundApi<Sh, 
R>['reducer'] = (tag, fn) => {\n return Effect.sync(() => {\n runtimeInternals.txn.registerReducer(String(tag), fn as any)\n }) as any\n }\n\n const effect: BoundApi<Sh, R>['effect'] = (token, handler) =>\n Effect.gen(function* () {\n if (!Action.isActionToken(token)) {\n return yield* Effect.die(new Error('[BoundApi.effect] token must be an ActionToken'))\n }\n\n const phase = getCurrentPhase()\n const logicUnit = options?.logicUnit\n\n yield* runtimeInternals.effects.registerEffect({\n actionTag: token.tag,\n handler: handler as any,\n phase,\n ...(logicUnit\n ? {\n logicUnit: {\n logicUnitId: logicUnit.logicUnitId,\n logicUnitLabel: logicUnit.logicUnitLabel,\n path: logicUnit.path,\n },\n }\n : {}),\n })\n }) as any\n\n const api: BoundApi<Sh, R> = {\n root: {\n resolve: (tag: any) => {\n guardRunOnly('root_resolve_in_setup', '$.root.resolve')\n return Root.resolve(tag, {\n entrypoint: 'logic.$.root.resolve',\n waitForReady: true,\n }) as any\n },\n },\n state: stateApi,\n actions,\n dispatchers,\n dispatch,\n flow: flowApi,\n match: matchApi,\n matchTag: matchTagApi,\n lifecycle: {\n onInitRequired: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onInitRequired') as any\n }\n runtimeInternals.lifecycle.registerInitRequired(eff as any)\n return Effect.void as any\n },\n onStart: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onStart') as any\n }\n runtimeInternals.lifecycle.registerStart(eff as any)\n return Effect.void as any\n },\n onInit: (eff: Logic.Of<Sh, R, void, never>) => {\n // Legacy alias: same semantics as onInitRequired (to reduce migration friction).\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onInit') as any\n }\n runtimeInternals.lifecycle.registerInitRequired(eff as any)\n return Effect.void as any\n },\n onDestroy: (eff: Logic.Of<Sh, R, void, never>) => {\n 
if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onDestroy') as any\n }\n runtimeInternals.lifecycle.registerDestroy(eff as any)\n return Effect.void as any\n },\n onError: (\n handler: (\n cause: import('effect').Cause.Cause<unknown>,\n context: Lifecycle.ErrorContext,\n ) => Effect.Effect<void, never, R>,\n ) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onError') as any\n }\n runtimeInternals.lifecycle.registerOnError(handler as any)\n return Effect.void as any\n },\n onSuspend: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onSuspend') as any\n }\n runtimeInternals.lifecycle.registerPlatformSuspend(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n onResume: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onResume') as any\n }\n runtimeInternals.lifecycle.registerPlatformResume(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n onReset: (eff: Logic.Of<Sh, R, void, never>) => {\n if (getCurrentPhase() === 'run') {\n return emitSetupOnlyViolation('$.lifecycle.onReset') as any\n }\n runtimeInternals.lifecycle.registerPlatformReset(Effect.asVoid(eff as Effect.Effect<void, never, any>))\n return Effect.void as any\n },\n },\n traits: {\n declare: (traits: ModuleTraits.TraitSpec) => {\n if (getCurrentPhase() === 'run') {\n throw LogicDiagnostics.makeLogicPhaseError(\n 'traits_declare_in_run',\n '$.traits.declare',\n 'run',\n options?.moduleId,\n )\n }\n\n if (!traits || typeof traits !== 'object') {\n throw new Error('[InvalidTraitsDeclaration] $.traits.declare expects an object.')\n }\n\n const logicUnit = options?.logicUnit ?? 
{\n logicUnitId: 'unknown',\n logicUnitIdKind: 'derived' as const,\n logicUnitLabel: 'logicUnit:unknown',\n path: undefined as string | undefined,\n }\n\n runtimeInternals.traits.registerModuleTraitsContribution({\n traits,\n provenance: {\n originType: 'logicUnit',\n originId: logicUnit.logicUnitId,\n originIdKind: logicUnit.logicUnitIdKind,\n originLabel: logicUnit.logicUnitLabel,\n path: logicUnit.path,\n },\n })\n },\n source: {\n refresh: (fieldPath: string, options?: { readonly force?: boolean }) =>\n Effect.gen(function* () {\n const handler = runtimeInternals.traits.getSourceRefreshHandler(fieldPath) as\n | ((state: Logix.StateOf<Sh>) => Effect.Effect<void, never, any>)\n | undefined\n if (!handler) {\n // If no refresh handler is registered, treat it as a no-op to avoid throwing when StateTraitProgram is not installed.\n return yield* Effect.void\n }\n\n const force = options?.force === true\n const runHandler = (state: Logix.StateOf<Sh>) =>\n force ? Effect.provideService(handler(state), TaskRunner.forceSourceRefresh, true) : handler(state)\n\n // Never call enqueueTransaction inside the transaction window (it can deadlock):\n // - Run the handler inside the current transaction so it writes to the draft via bound.state.mutate.\n // - The outer transaction window is responsible for commit + debug aggregation.\n const inTxn = yield* Effect.service(TaskRunner.inSyncTransactionFiber).pipe(Effect.orDie)\n if (inTxn) {\n const state = (yield* runtime.getState) as Logix.StateOf<Sh>\n return yield* runHandler(state)\n }\n\n // Treat one source-refresh as a dedicated transaction entry.\n return yield* runtimeInternals.txn.runWithStateTransaction(\n {\n kind: 'source-refresh',\n name: fieldPath,\n } as any,\n () =>\n Effect.gen(function* () {\n const state = (yield* runtime.getState) as Logix.StateOf<Sh>\n return yield* runHandler(state)\n }),\n )\n }),\n },\n },\n reducer,\n effect,\n use: new Proxy(() => {}, {\n apply: (_target, _thisArg, [arg]) => {\n 
guardRunOnly('use_in_setup', '$.use')\n if (isModuleLike(arg)) {\n const domain = arg\n const tag = domain.tag as unknown as ServiceMap.Key<any, Logix.ModuleRuntime<any, any>>\n\n const resolveAndBuild = resolveModuleRuntime(tag).pipe(Effect.map((rt) => buildModuleHandle(tag, rt)))\n\n const resolveWithDescriptor = resolveModuleRuntime(tag).pipe(\n Effect.tap((rt) => emitModuleDescriptorOnce(domain, rt)),\n Effect.map((rt) => buildModuleHandle(tag, rt)),\n )\n\n const detectAndSelect = Effect.service(Debug.currentDiagnosticsLevel).pipe(\n Effect.map((level) => {\n cachedDiagnosticsLevel = level\n return level\n }),\n Effect.flatMap((level) => (level === 'off' ? resolveAndBuild : resolveWithDescriptor)),\n )\n\n // 022 perf gate: when diagnostics are off, $.use(module) and $.use(module.tag) must be equivalent with zero extra overhead.\n // Constraint: Effect is a value (reusable), so we must one-time cache at execution time instead of branching at construction time.\n return Effect.suspend(() => {\n if (cachedDiagnosticsLevel === 'off') {\n return resolveAndBuild\n }\n\n if (cachedDiagnosticsLevel !== undefined) {\n return resolveWithDescriptor\n }\n\n return detectAndSelect\n }) as unknown as Logic.Of<Sh, R, any, never>\n }\n if (ServiceMap.isKey(arg)) {\n const candidate = arg as { _kind?: unknown }\n\n // Module: return a read-only ModuleHandle view.\n if (candidate._kind === 'ModuleTag') {\n return resolveModuleRuntime(arg as any).pipe(\n Effect.map((rt: Logix.ModuleRuntime<any, any>) => buildModuleHandle(arg as any, rt)),\n ) as unknown as Logic.Of<Sh, R, any, never>\n }\n\n // Regular service tag: read the service from Env.\n return Effect.service(arg as ServiceMap.Key<any, any>).pipe(Effect.orDie) as unknown as Logic.Of<Sh, R, any, never>\n }\n return Effect.die('BoundApi.use: unsupported argument') as unknown as Logic.Of<Sh, R, any, never>\n },\n }) as unknown as BoundApi<Sh, R>['use'],\n onAction: new Proxy(() => {}, {\n apply: (_target, _thisArg, args) => 
{\n guardRunOnly('use_in_setup', '$.onAction')\n const arg = args[0]\n if (Action.isActionToken(arg)) {\n const tag = arg.tag\n return createIntentBuilder(actionStreamByTag(tag).pipe(Stream.map((action: any) => action.payload)), tag)\n }\n if (Schema.isSchema(arg)) {\n const decode = Schema.decodeUnknownSync(arg as any)\n return createIntentBuilder(\n runtime.actions$.pipe(\n Stream.filter((a: any) => {\n try {\n decode(a)\n return true\n } catch {\n return false\n }\n }),\n ),\n )\n }\n if (typeof arg === 'function') {\n return createIntentBuilder(runtime.actions$.pipe(Stream.filter(arg)))\n }\n if (typeof arg === 'string') {\n return createIntentBuilder(actionStreamByTag(arg), arg)\n }\n if (typeof arg === 'object' && arg !== null) {\n if ('_tag' in arg) {\n const tag = String((arg as any)._tag)\n return createIntentBuilder(actionStreamByTag(tag), tag)\n }\n }\n return createIntentBuilder(runtime.actions$)\n },\n get: (_target, prop) => {\n guardRunOnly('use_in_setup', '$.onAction')\n if (typeof prop === 'string') {\n return createIntentBuilder(actionStreamByTag(prop), prop)\n }\n return undefined\n },\n }) as unknown as BoundApi<Sh, R>['onAction'],\n onState: (selector: (s: Logix.StateOf<Sh>) => any) => {\n guardRunOnly('use_in_setup', '$.onState')\n return createIntentBuilder(runtime.changes(selector))\n },\n on: (stream: Stream.Stream<any>) => {\n guardRunOnly('use_in_setup', '$.on')\n return createIntentBuilder(stream)\n },\n } as any\n\n setBoundInternals(api as any, runtimeInternals)\n\n return api\n}\n","import { Schema } from 'effect'\n\ntype ActionArgs<P> = [P] extends [void] ? [] | [P] : [P]\ntype ActionFn<P, Out> = (...args: ActionArgs<P>) => Out\n\ntype DevSource = {\n readonly file: string\n readonly line: number\n readonly column: number\n}\n\nexport type ActionValue<Tag extends string, Payload> = Payload extends void\n ? 
{\n readonly _tag: Tag\n readonly payload?: Payload\n }\n : {\n readonly _tag: Tag\n readonly payload: Payload\n }\n\nexport type ActionCreator<Tag extends string, Payload> = ActionFn<Payload, ActionValue<Tag, Payload>>\n\nexport type ActionToken<\n Tag extends string,\n Payload,\n PayloadSchema extends Schema.Schema<any> = Schema.Schema<any>,\n> = ActionCreator<Tag, Payload> & {\n readonly _kind: 'ActionToken'\n readonly tag: Tag\n readonly schema: PayloadSchema\n readonly source?: DevSource\n}\n\nexport type AnyActionToken = ActionToken<string, any, Schema.Schema<any>>\n\nexport const isActionToken = (value: unknown): value is AnyActionToken =>\n typeof value === 'function' &&\n (value as any)._kind === 'ActionToken' &&\n typeof (value as any).tag === 'string' &&\n Schema.isSchema((value as any).schema)\n\nexport const make = <Tag extends string, PayloadSchema extends Schema.Schema<any>>(\n tag: Tag,\n schema: PayloadSchema,\n options?: { readonly source?: DevSource },\n): ActionToken<Tag, Schema.Schema.Type<PayloadSchema>, PayloadSchema> => {\n const fn = ((...args: readonly [unknown?]) => ({\n _tag: tag,\n payload: args[0],\n })) as unknown as ActionToken<Tag, Schema.Schema.Type<PayloadSchema>, PayloadSchema>\n\n ;(fn as any)._kind = 'ActionToken'\n ;(fn as any).tag = tag\n ;(fn as any).schema = schema\n if (options?.source) {\n ;(fn as any).source = options.source\n }\n\n return fn\n}\n\nexport const makeActions = <M extends Record<string, Schema.Schema<any>>>(\n schemas: M,\n options?: {\n readonly source?: DevSource\n readonly sources?: Partial<Record<Extract<keyof M, string>, DevSource>>\n },\n): {\n readonly [K in keyof M]: ActionToken<Extract<K, string>, Schema.Schema.Type<M[K]>, M[K]>\n} => {\n const out: Record<string, AnyActionToken> = {}\n const sources = options?.sources as Record<string, DevSource | undefined> | undefined\n const defaultSource = options?.source\n for (const [key, schema] of Object.entries(schemas)) {\n const source = sources?.[key] 
?? defaultSource\n out[key] = make(key, schema, source ? { source } : undefined)\n }\n return out as any\n}\n\nexport type ActionDef = Schema.Schema<any> | AnyActionToken\nexport type ActionDefs = Record<string, ActionDef>\n\nexport type NormalizedActionTokens<M extends ActionDefs> = {\n readonly [K in keyof M]: M[K] extends Schema.Schema<any>\n ? ActionToken<Extract<K, string>, Schema.Schema.Type<M[K]>, M[K]>\n : M[K] extends ActionToken<any, infer P, infer S>\n ? ActionToken<Extract<K, string>, P, S>\n : never\n}\n\nexport const normalizeActions = <M extends ActionDefs>(defs: M): NormalizedActionTokens<M> => {\n const out: Record<string, AnyActionToken> = {}\n\n for (const [key, def] of Object.entries(defs)) {\n if (Schema.isSchema(def)) {\n out[key] = make(key, def)\n continue\n }\n\n if (isActionToken(def)) {\n if (def.tag !== key) {\n throw new Error(`[Logix.Action] actionTag MUST equal key: key=\"${key}\", token.tag=\"${def.tag}\"`)\n }\n out[key] = def\n continue\n }\n\n throw new Error(`[Logix.Action] invalid action def for key \"${key}\"`)\n }\n\n return out as any\n}\n","import { Effect, Stream, Option } from 'effect'\nimport type { AnyModuleShape, LogicEffect, ModuleRuntime, StateOf, ActionOf, ModuleShape } from './module.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport * as EffectOp from '../../effect-op.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport { RunSessionTag } from '../../observability/runSession.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\nimport * as Debug from './DebugSink.js'\nimport * as ReadQuery from './ReadQuery.js'\nimport { makeRunBudgetEnvelopeV1, makeRunDegradeMarkerV1 } from './diagnosticsBudget.js'\nimport * as ModeRunner from './ModeRunner.js'\n\nconst getMiddlewareStack = (): Effect.Effect<EffectOp.MiddlewareStack, never, any> =>\n Effect.serviceOption(EffectOpCore.EffectOpMiddlewareTag).pipe(\n Effect.map((maybe) => (Option.isSome(maybe) ? 
maybe.value.stack : [])),\n )\n\nconst getRuntimeScope = (runtime: unknown): { readonly moduleId?: string; readonly instanceId?: string } => {\n if (!runtime) return {}\n if (typeof runtime !== 'object' && typeof runtime !== 'function') return {}\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n return {\n moduleId: typeof scope.moduleId === 'string' ? scope.moduleId : undefined,\n instanceId: typeof scope.instanceId === 'string' ? scope.instanceId : undefined,\n }\n}\n\ntype RuntimeReadQueryWithMetaCapability<S> = {\n readonly changesReadQueryWithMeta: <V>(readQuery: ReadQuery.ReadQueryInput<S, V>) => Stream.Stream<{ readonly value: V }>\n}\n\nconst hasChangesReadQueryWithMeta = <S>(candidate: unknown): candidate is RuntimeReadQueryWithMetaCapability<S> => {\n if (candidate == null) return false\n if (typeof candidate !== 'object' && typeof candidate !== 'function') return false\n return typeof (candidate as { readonly changesReadQueryWithMeta?: unknown }).changesReadQueryWithMeta === 'function'\n}\n\nexport interface Api<Sh extends ModuleShape<any, any>, R = never> {\n readonly fromAction: <T extends ActionOf<Sh>>(predicate: (a: ActionOf<Sh>) => a is T) => Stream.Stream<T>\n\n readonly fromState: {\n <V>(selector: (s: StateOf<Sh>) => V): Stream.Stream<V>\n <V>(query: ReadQuery.ReadQuery<StateOf<Sh>, V>): Stream.Stream<V>\n }\n\n readonly debounce: <V>(ms: number) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly throttle: <V>(ms: number) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly filter: <V>(predicate: (value: V) => boolean) => (stream: Stream.Stream<V>) => Stream.Stream<V>\n\n readonly run: {\n <V, A = void, E = never, R2 = unknown>(\n eff: LogicEffect<Sh, R & R2, A, E> | ((payload: V) => LogicEffect<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ): (stream: Stream.Stream<V>) => LogicEffect<Sh, R & R2, void, E>\n <V, A = void, E = never, R2 = unknown>(\n config: RunConfig<Sh, R 
& R2, V, A, E>,\n ): (stream: Stream.Stream<V>) => LogicEffect<Sh, R & R2, void, E>\n }\n\n readonly runParallel: <V, A = void, E = never, R2 = unknown>(\n eff: LogicEffect<Sh, R & R2, A, E> | ((payload: V) => LogicEffect<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => LogicEffect<Sh, R & R2, void, E>\n\n readonly runLatest: <V, A = void, E = never, R2 = unknown>(\n eff: LogicEffect<Sh, R & R2, A, E> | ((payload: V) => LogicEffect<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => LogicEffect<Sh, R & R2, void, E>\n\n readonly runExhaust: <V, A = void, E = never, R2 = unknown>(\n eff: LogicEffect<Sh, R & R2, A, E> | ((payload: V) => LogicEffect<Sh, R & R2, A, E>),\n options?: Logic.OperationOptions,\n ) => (stream: Stream.Stream<V>) => LogicEffect<Sh, R & R2, void, E>\n}\n\nexport interface RunConfig<Sh extends AnyModuleShape, R, V, A = void, E = never> {\n readonly effect: LogicEffect<Sh, R, A, E> | ((payload: V) => LogicEffect<Sh, R, A, E>)\n readonly mode?: ModeRunner.ModeRunnerMode\n readonly options?: Logic.OperationOptions\n}\n\ntype EffectResolver<T, Sh extends AnyModuleShape, R, A, E> = (payload: T) => LogicEffect<Sh, R, A, E>\n\nconst preResolveEffectResolver = <T, Sh extends AnyModuleShape, R, A, E>(\n eff: LogicEffect<Sh, R, A, E> | EffectResolver<T, Sh, R, A, E>,\n): EffectResolver<T, Sh, R, A, E> => {\n if (typeof eff === 'function') {\n return eff as EffectResolver<T, Sh, R, A, E>\n }\n return () => eff\n}\n\nconst resolveFlowRunId = (name: string, meta: Record<string, unknown>, fallbackRunSeq?: number): string => {\n const explicitRunId = meta.runId\n if (typeof explicitRunId === 'string' && explicitRunId.length > 0) {\n return explicitRunId\n }\n\n const instanceId = typeof meta.instanceId === 'string' && meta.instanceId.length > 0 ? 
meta.instanceId : 'global'\n const opSeq = meta.opSeq\n if (typeof opSeq === 'number' && Number.isFinite(opSeq) && opSeq >= 1) {\n return `${instanceId}::${name}::r${Math.floor(opSeq)}`\n }\n if (typeof fallbackRunSeq === 'number' && Number.isFinite(fallbackRunSeq) && fallbackRunSeq >= 1) {\n return `${instanceId}::${name}::r${Math.floor(fallbackRunSeq)}`\n }\n return `${instanceId}::${name}`\n}\n\nconst withFlowRunBudgetMeta = (\n name: string,\n meta: Record<string, unknown>,\n fallbackRunSeq?: number,\n): Record<string, unknown> => {\n const disableObservers =\n typeof meta.policy === 'object' && meta.policy !== null && (meta.policy as { readonly disableObservers?: unknown }).disableObservers === true\n\n return {\n ...meta,\n budgetEnvelope: makeRunBudgetEnvelopeV1({\n domain: 'flow',\n runId: resolveFlowRunId(name, meta, fallbackRunSeq),\n }),\n degrade: makeRunDegradeMarkerV1(disableObservers, disableObservers ? 'observer_disabled' : undefined),\n }\n}\n\nconst isRunConfig = <Sh extends AnyModuleShape, R, V, A, E>(\n input: unknown,\n): input is RunConfig<Sh, R, V, A, E> => {\n if (!input || typeof input !== 'object') {\n return false\n }\n const candidate = input as { readonly effect?: unknown; readonly mode?: unknown }\n if (!('effect' in candidate)) {\n return false\n }\n const mode = candidate.mode\n if (mode === undefined) {\n return true\n }\n return mode === 'task' || mode === 'parallel' || mode === 'latest' || mode === 'exhaust'\n}\n\nexport const make = <Sh extends AnyModuleShape, R = never>(\n runtime: ModuleRuntime<StateOf<Sh>, ActionOf<Sh>>,\n runtimeInternals?: RuntimeInternals,\n): Api<Sh, R> => {\n let flowBudgetRunSeq = 0\n const scope = getRuntimeScope(runtime)\n const resolveConcurrencyLimit = (): Effect.Effect<number | 'unbounded', never, any> =>\n runtimeInternals\n ? 
runtimeInternals.concurrency.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\n interface FlowOpRunContext {\n readonly stack: EffectOp.MiddlewareStack\n readonly hasMiddleware: boolean\n readonly metaTemplate: Record<string, unknown>\n readonly hasFiniteTemplateOpSeq: boolean\n readonly allocateOpSeq?: () => number\n }\n\n const makeFlowOpRunContext = (\n options?: Logic.OperationOptions,\n ): Effect.Effect<FlowOpRunContext, never, any> =>\n Effect.gen(function* () {\n const stack = yield* getMiddlewareStack()\n if (stack.length === 0) {\n return {\n stack,\n hasMiddleware: false,\n metaTemplate: {},\n hasFiniteTemplateOpSeq: false,\n allocateOpSeq: undefined,\n }\n }\n\n const sessionOpt = yield* Effect.serviceOption(RunSessionTag)\n\n const metaTemplate: Record<string, unknown> = {\n ...(options?.meta ?? {}),\n policy: options?.policy,\n tags: options?.tags,\n trace: options?.trace,\n moduleId: scope.moduleId,\n instanceId: scope.instanceId,\n }\n\n const hasFiniteTemplateOpSeq =\n typeof metaTemplate.opSeq === 'number' && Number.isFinite(metaTemplate.opSeq)\n const runSessionLocal = Option.isSome(sessionOpt) ? sessionOpt.value.local : undefined\n const opSeqKey = (metaTemplate.instanceId as string | undefined) ?? 'global'\n const allocateOpSeq = runSessionLocal\n ? 
() => runSessionLocal.nextSeq('opSeq', opSeqKey)\n : undefined\n\n return {\n stack,\n hasMiddleware: true,\n metaTemplate,\n hasFiniteTemplateOpSeq,\n allocateOpSeq,\n }\n })\n\n const buildFlowOpMeta = (context: FlowOpRunContext): Record<string, unknown> => {\n if (context.hasFiniteTemplateOpSeq) {\n // Keep per-op meta isolation when caller provided a fixed opSeq.\n return { ...context.metaTemplate }\n }\n if (context.allocateOpSeq) {\n return {\n ...context.metaTemplate,\n opSeq: context.allocateOpSeq(),\n }\n }\n // No in-session opSeq allocation: share template and let EffectOp.make fill opSeq.\n return context.metaTemplate\n }\n\n const runAsFlowOp = <A, E, R2, V>(\n context: FlowOpRunContext,\n name: string,\n payload: V,\n eff: LogicEffect<Sh, R & R2, A, E>,\n ): LogicEffect<Sh, R & R2, A, E> => {\n if (!context.hasMiddleware) {\n return eff\n }\n return Effect.gen(function* () {\n flowBudgetRunSeq += 1\n const meta = withFlowRunBudgetMeta(name, buildFlowOpMeta(context), flowBudgetRunSeq)\n\n const op = EffectOp.make<A, E, any>({\n kind: 'flow',\n name,\n payload,\n effect: eff as any,\n meta,\n })\n return yield* EffectOp.run(op, context.stack)\n }) as any\n }\n\n const makeFlowOpMapper = <T, A, E, R2>(\n context: FlowOpRunContext,\n name: string,\n resolver: EffectResolver<T, Sh, R & R2, A, E>,\n ) => {\n if (!context.hasMiddleware) {\n return resolver\n }\n return (payload: T) => runAsFlowOp<A, E, R2, T>(context, name, payload, resolver(payload))\n }\n\n const runStreamWithMode =\n <T, A, E, R2>(\n mode: ModeRunner.ModeRunnerMode,\n name: 'flow.run' | 'flow.runParallel' | 'flow.runLatest' | 'flow.runExhaust',\n resolver: EffectResolver<T, Sh, R & R2, A, E>,\n options?: Logic.OperationOptions,\n ) =>\n (stream: Stream.Stream<T>): LogicEffect<Sh, R & R2, void, E> =>\n Effect.gen(function* () {\n const context = yield* makeFlowOpRunContext(options)\n const mapper = makeFlowOpMapper<T, A, E, R2>(context, name, resolver)\n\n return yield* 
ModeRunner.runByMode<T, E, any>({\n stream,\n mode,\n run: mapper,\n resolveConcurrencyLimit: resolveConcurrencyLimit(),\n latest: {\n strategy: 'switch',\n },\n })\n }) as any\n\n const runStreamParallelWithDiagnostics =\n <T, A, E, R2>(resolver: EffectResolver<T, Sh, R & R2, A, E>, options?: Logic.OperationOptions) =>\n (stream: Stream.Stream<T>): LogicEffect<Sh, R & R2, void, E> =>\n runStreamWithMode<T, A, E, R2>('parallel', 'flow.runParallel', resolver, options)(stream).pipe(\n Effect.catchCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: scope.moduleId,\n instanceId: scope.instanceId,\n code: 'flow::unhandled_failure',\n severity: 'error',\n message: 'Flow watcher (runParallel) failed with an unhandled error.',\n hint: 'Handle errors explicitly inside the watcher (catch/catchAll) or write back via TaskRunner failure; avoid silent failures.',\n kind: 'flow_unhandled_failure',\n trigger: {\n kind: 'flow',\n name: 'runParallel',\n },\n }).pipe(Effect.flatMap(() => Effect.failCause(cause)))),\n ) as any\n\n const fromState = <V>(\n selectorOrQuery: ((s: StateOf<Sh>) => V) | ReadQuery.ReadQuery<StateOf<Sh>, V>,\n ): Stream.Stream<V> => {\n const runtimeWithReadQueryMeta = hasChangesReadQueryWithMeta<StateOf<Sh>>(runtime)\n ? 
runtime\n : undefined\n\n if (ReadQuery.isReadQuery(selectorOrQuery)) {\n if (runtimeWithReadQueryMeta) {\n return runtimeWithReadQueryMeta.changesReadQueryWithMeta(selectorOrQuery).pipe(Stream.map((evt) => evt.value))\n }\n return runtime.changes(selectorOrQuery.select)\n }\n\n if (!runtimeWithReadQueryMeta) {\n return runtime.changes(selectorOrQuery)\n }\n\n const compiled = ReadQuery.compile(selectorOrQuery)\n if (compiled.lane === 'static') {\n return runtimeWithReadQueryMeta.changesReadQueryWithMeta(compiled).pipe(Stream.map((evt) => evt.value))\n }\n\n return runtime.changes(selectorOrQuery)\n }\n\n return {\n fromAction: <T extends ActionOf<Sh>>(predicate: (a: ActionOf<Sh>) => a is T) =>\n runtime.actions$.pipe(Stream.filter(predicate)),\n\n fromState,\n\n debounce: (ms: number) => (stream) => Stream.debounce(stream, ms),\n\n throttle: (ms: number) => (stream) =>\n Stream.throttle(stream, {\n cost: () => 1,\n units: 1,\n duration: ms,\n strategy: 'enforce',\n }),\n\n filter: (predicate: (value: any) => boolean) => (stream) => Stream.filter(stream, predicate),\n\n run: (effOrConfig: unknown, options?: Logic.OperationOptions) => (stream) => {\n const mode = isRunConfig<Sh, any, any, any, any>(effOrConfig) ? (effOrConfig.mode ?? 'task') : 'task'\n const resolvedOptions = isRunConfig<Sh, any, any, any, any>(effOrConfig) ? effOrConfig.options : options\n const effect = isRunConfig<Sh, any, any, any, any>(effOrConfig) ? effOrConfig.effect : effOrConfig\n const resolver = preResolveEffectResolver<any, Sh, any, any, any>(effect as any)\n if (mode === 'parallel') {\n return runStreamParallelWithDiagnostics<any, any, any, any>(resolver, resolvedOptions)(stream) as any\n }\n return runStreamWithMode<any, any, any, any>(\n mode,\n mode === 'latest' ? 'flow.runLatest' : mode === 'exhaust' ? 
'flow.runExhaust' : 'flow.run',\n resolver,\n resolvedOptions,\n )(stream) as any\n },\n\n runParallel: (eff, options) => (stream) =>\n runStreamParallelWithDiagnostics<any, any, any, any>(\n preResolveEffectResolver<any, Sh, any, any, any>(eff),\n options,\n )(stream),\n\n runLatest: (eff, options) => (stream) =>\n runStreamWithMode<any, any, any, any>(\n 'latest',\n 'flow.runLatest',\n preResolveEffectResolver<any, Sh, any, any, any>(eff),\n options,\n )(stream),\n\n runExhaust: (eff, options) => (stream) =>\n runStreamWithMode<any, any, any, any>(\n 'exhaust',\n 'flow.runExhaust',\n preResolveEffectResolver<any, Sh, any, any, any>(eff),\n options,\n )(stream),\n }\n}\n","import { Effect } from 'effect'\n\nexport const makeMatch = <V>(value: V) => {\n let result: Effect.Effect<any, any, any> | undefined\n\n const chain = {\n with: <A>(predicate: (value: V) => boolean, handler: (value: V) => A) => {\n if (result) return chain\n if (predicate(value)) {\n result = handler(value) as any\n }\n return chain\n },\n otherwise: <A>(handler: (value: V) => A): A => {\n if (result) return result as A\n return handler(value)\n },\n exhaustive: () => {\n if (result) {\n return result\n }\n return Effect.die(new Error('[FluentMatch] Non-exhaustive match: no pattern matched value'))\n },\n }\n\n return chain\n}\n\nexport const makeMatchTag = <V extends { _tag: string }>(value: V) => {\n let result: Effect.Effect<any, any, any> | undefined\n\n const chain = {\n with: <K extends V['_tag'], A>(t: K, handler: (value: Extract<V, { _tag: K }>) => A) => {\n if (result) return chain\n if (value._tag === t) {\n result = handler(value as Extract<V, { _tag: K }>) as any\n }\n return chain\n },\n otherwise: <A>(handler: (value: V) => A): A => {\n if (result) return result as A\n return handler(value)\n },\n exhaustive: () => {\n if (result) {\n return result\n }\n return Effect.die(new Error('[FluentMatchTag] Non-exhaustive match: no tag handler matched value'))\n },\n }\n\n return 
chain\n}\n","import { Deferred, Effect, Layer, ServiceMap } from 'effect'\nimport { isDevEnv } from './runtime/core/env.js'\nimport { RootContextTag, type RootContext } from './runtime/core/RootContext.js'\n\nexport type RootResolveEntrypoint = 'logic.root.resolve' | 'logic.$.root.resolve'\n\nexport interface RootResolveOptions {\n readonly entrypoint?: RootResolveEntrypoint\n /**\n * Whether to wait when RootContext is not ready yet:\n * - Default false: avoid misuse during layer/setup which can deadlock.\n * - `$.root.resolve` passes true in the run phase (run-only), allowing Env assembly to complete.\n */\n readonly waitForReady?: boolean\n}\n\nconst tagIdOf = (tag: ServiceMap.Key<any, any>): string =>\n typeof (tag as any)?.id === 'string'\n ? String((tag as any).id)\n : typeof (tag as any)?.key === 'string'\n ? String((tag as any).key)\n : '<unknown tag>'\n\nconst makeMissingRootProviderError = (\n tag: ServiceMap.Key<any, any>,\n entrypoint: RootResolveEntrypoint,\n extra?: string,\n): Error => {\n const dev = isDevEnv()\n const tokenId = tagIdOf(tag)\n const fix: string[] = dev\n ? [\n '- Provide it when creating the runtime tree (Logix.Runtime.make(...,{ layer }) / ManagedRuntime.make(Layer.mergeAll(...))).',\n \"- If you're in React and want the current runtime environment singleton, use useModule(ModuleTag).\",\n '- Do not rely on nested RuntimeProvider.layer to mock Root.resolve.',\n ]\n : []\n\n const message = dev\n ? [\n '[MissingRootProviderError] Cannot resolve Tag from root provider.',\n extra ? 
`\\n${extra}` : '',\n `tokenId: ${tokenId}`,\n `entrypoint: ${entrypoint}`,\n 'mode: global',\n 'startScope: root',\n '',\n 'fix:',\n ...fix,\n ]\n .filter((s) => s.length > 0)\n .join('\\n')\n : '[MissingRootProviderError] tag not found in root provider'\n\n const err = new Error(message)\n err.name = 'MissingRootProviderError'\n ;(err as any).tokenId = tokenId\n ;(err as any).entrypoint = entrypoint\n ;(err as any).mode = 'global'\n ;(err as any).startScope = { kind: 'root' }\n ;(err as any).fix = fix\n return err\n}\n\n/**\n * resolve\n *\n * Resolve a Tag explicitly from the root provider of the current Runtime tree (ServiceTag / ModuleTag).\n *\n * Semantics:\n * - Always reads rootContext; unaffected by nearer-scope Layer/Context overrides.\n * - For ModuleTag: expresses root singleton semantics only (not used for multi-instance selection).\n */\nexport const resolve = <Id, Svc>(\n tag: ServiceMap.Key<Id, Svc>,\n options?: RootResolveOptions,\n): Effect.Effect<Svc, never, any> =>\n Effect.gen(function* () {\n const entrypoint: RootResolveEntrypoint = options?.entrypoint ?? 'logic.root.resolve'\n\n const root = yield* Effect.service(RootContextTag).pipe(Effect.orDie)\n\n const rootContext = root.context ?? (options?.waitForReady ? 
yield* Deferred.await(root.ready) : undefined)\n\n if (!rootContext) {\n return yield* Effect.die(\n makeMissingRootProviderError(tag as ServiceMap.Key<any, any>, entrypoint, 'reason: rootContextNotReady'),\n )\n }\n\n try {\n return ServiceMap.get(rootContext, tag as ServiceMap.Key<any, any>) as Svc\n } catch {\n return yield* Effect.die(makeMissingRootProviderError(tag as ServiceMap.Key<any, any>, entrypoint))\n }\n })\n\n/**\n * layerFromContext(tests/perf only)\n *\n * Provide a \"ready immediately\" RootContext for Root.resolve.\n * - `ready` is fulfilled immediately to avoid extra waits when waitForReady=true.\n */\nexport const layerFromContext = (context: ServiceMap.ServiceMap<any>): Layer.Layer<any, never, any> =>\n Layer.effect(\n RootContextTag,\n Effect.gen(function* () {\n const ready = yield* Deferred.make<ServiceMap.ServiceMap<any>>()\n yield* Deferred.succeed(ready, context)\n const root: RootContext = { context, ready, lifecycle: { state: 'ready' } }\n return root\n }),\n )\n","import { fnv1a32, stableStringify } from '../../digest.js'\nimport type { ReadQueryStaticIr } from './ReadQuery.js'\n\nexport type DeclarativeLinkNodeId = string\n\nexport type DeclarativeLinkNode =\n | {\n readonly id: DeclarativeLinkNodeId\n readonly kind: 'readQuery'\n readonly moduleId: string\n readonly instanceKey?: string\n /** MUST reuse `ReadQueryStaticIr` (no parallel selector-like IR). 
*/\n readonly readQuery: ReadQueryStaticIr\n }\n | {\n readonly id: DeclarativeLinkNodeId\n readonly kind: 'dispatch'\n readonly moduleId: string\n readonly instanceKey?: string\n readonly actionTag: string\n }\n\nexport type DeclarativeLinkEdge = {\n readonly from: DeclarativeLinkNodeId\n readonly to: DeclarativeLinkNodeId\n}\n\n/**\n * DeclarativeLinkIR (v1):\n * - JSON serializable, IR-recognizable cross-module dependency graph.\n * - Read side: static ReadQuery only (must include readsDigest, no fallbackReason).\n * - Write side: dispatch only (no direct state writes).\n */\nexport interface DeclarativeLinkIR {\n readonly version: 1\n readonly nodes: ReadonlyArray<DeclarativeLinkNode>\n readonly edges: ReadonlyArray<DeclarativeLinkEdge>\n}\n\nexport const getDeclarativeLinkIrDigest = (ir: DeclarativeLinkIR): string =>\n `dlink_ir_v1:${fnv1a32(stableStringify(ir))}`\n\n/**\n * Export envelope compatible with the ConvergeStaticIrCollector bus:\n * - EvidenceCollector indexes by `staticIrDigest`.\n */\nexport interface DeclarativeLinkIrExport {\n readonly staticIrDigest: string\n readonly moduleId: string\n readonly instanceId: string\n readonly kind: 'declarativeLinkIr'\n readonly ir: DeclarativeLinkIR\n}\n\nexport const exportDeclarativeLinkIr = (args: { readonly linkId: string; readonly ir: DeclarativeLinkIR }): DeclarativeLinkIrExport => ({\n staticIrDigest: getDeclarativeLinkIrDigest(args.ir),\n moduleId: `link:${args.linkId}`,\n instanceId: 'runtime',\n kind: 'declarativeLinkIr',\n ir: args.ir,\n})\n\n","import * as ReadQueryCore from './internal/runtime/core/ReadQuery.js'\nimport * as ReadQueryBuildGateCore from './internal/runtime/core/ReadQueryBuildGate.js'\n\nexport type ReadLane = ReadQueryCore.ReadLane\nexport type ReadProducer = ReadQueryCore.ReadProducer\nexport type EqualsKind = ReadQueryCore.EqualsKind\nexport type ReadsDigest = ReadQueryCore.ReadsDigest\nexport type ReadQueryStrictGateRule = ReadQueryCore.ReadQueryStrictGateRule\nexport type 
ReadQueryFallbackReason = ReadQueryCore.ReadQueryFallbackReason\nexport type ReadQueryQualityMeta = ReadQueryCore.ReadQueryQualityMeta\nexport type ReadQueryStrictGateGrade = ReadQueryCore.ReadQueryStrictGateGrade\n\nexport type ReadQueryStaticIr = ReadQueryCore.ReadQueryStaticIr\n\nexport type ReadQuery<S, V> = ReadQueryCore.ReadQuery<S, V>\nexport type ReadQueryCompiled<S, V> = ReadQueryCore.ReadQueryCompiled<S, V>\nexport type ReadQueryInput<S, V> = ReadQueryCore.ReadQueryInput<S, V>\nexport type ReadQueryStrictGateConfig = ReadQueryCore.ReadQueryStrictGateConfig\nexport type ReadQueryStrictGateDecision = ReadQueryCore.ReadQueryStrictGateDecision\n\nexport const isReadQuery = ReadQueryCore.isReadQuery\nexport const isReadQueryCompiled = ReadQueryCore.isReadQueryCompiled\nexport const hasBuildQualityGrade = ReadQueryCore.hasBuildQualityGrade\nexport const shouldEvaluateStrictGateAtRuntime = ReadQueryCore.shouldEvaluateStrictGateAtRuntime\nexport const markRuntimeMissingBuildGrade = ReadQueryCore.markRuntimeMissingBuildGrade\nexport const resolveBuildGradeStrictGateDecision = ReadQueryCore.resolveBuildGradeStrictGateDecision\n\nexport const make = ReadQueryCore.make\n\nexport const compile = ReadQueryCore.compile\nexport const evaluateStrictGate = ReadQueryCore.evaluateStrictGate\n\nexport type SelectorQualityEntry = ReadQueryBuildGateCore.SelectorQualityEntry\nexport type SelectorQualitySummary = ReadQueryBuildGateCore.SelectorQualitySummary\nexport type SelectorQualityReport = ReadQueryBuildGateCore.SelectorQualityReport\nexport type SelectorQualityReportResult = ReadQueryBuildGateCore.SelectorQualityReportResult\nexport type BuildReadQueryGradeResult<S, V> = ReadQueryBuildGateCore.BuildReadQueryGradeResult<S, V>\n\nexport const gradeReadQueryAtBuild = ReadQueryBuildGateCore.gradeReadQueryAtBuild\nexport const buildSelectorQualityReport = ReadQueryBuildGateCore.buildSelectorQualityReport\nexport const hasBuildGateFailure = 
ReadQueryBuildGateCore.hasBuildGateFailure\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA,oBAAAA;AAAA,EAAA,qBAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA;AAAA;AAAA,cAAAC;AAAA;AAAA;AAAA,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAA0D;;;ACA1D,IAAAC,kBAcO;;;ACdP,IAAAC,iBAA+C;;;ACA/C,oBAAsB;;;ACAtB,IAAAC,iBAAiE;;;ACwEjE,IAAM,iBAAoD;AAAA,EACxD,UAAU;AAAA,EACV,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,cAAc,IAAI;AAAA,EAClB,uBAAuB;AACzB;;;AC3EA,IAAAC,iBAAmC;AAE5B,IAAM,gBAAgB,0BAAW,UAA8B,+BAA+B;AAAA,EACnG,cAAc,MAAM;AACtB,CAAC;AAsHM,IAAM,wBAAN,cAAoC,0BAAW,QAGpD,EAAE,0BAA0B,EAAE;AAAC;;;AFmJ1B,IAAM,oBAAoB,0BAAW,UAA+B,yCAAyC;AAAA,EAClH,cAAc,MAAM,CAAC;AACvB,CAAC;AACM,IAAM,sBAAsB,0BAAW,UAA8B,2CAA2C;AAAA,EACrH,cAAc,MAAM;AACtB,CAAC;AACM,IAAM,eAAe,0BAAW,UAA8B,oCAAoC;AAAA,EACvG,cAAc,MAAM;AACtB,CAAC;AACM,IAAM,eAAe,0BAAW,UAA8B,oCAAoC;AAAA,EACvG,cAAc,MAAM;AACtB,CAAC;AAEM,IAAM,0BAA0B,0BAAW,UAA4B,+CAA+C;AAAA,EAC3H,cAAc,MAAM;AACtB,CAAC;AAMM,IAAM,oCAAoC,0BAAW;AAAA,EAC1D;AAAA,EACA;AAAA,IACE,cAAc,MAAM;AAAA,EACtB;AACF;AAMO,IAAM,mBAAmB,0BAAW,UAAqB,wCAAwC;AAAA,EACtG,cAAc,MAAM;AACtB,CAAC;AAiBM,IAAM,0CAA0C,0BAAW;AAAA,EAChE;AAAA,EACA;AAAA,IACE,cAAc,OAAO;AAAA,MACnB,cAAc;AAAA,MACd,MAAM;AAAA,IACR;AAAA,EACF;AACF;AAgPA,IAAM,uBAAuB,oBAAI,IAAY;AAC7C,IAAM,wBAAwB,oBAAI,IAAY;AA4C9C,IAAM,oBAAoB,CAAC,UAAgE;AACzF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,eAAe,MAAM;AACzB,QAAI;AACA,aAAO,qBAAM,OAAO,MAAM,KAA6B;AAAA,IAC3D,QAAQ;AACN,UAAI;AACF,eAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,MAC5C,QAAQ;AACN,eAAO,OAAO,MAAM,KAAK;AAAA,MAC3B;AAAA,IACF;AAAA,EACF,GAAG;AAEH,QAAM,UAAU,kBAAkB,QAAQ;AAAA,EAAsB,WAAW;AAE3E,SAAO,sBAAO,SAAS,OAAO,EAAE;AAAA,IAC9B,sBAAO,aAAa;AAAA,MAClB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,CAAC,UAA2D;AAChF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,SAAS,kBAAkB,QAAQ,gBAAgB,MAAM,QAAQ;AACvE,QAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAC5C,QAAM,MAAM,GAAG,MAAM;AAAA,EAAK,MAAM;AAEhC,QAAM,OACJ,MAAM,aAAa,YACf,sBAAO,WAAW
,GAAG,IACrB,MAAM,aAAa,SACjB,sBAAO,QAAQ,GAAG,IAClB,sBAAO,SAAS,GAAG;AAE3B,QAAM,cAAuC;AAAA,IAC3C,kBAAkB;AAAA,IAClB,eAAe,cAAc,MAAM,QAAQ;AAAA,IAC3C,yBAAyB,MAAM;AAAA,IAC/B,4BAA4B,MAAM;AAAA,EACpC;AACA,MAAI,MAAM,MAAM;AACd,gBAAY,uBAAuB,IAAI,MAAM;AAAA,EAC/C;AACA,MAAI,MAAM,WAAW;AACnB,gBAAY,4BAA4B,IAAI,MAAM;AAAA,EACpD;AAEA,SAAO,KAAK,KAAK,sBAAO,aAAa,WAAW,CAAC;AACnD;AAOO,IAAM,YAAY,qBAAM,QAAQ,mBAAmB,CAAC,CAAC;AAQ5D,IAAM,gBAAsB;AAAA,EAC1B,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AACjB;AAEO,IAAM,iBAAiB,qBAAM,QAAQ,mBAAmB,CAAC,aAAa,CAAC;AAEvE,IAAM,uBAAuB,CAAC,UAAwC,MAAM,WAAW,KAAK,MAAM,CAAC,MAAM;AAOhH,IAAM,cAAoB;AAAA,EACxB,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAC/C;AAEO,IAAM,eAAe,qBAAM,QAAQ,mBAAmB,CAAC,WAAW,CAAC;AAE1E,IAAM,YAAY,OAAO,WAAW,eAAe,OAAO,aAAa;AAGvE,IAAM,4BAA4B,CAAC,UAAsC;AAEvE,MAAI,OAAQ,MAAc,SAAS,YAAa,MAAc,KAAK,WAAW,QAAQ,GAAG;AACvF,UAAM,WAAY,MAAc,YAAY;AAC5C,UAAM,OAAQ,MAAc;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,yBAAyB,WAAW,QAAQ,OAAO,IAAI;AAAA,QACvD;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,IAAI,KAAK;AAEjB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,mBAAmB;AACpC,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,eAAe,MAAM;AACzB,UAAI;AACF,eAAO,qBAAM,OAAO,MAAM,KAA6B;AAAA,MACzD,QAAQ;AACN,YAAI;AACF,iBAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,QAC5C,QAAQ;AACN,iBAAO,OAAO,MAAM,KAAK;AAAA,QAC3B;AAAA,MACF;AAAA,IACF,GAAG;AAEH,UAAM,MAAM,GAAG,QAAQ,IAAI,WAAW;AACtC,QAAI,qBAAqB,IAAI,GAAG,GAAG;AACjC,aAAO,sBAAO;AAAA,IAChB;AACA,yBAAqB,IAAI,GAAG;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,mCAAmC;AAAA,QACnC;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,MAAM,WAAW;AAEzB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,cAAc;AAC/B,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAE5C,UAAM,QACJ,MAAM,aAAa,YAAY,kBAAkB,MAAM,
aAAa,SAAS,kBAAkB;AAEjG,UAAM,QACJ,MAAM,aAAa,YACf,wBACA,MAAM,aAAa,SACjB,qBACA;AAER,UAAM,MAAM,GAAG,QAAQ,IAAI,MAAM,IAAI,IAAI,MAAM,OAAO;AACtD,QAAI,sBAAsB,IAAI,GAAG,GAAG;AAClC,aAAO,sBAAO;AAAA,IAChB;AACA,0BAAsB,IAAI,GAAG;AAE7B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,iBAAiB,QAAQ,eAAe;AAAA,QACxC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,UAAI,MAAM,aAAa,WAAW;AAEhC,gBAAQ,KAAK,MAAM;AAAA,MACrB,WAAW,MAAM,aAAa,QAAQ;AAEpC,gBAAQ,KAAK,MAAM;AAAA,MACrB,OAAO;AAEL,gBAAQ,MAAM,MAAM;AAAA,MACtB;AAEA,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAIA,SAAO,sBAAO;AAChB;AAOA,IAAM,qBAA2B;AAAA,EAC/B,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AAEd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAAA,IAC7C;AAEA,WAAO,0BAA0B,KAAK;AAAA,EACxC;AACF;AAEO,IAAM,sBAAsB,qBAAM,QAAQ,mBAAmB,CAAC,kBAAkB,CAAC;AAQxF,IAAM,+BAAqC;AAAA,EACzC,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AACd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AAAA,IACf;AAEA,WAAO,MAAM,SAAS,qBAAsB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAC1F,0BAA0B,KAAK,IAC/B,sBAAO;AAAA,EACb;AACF;AAEO,IAAM,gCAAgC,qBAAM,QAAQ,mBAAmB,CAAC,4BAA4B,CAAC;AAOrG,IAAM,2BAA2B,qBAAM;AAAA,EAC5C,sBAAO;AAAA,EACP,sBAAO,IAAI,aAAa;AACtB,UAAM,UAAU,OAAO,sBAAO,QAAQ,sBAAO,cAAc;AAC3D,WAAO,IAAI;AAAA,MACT,CAAC,GAAG,OAAO,EAAE,OAAO,CAAC,WAAW,WAAW,sBAAO,aAAa,EAAE,OAAO,sBAAO,cAAc,EAAE,MAAM,WAAW,QAAQ,KAAK,CAAC,CAAC;AAAA,IACjI;AAAA,EACF,CAAC;AACH;AASO,IAAM,SAAS,CAAC,UACrB,sBAAO,IAAI,aAAa;AACtB,QAAM,QAAQ,OAAO,sBAAO,QAAQ,iBAAiB;AAIrD,MAAI,qBAAqB,KAAK,GAAG;AAC/B,QAAI,MAAM,SAAS,mBAAmB;AACpC,aAAO,kBAAkB,KAAK;AAC9B;AAAA,IACF;AACA,QAAI,MAAM,SAAS,gBAAgB,MAAM,aAAa,QAAQ;AAC5D,aAAO,cAAc,KAAK;AAAA,IAC5B;AACA;AAAA,EACF;AAIA,MAAI,MAAM,WAAW,GAAG;AACtB,QAAI,WAAW;AACb,UAAI,MAAM,SAAS,qBAAqB,MAAM,SAAS,cAAc;AACnE,eAAO,0BAA0B,KAAK;AAAA,MACxC;AACA;AAAA,IACF;AAEA,QAAI,MAAM,SAAS,mBAAmB;AACpC,aAAO,kBAAkB,KAAK;AAC9B;AAAA,IACF;AACA,QAAI,MAAM,SAAS,cAAc;AAC/B,aAAO,cAAc,KAAK;AAAA,IAC5B;AACA;AAAA,EACF;AAIA,MAAI,OAAO,MAAM,SAAS,YAAY,MAAM,KAAK,WAAW,QAAQ,GAAG;AACrE
,UAAM,OAAO,OAAO,sBAAO,QAAQ,gBAAgB;AACnD,QAAI,SAAS,MAAO;AAAA,EACtB;AAEA,QAAM,WAAW;AAEjB,QAAM,mBAAmB,OAAO,sBAAO,QAAQ,uBAAuB;AAKtE,MAAI;AACJ,QAAM,SAAS,MAAc;AAC3B,QAAI,QAAQ,OAAW,OAAM,KAAK,IAAI;AACtC,WAAO;AAAA,EACT;AAIA,MACE,SAAS,cAAc,WACtB,qBAAqB,SAAS,SAAS,SAAS,qBAAqB,SAAS,SAAS,eACxF;AACA;AAAC,IAAC,SAAiB,YAAY,OAAO;AAAA,EACxC;AACA,MAAI,qBAAqB,SAAS,SAAS,iBAAiB,QAAW;AACrE,UAAM,eAAe,OAAO,sBAAO,QAAQ,mBAAmB;AAC9D,QAAI,cAAc;AAChB;AAAC,MAAC,SAAiB,eAAe;AAAA,IACpC;AAAA,EACF;AAEA,MAAI,SAAS,SAAS,gBAAiB,SAAiB,UAAU,QAAW;AAC3E,UAAM,QAAQ,OAAO,sBAAO,QAAQ,YAAY;AAChD,QAAI,OAAO;AACT;AAAC,MAAC,SAAiB,QAAQ;AAAA,IAC7B;AAAA,EACF;AAEA,MACE,qBAAqB,SACpB,SAAiB,SAAS,oBAC1B,SAAiB,WAAW,QAC7B;AACA,UAAM,cAAc,OAAO,sBAAO,cAA2B,aAAa;AAC1E,QAAI,sBAAO,OAAO,WAAW,KAAK,YAAY,OAAO;AACnD;AAAC,MAAC,SAAiB,SAAS,YAAY;AAAA,IAC1C;AAAA,EACF;AAEA,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO,MAAM,CAAC,EAAG,OAAO,QAAQ;AAChC;AAAA,EACF;AAEA,SAAO,sBAAO,QAAQ,OAA8B,CAAC,SAAS,KAAK,OAAO,QAAQ,GAAG,EAAE,SAAS,KAAK,CAAC;AACxG,CAAC;;;AFp1BI,IAAM,mBAAN,cAA+B,0BAAW,QAA4C,EAAE,2BAA2B,EAAE;AAAC;;;AK5I7H,IAAAC,iBAA0C;AAgBnC,IAAM,oCAAoC,0BAAW;AAAA,EAC1D;AAAA,EACA;AAAA,IACE,cAAc,MAAM,CAAC;AAAA,EACvB;AACF;;;ACrBA,IAAAC,iBAAwC;;;ACAxC,IAAAC,kBAA2C;;;ACA3C,IAAAC,kBAA0C;;;ACU1C,IAAM,aAAqB,MAAM;AAAC;AAElC,IAAM,YAAY,MAAc;AAC9B,QAAM,OAAQ,WAAmB;AACjC,MAAI,QAAQ,OAAO,KAAK,QAAQ,YAAY;AAC1C,QAAI;AACF,YAAM,IAAI,KAAK,IAAI;AACnB,UAAI,OAAO,MAAM,YAAY,OAAO,SAAS,CAAC,EAAG,QAAO;AAAA,IAC1D,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO,KAAK,IAAI;AAClB;AAEA,IAAM,qBAAqB,CAAC,OAAyB;AACnD,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,YAAY;AAC5B,QAAI;AACF,SAAG,EAAE;AACL;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACF,YAAQ,QAAQ,EAAE,KAAK,EAAE;AAAA,EAC3B,QAAQ;AAEN,eAAW,IAAI,CAAC;AAAA,EAClB;AACF;AAEA,IAAM,iBAAiB,CAAC,IAAY,OAA2B;AAC7D,QAAM,KAAK,WAAW,IAAI,EAAE;AAC5B,SAAO,MAAM;AACX,QAAI;AACF,mBAAa,EAAE;AAAA,IACjB,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAEA,IAAM,8BAA8B,MAAgD;AAClF,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,WAAY,QAAO;AAErC,MAAI;AACJ,MAAI;AACF,cAAU,IAAI,GAAG;AAAA,EACnB,QAAQ;AACN,WAAO;AAAA,EACT;AAGA,QAAM,QAAq
B,CAAC;AAC5B,MAAI,YAAY;AAEhB,QAAM,QAAQ,MAAY;AACxB,gBAAY;AACZ,UAAM,QAAQ,MAAM,OAAO,GAAG,MAAM,MAAM;AAC1C,eAAW,KAAK,OAAO;AACrB,UAAI,EAAE,SAAU;AAChB,UAAI;AACF,UAAE,GAAG;AAAA,MACP,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,YAAQ,MAAM,YAAY;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,CAAC,OAA2B;AAC3C,UAAM,OAAa,EAAE,UAAU,OAAO,GAAG;AACzC,UAAM,KAAK,IAAI;AACf,QAAI,CAAC,WAAW;AACd,kBAAY;AACZ,UAAI;AACF,gBAAQ,MAAM,YAAY,MAAS;AAAA,MACrC,QAAQ;AACN,oBAAY;AAEZ,eAAO,eAAe,GAAG,EAAE;AAAA,MAC7B;AAAA,IACF;AACA,WAAO,MAAM;AACX,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAEA,SAAO;AACT;AAEA,IAAM,4BAA4B,MAAgD;AAChF,QAAM,KAAM,WAAmB;AAC/B,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,WAAY,QAAO;AAErC,SAAO,CAAC,OAAO;AACb,QAAI;AACJ,QAAI;AACF,WAAK,GAAG,EAAE;AAAA,IACZ,QAAQ;AACN,aAAO,eAAe,GAAG,EAAE;AAAA,IAC7B;AAEA,WAAO,MAAM;AACX,UAAI,OAAO,OAAO,WAAY;AAC9B,UAAI;AACF,WAAG,EAAE;AAAA,MACP,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,UAAU,MAAgD;AAC9D,QAAM,MAAO,WAAmB;AAChC,QAAM,SAAU,WAAmB;AACnC,MAAI,OAAO,QAAQ,WAAY,QAAO;AAEtC,SAAO,CAAC,OAAO;AACb,QAAI;AACJ,QAAI;AACF,WAAK,IAAI,EAAE;AAAA,IACb,QAAQ;AACN,aAAO;AAAA,IACT;AAEA,WAAO,MAAM;AACX,UAAI,OAAO,WAAW,WAAY;AAClC,UAAI;AACF,eAAO,EAAE;AAAA,MACX,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AACF;AAEO,IAAM,2BAA2B,MAAqB;AAC3D,QAAM,YACJ,0BAA0B,KAC1B,4BAA4B,MAC3B,CAAC,OAAmB,eAAe,GAAG,EAAE;AAE3C,QAAM,MAAM,QAAQ;AAEpB,SAAO;AAAA,IACL,OAAO;AAAA,IACP,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,wBAAwB,CAAC,OAAO,MAAM,EAAE,KAAK,UAAU,EAAE;AAAA,IACzD,iBAAiB;AAAA,EACnB;AACF;AAEA,IAAI;AAEG,IAAM,yBAAyB,MAAqB;AACzD,gDAAwB,yBAAyB;AACjD,SAAO;AACT;;;ACrKO,IAAM,gBAAgB,CAAC,aAA4C;AACxE,QAAM,MAAM,SAAS,QAAQ,IAAI;AACjC,MAAI,OAAO,EAAG,QAAO;AAErB,QAAM,WAAW,SAAS,MAAM,GAAG,GAAG;AACtC,QAAM,OAAO,SAAS,MAAM,MAAM,CAAC;AACnC,MAAI,KAAK,WAAW,EAAG,QAAO;AAE9B,QAAM,OAAO,KAAK,QAAQ,IAAI;AAC9B,MAAI,OAAO,GAAG;AACZ,WAAO,EAAE,MAAM,UAAU,mBAAmB,GAAG,QAAQ,KAAK,IAAI,GAAG;AAAA,EACrE;AAEA,QAAM,aAAa,KAAK,MAAM,GAAG,IAAI;AACrC,QAAM,SAAS,KAAK,MAAM,OAAO,CAAC;AAClC,MAAI,OAAO,WAAW,KAAK,GAAG;AAC5B,UAAM,aAAa,OAAO,MAAM,MAAM,MAAM;AAC5C,QAAI,WAAW,WAAW,EAA
G,QAAO;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,mBAAmB,GAAG,QAAQ,KAAK,UAAU;AAAA,MAC7C;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,UAAU,mBAAmB,GAAG,QAAQ,KAAK,UAAU,GAAG;AAC3E;AAuCA,IAAM,0BAAqD,CAAC;AA+B5D,IAAM,6BAAwD,CAAC;AAExD,IAAM,mBAAmB,MAAoB;AAClD,MAAI,UAAU;AAGd,QAAM,eAAe,oBAAI,IAAgC;AACzD,QAAM,gBAAgB,oBAAI,IAAsB;AAChD,QAAM,kBAAkB,oBAAI,IAAmC;AAG/D,QAAM,mBAAmB,oBAAI,IAAmC;AAChE,QAAM,0BAA0B,oBAAI,IAA+B;AAEnE,QAAM,kBAAkB,CAAC,aAA+B,cAAc,IAAI,QAAQ,KAAK;AACvF,QAAM,mBAAmB,CAAC,aAA4C,gBAAgB,IAAI,QAAQ,KAAK;AAEvG,QAAM,kBAAkB,CAAC,UAAoB,aAAwC;AACnF,UAAM,OAAO,cAAc,IAAI,QAAQ,KAAK;AAC5C,kBAAc,IAAI,UAAU,OAAO,CAAC;AACpC,oBAAgB,IAAI,UAAU,QAAQ;AAAA,EACxC;AAEA,QAAM,uBAAuB,CAAC,UAAqC;AACjE,UAAM,WAAW,MAAM,KAAK,MAAM,SAAS;AAAA,EAC7C;AAEA,QAAM,iBAAiB,CAAC,UAAoB,aAAuC;AACjF,UAAM,OAAO,cAAc,QAAQ;AACnC,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,UAAM,QAAQ,YAAY,EAAE,WAAW,oBAAI,IAAgB,GAAG,UAAU,wBAAwB;AAChG,UAAM,aAAa,MAAM,UAAU,IAAI,QAAQ;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,UAAU,IAAI,QAAQ;AAC5B,2BAAqB,KAAK;AAAA,IAC5B;AACA,QAAI,CAAC,UAAU;AACb,uBAAiB,IAAI,UAAU,KAAK;AAAA,IACtC;AAEA,QAAI,CAAC,cAAc,MAAM;AACvB,YAAM,OAAO,wBAAwB,IAAI,KAAK,iBAAiB,KAAK;AACpE,8BAAwB,IAAI,KAAK,mBAAmB,OAAO,CAAC;AAAA,IAC9D;AAEA,WAAO,MAAM;AACX,YAAM,eAAe,iBAAiB,IAAI,QAAQ;AAClD,UAAI,CAAC,aAAc;AACnB,YAAM,UAAU,aAAa,UAAU,OAAO,QAAQ;AACtD,UAAI,WAAW,MAAM;AACnB,cAAM,OAAO,wBAAwB,IAAI,KAAK,iBAAiB,KAAK;AACpE,cAAM,OAAO,OAAO;AACpB,YAAI,QAAQ,GAAG;AACb,kCAAwB,OAAO,KAAK,iBAAiB;AAAA,QACvD,OAAO;AACL,kCAAwB,IAAI,KAAK,mBAAmB,IAAI;AAAA,QAC1D;AAAA,MACF;AACA,UAAI,aAAa,UAAU,SAAS,GAAG;AACrC,yBAAiB,OAAO,QAAQ;AAAA,MAClC,WAAW,SAAS;AAClB,6BAAqB,YAAY;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,0BAA0B,CAAC,aAA+B,iBAAiB,IAAI,QAAQ,GAAG,UAAU,QAAQ;AAClH,QAAM,2BAA2B,CAAC,sBAAiD,wBAAwB,IAAI,iBAAiB,KAAK;AAErI,QAAM,yBAAyB,CAAC,SAKpB;AACV,iBAAa,IAAI,KAAK,mBAAmB,KAAK,YAAY;AAE1D,QAAI,CAAC,cAAc,IAAI,KAAK,iBAAiB,GAAG;AAC9C,oBAAc,IAAI,KAAK,mBAAmB,CAAC;AAC3C,sBAAgB,IAAI,KAAK,mBAAmB,QAAQ;AAAA,IACtD;AAAA,EACF;AAEA,QAAM,2BAA2B,CAAC,sBAA+C;AAC/E,iBAAa,OAAO,iBAAiB;AAAA,EAEvC;AAEA,QAAM,aAAa,CAAC,SAIY;AAC9
B,cAAU,KAAK;AAEf,eAAW,CAAC,KAAK,MAAM,KAAK,KAAK,SAAS,SAAS;AACjD,mBAAa,IAAI,KAAK,OAAO,KAAK;AAAA,IACpC;AAEA,QAAI,KAAK,SAAS,YAAY,SAAS,GAAG;AACxC,aAAO;AAAA,QACL,uBAAuB;AAAA,MACzB;AAAA,IACF;AAEA,QAAI,KAAK,YAAY;AACnB,UAAI;AACJ,UAAI;AACJ,UAAI;AAEJ,iBAAW,CAAC,UAAU,QAAQ,KAAK,KAAK,SAAS,aAAa;AAC5D,wBAAgB,UAAU,QAAQ;AAClC,cAAM,YAAY,iBAAiB,IAAI,QAAQ,GAAG,YAAY;AAC9D,YAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,QACF;AACA,YAAI,CAAC,qBAAqB;AACxB,gCAAsB;AACtB;AAAA,QACF;AACA,YAAI,CAAC,sBAAsB;AACzB,iCAAuB;AACvB;AAAA,QACF;AACA,YAAI,CAAC,oBAAoB;AACvB,+BAAqB,CAAC;AAAA,QACxB;AACA,2BAAmB,KAAK,SAAS;AAAA,MACnC;AAEA,UAAI,qBAAqB;AACvB,mBAAW,YAAY,qBAAqB;AAC1C,cAAI;AACF,iBAAK,WAAW,QAAQ;AAAA,UAC1B,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAEA,UAAI,sBAAsB;AACxB,mBAAW,YAAY,sBAAsB;AAC3C,cAAI;AACF,iBAAK,WAAW,QAAQ;AAAA,UAC1B,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAEA,UAAI,oBAAoB;AACtB,mBAAW,aAAa,oBAAoB;AAC1C,qBAAW,YAAY,WAAW;AAChC,gBAAI;AACF,mBAAK,WAAW,QAAQ;AAAA,YAC1B,QAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,QACL,uBAAuB;AAAA,MACzB;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AAEJ,eAAW,CAAC,UAAU,QAAQ,KAAK,KAAK,SAAS,aAAa;AAC5D,sBAAgB,UAAU,QAAQ;AAClC,YAAM,YAAY,iBAAiB,IAAI,QAAQ,GAAG,YAAY;AAC9D,UAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,MACF;AACA,UAAI,yBAAyB;AAC3B,mBAAW,YAAY,WAAW;AAChC,kCAAwB,KAAK,QAAQ;AAAA,QACvC;AACA;AAAA,MACF;AACA,UAAI,CAAC,sBAAsB;AACzB,+BAAuB;AACvB;AAAA,MACF;AACA,gCAA0B,MAAM,KAAK,oBAAoB;AACzD,iBAAW,YAAY,WAAW;AAChC,gCAAwB,KAAK,QAAQ;AAAA,MACvC;AAAA,IACF;AAEA,WAAO;AAAA,MACL,uBAAuB,2BAA2B,wBAAwB;AAAA,IAC5E;AAAA,EACF;AAEA,QAAM,iBAAiB,CAAC,sBAAkD,aAAa,IAAI,iBAAiB;AAE5G,QAAM,UAAU,MAAY;AAC1B,iBAAa,MAAM;AACnB,kBAAc,MAAM;AACpB,oBAAgB,MAAM;AACtB,qBAAiB,MAAM;AACvB,4BAAwB,MAAM;AAAA,EAChC;AAEA,SAAO;AAAA,IACL,YAAY,MAAM;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC7UA,IAAAC,kBAAuB;;;ACAvB,IAAAC,iBAAkD;;;ACAlD,IAAAC,iBAA2C;;;ACA3C,IAAAC,iBAA2B;;;AFepB,IAAM,yBAAyB,0BAAW,UAAmB,mDAAmD;AAAA,EACrH,cAAc,MAAM;AACtB,CAAC;AAUM,IAAM,qBAAqB,0BAAW,UAAmB,+CAA+C;AAAA,EAC7G,cAAc
,MAAM;AACtB,CAAC;;;AG7BD,IAAAC,kBAAuB;AAkDhB,IAAM,6BAA6B,MAA8B;AACtE,QAAM,qBAAqB,oBAAI,IAAsC;AACrE,QAAM,4BAA4B,oBAAI,IAAoC;AAE1E,QAAM,kBAAkB,oBAAI,IAAmC;AAC/D,QAAM,+BAA+B,oBAAI,IAAmG;AAE5I,QAAM,6BAAmF,CAACC,UAAS;AACjG,UAAM,SAAmC;AAAA,MACvC,GAAGA;AAAA,MACH,UAAU;AAAA,MACV,WAAW;AAAA,IACb;AAEA,uBAAmB,IAAIA,MAAK,IAAI,MAAM;AACtC,UAAM,MAAM,0BAA0B,IAAIA,MAAK,uBAAuB,KAAK,oBAAI,IAAY;AAC3F,QAAI,IAAIA,MAAK,EAAE;AACf,8BAA0B,IAAIA,MAAK,yBAAyB,GAAG;AAE/D,WAAO,MAAM;AACX,yBAAmB,OAAOA,MAAK,EAAE;AACjC,YAAM,UAAU,0BAA0B,IAAIA,MAAK,uBAAuB;AAC1E,UAAI,CAAC,QAAS;AACd,cAAQ,OAAOA,MAAK,EAAE;AACtB,UAAI,QAAQ,SAAS,GAAG;AACtB,kCAA0B,OAAOA,MAAK,uBAAuB;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AAEA,QAAM,0BAA6E,CAACA,UAAS;AAC3F,UAAM,eAAe,oBAAI,IAAwJ;AACjL,eAAW,KAAKA,MAAK,WAAW;AAC9B,mBAAa,IAAI,EAAE,QAAQ,CAAC;AAAA,IAC9B;AAEA,UAAM,mBAAmB,oBAAI,IAG3B;AACF,eAAW,KAAKA,MAAK,eAAe;AAClC,uBAAiB,IAAI,EAAE,QAAQ,CAAC;AAAA,IAClC;AAGA,UAAM,qBAAqB,oBAAI,IAAoB;AACnD,eAAW,KAAKA,MAAK,GAAG,OAAO;AAC7B,YAAM,KAAK,EAAE;AACb,YAAM,aAAa,iBAAiB,IAAI,EAAE;AAC1C,UAAI,CAAC,WAAY;AACjB,yBAAmB,IAAI,KAAK,mBAAmB,IAAI,EAAE,KAAK,KAAK,CAAC;AAChE,YAAM,QAAQ,mBAAmB,IAAI,EAAE,KAAK;AAC5C,UAAI,QAAQ,GAAG;AACb,cAAM,IAAI;AAAA,UACR,yGAAyGA,MAAK,MAAM,YAAY,EAAE;AAAA,QACpI;AAAA,MACF;AAAA,IACF;AAEA,UAAM,4BAA4B,oBAAI,IAA0C;AAChF,eAAW,KAAKA,MAAK,GAAG,OAAO;AAC7B,YAAM,OAAO,EAAE;AACf,YAAM,KAAK,EAAE;AACb,UAAI,CAAC,aAAa,IAAI,IAAI,EAAG;AAC7B,UAAI,CAAC,iBAAiB,IAAI,EAAE,EAAG;AAC/B,YAAM,OAAO,0BAA0B,IAAI,IAAI,KAAK,CAAC;AACrD,WAAK,KAAK,EAAE;AACZ,gCAA0B,IAAI,MAAM,IAAI;AAAA,IAC1C;AAEA,UAAM,SAAgC;AAAA,MACpC,GAAGA;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,MACA,eAAe,oBAAI,IAAI;AAAA,IACzB;AAEA,oBAAgB,IAAIA,MAAK,QAAQ,MAAM;AAEvC,eAAW,KAAKA,MAAK,WAAW;AAC9B,YAAM,OAAO,6BAA6B,IAAI,EAAE,iBAAiB,KAAK,CAAC;AACvE,WAAK,KAAK,EAAE,QAAQA,MAAK,QAAQ,QAAQ,EAAE,OAAO,CAAC;AACnD,mCAA6B,IAAI,EAAE,mBAAmB,IAAI;AAAA,IAC5D;AAEA,WAAO,MAAM;AACX,sBAAgB,OAAOA,MAAK,MAAM;AAClC,iBAAW,KAAKA,MAAK,WAAW;AAC9B,cAAM,OAAO,6BAA6B,IAAI,EAAE,iBAAiB;AACjE,YAAI,CAAC,KAAM;AACX,cAAM,OAAO,KAAK,OAAO,CAAC,MAAM,EAAE,EAAE,WAAWA,MAAK,UAAU,E
AAE,WAAW,EAAE,OAAO;AACpF,YAAI,KAAK,WAAW,GAAG;AACrB,uCAA6B,OAAO,EAAE,iBAAiB;AAAA,QACzD,OAAO;AACL,uCAA6B,IAAI,EAAE,mBAAmB,IAAI;AAAA,QAC5D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAA6D,CAAC,SAClE,uBAAO,IAAI,aAAa;AACtB,QAAI,YAAY;AAGhB,eAAW,aAAa,KAAK,2BAA2B;AACtD,YAAM,MAAM,0BAA0B,IAAI,SAAS;AACnD,UAAI,CAAC,OAAO,IAAI,SAAS,EAAG;AAC5B,YAAM,SAAS,KAAK,gBAAgB,IAAI,SAAS;AACjD,UAAI,CAAC,OAAQ;AAEb,iBAAW,MAAM,KAAK;AACpB,cAAMA,QAAO,mBAAmB,IAAI,EAAE;AACtC,YAAI,CAACA,MAAM;AAEX,YAAI;AACJ,YAAI;AACF,qBAAWA,MAAK,UAAU,OAAO,OAAO,KAAY;AAAA,QACtD,QAAQ;AACN;AAAA,QACF;AAEA,cAAM,YAAYA,MAAK,aAAa,QAAQ;AAC5C,YAAIA,MAAK,YAAYA,MAAK,YAAYA,MAAK,WAAW,SAAS,GAAG;AAChE;AAAA,QACF;AAEA,QAAAA,MAAK,WAAW;AAChB,QAAAA,MAAK,YAAY;AACjB,oBAAY;AACZ,eAAOA,MAAK,WAAW,SAAS;AAAA,MAClC;AAAA,IACF;AAGA,eAAW,aAAa,KAAK,2BAA2B;AACtD,YAAM,OAAO,6BAA6B,IAAI,SAAS;AACvD,UAAI,CAAC,QAAQ,KAAK,WAAW,EAAG;AAChC,YAAM,SAAS,KAAK,gBAAgB,IAAI,SAAS;AACjD,UAAI,CAAC,OAAQ;AAEb,iBAAW,OAAO,MAAM;AACtB,cAAMA,QAAO,gBAAgB,IAAI,IAAI,MAAM;AAC3C,YAAI,CAACA,MAAM;AACX,cAAM,WAAWA,MAAK,aAAa,IAAI,IAAI,MAAM;AACjD,YAAI,CAAC,SAAU;AAEf,YAAI;AACJ,YAAI;AACF,kBAAQ,SAAS,UAAU,OAAO,OAAO,KAAY;AAAA,QACvD,QAAQ;AACN;AAAA,QACF;AAEA,cAAM,QAAQA,MAAK,cAAc,IAAI,IAAI,MAAM,KAAK,EAAE,UAAU,OAAO,WAAW,OAAU;AAC5F,cAAM,UAAU,CAAC,MAAM,YAAY,CAAC,OAAO,GAAG,MAAM,WAAW,KAAK;AACpE,YAAI,CAAC,QAAS;AAEd,cAAM,WAAW;AACjB,cAAM,YAAY;AAClB,QAAAA,MAAK,cAAc,IAAI,IAAI,QAAQ,KAAK;AAExC,cAAM,UAAUA,MAAK,0BAA0B,IAAI,IAAI,MAAM,KAAK,CAAC;AACnE,mBAAW,kBAAkB,SAAS;AACpC,gBAAM,OAAOA,MAAK,iBAAiB,IAAI,cAAc;AACrD,cAAI,CAAC,KAAM;AACX,sBAAY;AACZ,iBAAO,KAAK,SAAS,KAAK;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,UAAU;AAAA,EACrB,CAAC;AAEH,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;APpJA,IAAM,gCAAN,cAA4C,2BAAW,QAGrD,EAAE,6CAA6C,EAAE;AAAC;AAMpD,IAAM,mCAAN,cAA+C,2BAAW,QAGxD,EAAE,gDAAgD,EAAE;AAAC;AAUvD,IAAM,0BAAN,cAAsC,2BAAW,QAG/C,EAAE,gCAAgC,EAAE;AAAC;AAkFvC,IAAM,mCAAN,cAA+C,2BAAW,QAGxD,EAAE,yCAAyC,EAAE;AAAC;AA8BhD,IAAM,iCAAN,cAA6C,2BAAW,QAGtD,EAAE,uCAAuC,EAAE;AAAC;AAa9C,IAAM,0CAAN,cAAsD,2BAAW,QAG/D,EAAE,gDAAgD,EAAE
;AAAC;AAoBhD,IAAM,kBAAN,cAA8B,2BAAW,QAA8C,EAAE,4BAA4B,EAAE;AAAC;AAExH,IAAM,oBAAoD,sBAAM;AAAA,EACrE;AAAA,EACA,uBAAO;AAAA,IACL,uBAAO,KAAK,MAAM,iBAAiB,CAAwB;AAAA,IAC3D,CAAC,UAAU,uBAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAAA,EAC9C;AACF;AAOO,IAAM,mBAAN,cAA+B,2BAAW,QAG/C,EAAE,6BAA6B,EAAE;AAAC;AAE7B,IAAM,qBAAqD,sBAAM;AAAA,EACtE;AAAA,EACA,uBAAuB;AACzB;AAOO,IAAM,4BAAN,cAAwC,2BAAW,QAGxD,EAAE,sCAAsC,EAAE;AAAC;AAEtC,IAAM,8BAA8D,sBAAM;AAAA,EAC/E;AAAA,EACA,2BAA2B;AAC7B;AAQO,IAAM,mBAAN,cAA+B,2BAAW,QAAgD,EAAE,6BAA6B,EAAE;AAAC;;;ADtRnI,IAAM,sCAAN,cAAkD,2BAAW,QAG3D,EAAE,4CAA4C,EAAE;AAAC;AAWnD,IAAM,0CAAN,cAAsD,2BAAW,QAG/D,EAAE,gDAAgD,EAAE;AAAC;AAIvD,IAAM,0CAAN,cAAsD,2BAAW,QAG/D,EAAE,gDAAgD,EAAE;AAAC;AAcvD,IAAM,6BAAN,cAAyC,2BAAW,QAGlD,EAAE,mCAAmC,EAAE;AAAC;AAqH1C,IAAM,iCAAN,cAA6C,2BAAW,QAGtD,EAAE,uCAAuC,EAAE;AAAC;;;AS/L9C,IAAAC,kBAA2C;AAsF3C,IAAM,iCAAN,cAA6C,2BAAW,QAGtD,EAAE,uCAAuC,EAAE;AAAC;;;ACzF9C,IAAAC,kBAAmC;AAY5B,IAAM,4BAAN,cAAwC,2BAAW,QAGxD,EAAE,sCAAsC,EAAE;AAAC;;;ACf7C,IAAAC,kBAA0C;;;ACA1C,IAAAC,kBAAkC;AA8BlC,IAAM,oBAAN,cAAgC,2BAAW,QAAuC,EAAE,0BAA0B,EAAE;AAAC;;;ADNjH,IAAM,2BAAN,cAAuC,2BAAW,QAGhD,EAAE,iCAAiC,EAAE;AAAC;;;AE3BxC,IAAAC,kBAA+B;;;ACM/B,IAAAC,kBAA+B;;;ACN/B,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA8B;;;ACA9B,sBAAqC;;;ACArC,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAAmF;;;ACAnF,IAAAC,kBAAuB;;;ACYhB,IAAM,kBAAkB,CAAC,UAA2B;AACzD,MAAI,UAAU,KAAM,QAAO;AAC3B,QAAM,IAAI,OAAO;AACjB,MAAI,MAAM,SAAU,QAAO,KAAK,UAAU,KAAK;AAC/C,MAAI,MAAM,SAAU,QAAO,OAAO,SAAS,KAAK,IAAI,OAAO,KAAK,IAAI;AACpE,MAAI,MAAM,UAAW,QAAO,QAAQ,SAAS;AAE7C,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,WAAO,IAAI,MAAM,IAAI,eAAe,EAAE,KAAK,GAAG,CAAC;AAAA,EACjD;AAEA,MAAI,MAAM,UAAU;AAClB,UAAMC,UAAS;AACf,UAAM,OAAO,OAAO,KAAKA,OAAM,EAAE,KAAK;AACtC,WAAO,IAAI,KAAK,IAAI,CAAC,MAAM,GAAG,KAAK,UAAU,CAAC,CAAC,IAAI,gBAAgBA,QAAO,CAAC,CAAC,CAAC,EAAE,EAAE,KAAK,GAAG,CAAC;AAAA,EAC5F;AAEA,SAAO;AACT;AAMO,IAAM,UAAU,CAAC,UAA0B;AAChD,MAAI,OAAO;AACX,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAQ,MAAM,WAAW,CAAC;AAC1B,WAAQ,OAAO,aAAgB;AAAA,EACjC;AACA,SAAO,KAAK,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC1C;
;;AC3CA,IAAAC,kBAA+B;AAC/B,gBAA2B;;;ACD3B,IAAAC,kBAA0C;AAQnC,IAAM,oBAAoB,2BAAW,UAAmB,4BAA4B;AAAA,EACzF,cAAc,MAAM;AACtB,CAAC;;;ACVD,IAAAC,kBAA+B;;;AJqC/B,IAAM,cAAc,IAAI,WAAW,CAAC;;;AKrCpC,IAAAC,kBAAuB;AACvB,IAAAC,mBAAuB;AAyPvB,IAAM,SAAS,MAAM;AACnB,QAAM,OAAO,WAAW;AACxB,MAAI,QAAQ,OAAO,KAAK,QAAQ,YAAY;AAC1C,WAAO,MAAM,KAAK,IAAI;AAAA,EACxB;AACA,SAAO,MAAM,KAAK,IAAI;AACxB,GAAG;;;AChQH,IAAAC,kBAAsC;AACtC,IAAAC,mBAAuB;;;ACDvB,IAAAC,kBAAkC;AA0G3B,IAAM,sBAAN,cAAkC,2BAAW,QAGlD,EAAE,gCAAgC,EAAE;AAAC;;;AC7GvC,IAAAC,kBAA0C;AA+CnC,IAAM,YAAN,cAAwB,2BAAW,QAAqC,EAAE,yBAAyB,EAAE;AAAC;AAwDtG,IAAM,WAA2E,uBAAO,IAAI,aAAa;AAC9G,QAAM,MAAM,OAAO;AACnB,SAAO,OAAO,IAAI;AACpB,CAAC;AAEM,IAAM,cAAqD,uBAAO,IAAI,aAAa;AACxF,QAAM,MAAM,OAAO;AACnB,SAAO,IAAI;AACb,CAAC;;;AC/GD,IAAAC,kBAAyD;AA8ClD,IAAM,6BAA6B,2BAAW;AAAA,EACnD;AAAA,EACA;AAAA,IACE,cAAc,MAAM;AAAA,EACtB;AACF;;;ACnDA,IAAAC,kBAA8C;;;ACA9C,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA+B;;;ACA/B,IAAAC,kBAA+B;;;ACC/B,IAAAC,aAA2B;;;ACD3B,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAA6C;AAoC7C,IAAM,qBAAN,cAAiC,2BAAW,QAAyC,EAAE,2BAA2B,EAAE;AAAC;;;ACpCrH,IAAAC,kBAA6G;;;ACA7G,IAAAC,kBAA0D;;;ACOnD,IAAM,0BAA0B,IAAI;;;ACOpC,IAAM,eAAe,uBAAO,IAAI,2BAA2B;AAMlE,IAAM,eAAe,CAAC,QAAgB,KAAa,UAAyB;AAC1E,SAAO,eAAe,QAAQ,KAAK;AAAA,IACjC;AAAA,IACA,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,IAAM,UAAU,CAAC,WACrB,OAAyB,YAAY;AAEjC,IAAM,gBAAgB,CAAC,WAC5B,QAAQ,MAAM,GAAG;AAEZ,IAAM,aAAa,CAAO,QAAmC,SAA2C;AAC7G,eAAa,QAAe,cAAc,IAAI;AAC9C,SAAO;AACT;;;ACtCA,IAAAC,aAA2B;;;ACA3B,IAAAC,kBAAsD;;;ACuE/C,SAAS,YAAY,OAA8C;AACxE,MAAI,CAAC,SAAU,OAAO,UAAU,YAAY,OAAO,UAAU,WAAa,QAAO;AACjF,QAAM,QAAQ;AACd,SAAO,OAAO,MAAM,eAAe,YAAY,OAAO,MAAM,WAAW,cAAc,MAAM,QAAQ,MAAM,KAAK;AAChH;AAyCA,IAAM,iBAAiB,CAAC,UAA0E;AAChG,QAAM,SAAiC,CAAC;AACxC,QAAM,OAAO,oBAAI,IAAY;AAC7B,aAAW,KAAK,OAAO;AACrB,UAAM,MAAM,OAAO,MAAM,WAAW,KAAK,CAAC,KAAK,KAAK,CAAC;AACrD,QAAI,KAAK,IAAI,GAAG,EAAG;AACnB,SAAK,IAAI,GAAG;AACZ,WAAO,KAAK,CAAC;AAAA,EACf;AACA,SAAO,OAAO,MAAM,EAAE,KAAK;AAC7B;AAEA,IAAM,iBAAiB,CAAC,QAAwB,OAAO,SAAS,KAAK,EAAE;AAEvE,IAAM,kBAAkB,CAAC,U
AAuD;AAC9E,QAAM,aAAa,eAAe,KAAK;AACvC,SAAO;AAAA,IACL,OAAO,WAAW;AAAA,IAClB,MAAM,eAAe,QAAQ,gBAAgB,UAAU,CAAC,CAAC;AAAA,EAC3D;AACF;AAEA,IAAM,oBAAoB,CAAC,UAA2B,MAAM,QAAQ,gBAAgB,KAAK,CAAC,CAAC;AAE3F,IAAI,0BAA0B;AAC9B,IAAM,yBAAyB,oBAAI,QAA0B;AAE7D,IAAM,4BAA4B,CAAC,aAA+B;AAChE,QAAM,WAAW,uBAAuB,IAAI,QAAQ;AACpD,MAAI,SAAU,QAAO;AACrB,6BAA2B;AAC3B,QAAM,aAAa,OAAO,uBAAuB;AACjD,yBAAuB,IAAI,UAAU,UAAU;AAC/C,SAAO;AACT;AAEA,IAAM,eAAe,CAAC,UAA0B;AAC9C,QAAM,UAAU,MAAM,KAAK;AAC3B,MAAI,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,GAAG;AACpD,UAAM,QAAQ,QAAQ,MAAM,GAAG,EAAE,EAAE,KAAK;AAExC,QAAI,CAAC,MAAM,WAAW,GAAG,KAAK,CAAC,MAAM,SAAS,GAAG,GAAG;AAClD,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAEA,IAAM,eAAe,CAAC,WAAkF;AACtG,QAAM,MAAM,OAAO,QAAQ,IAAI;AAC/B,MAAI,MAAM,EAAG,QAAO;AACpB,QAAM,OAAO,OAAO,MAAM,GAAG,GAAG,EAAE,KAAK;AACvC,QAAM,QAAQ,OAAO,MAAM,MAAM,CAAC,EAAE,KAAK;AAEzC,QAAM,WAAW,aAAa,IAAI;AAClC,MAAI,CAAC,6BAA6B,KAAK,QAAQ,EAAG,QAAO;AAGzD,MAAI,MAAM,WAAW,GAAG,GAAG;AACzB,UAAM,IAAI,MAAM,MAAM,kCAAkC;AACxD,QAAI,CAAC,EAAG,QAAO;AACf,WAAO,EAAE,OAAO,UAAU,MAAM,EAAE,CAAC,GAAG,KAAK,KAAK,GAAG;AAAA,EACrD;AAEA,SAAO,EAAE,OAAO,UAAU,MAAM,MAAM;AACxC;AAEA,IAAM,wBAAwB,CAAC,WAAkF;AAC/G,QAAM,UAAU,OAAO,KAAK;AAC5B,QAAM,IAAI,QAAQ;AAAA,IAChB;AAAA,EACF;AACA,MAAI,CAAC,EAAG,QAAO;AAEf,QAAM,QAAQ,EAAE,CAAC,GAAG,KAAK,KAAK;AAC9B,MAAI,CAAC,6BAA6B,KAAK,KAAK,EAAG,QAAO;AAEtD,QAAM,OAAO,EAAE,CAAC,GAAG,KAAK,KAAK;AAC7B,MAAI,KAAK,WAAW,EAAG,QAAO;AAE9B,SAAO,EAAE,OAAO,KAAK;AACvB;AAMA,IAAM,yBAAyB,CAAC,WAA+C;AAC7E,QAAM,YAAY,aAAa,MAAM,KAAK,sBAAsB,MAAM;AACtE,MAAI,CAAC,UAAW,QAAO;AAEvB,QAAM,OAAO,aAAa,UAAU,IAAI,EAAE,KAAK,EAAE,QAAQ,MAAM,EAAE,EAAE,KAAK;AAGxE;AACE,UAAM,KAAK,IAAI,OAAO,IAAI,UAAU,KAAK,4CAA4C;AACrF,UAAM,IAAI,KAAK,MAAM,EAAE;AACvB,QAAI,GAAG;AACL,aAAO,EAAE,MAAM,QAAQ,MAAM,EAAE,CAAC,EAAE;AAAA,IACpC;AAAA,EACF;AAGA,MAAI,KAAK,WAAW,GAAG,KAAK,KAAK,SAAS,GAAG,GAAG;AAC9C,UAAM,QAAQ,KAAK,MAAM,GAAG,EAAE,EAAE,KAAK;AACrC,QAAI,MAAM,WAAW,EAAG,QAAO,EAAE,MAAM,UAAU,SAAS,CAAC,EAAE;AAE7D,UAAM,QAAQ,MACX,MAAM,GAAG,EACT,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,EACnB,OAAO,CAAC,MAAM,EAAE,SAAS,
CAAC;AAE7B,UAAM,UAA4C,CAAC;AAEnD,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,KAAK,QAAQ,GAAG;AAC5B,UAAI,MAAM,EAAG,QAAO;AACpB,YAAM,MAAM,KAAK,MAAM,GAAG,GAAG,EAAE,KAAK;AACpC,YAAM,QAAQ,KAAK,MAAM,MAAM,CAAC,EAAE,KAAK;AAEvC,UAAI,CAAC,6BAA6B,KAAK,GAAG,EAAG,QAAO;AAEpD,YAAM,KAAK,IAAI,OAAO,IAAI,UAAU,KAAK,4CAA4C;AACrF,YAAM,IAAI,MAAM,MAAM,EAAE;AACxB,UAAI,CAAC,EAAG,QAAO;AACf,cAAQ,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAU;AAAA,IACnC;AAGA,YAAQ,KAAK,CAAC,GAAG,MAAO,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAE;AAC/D,WAAO,EAAE,MAAM,UAAU,QAAQ;AAAA,EACnC;AAEA,SAAO;AACT;AASA,IAAM,gCAAgC;AACtC,IAAM,wBAAwB,oBAAI,QAA2C;AAC7E,IAAM,4BAA4B,oBAAI,IAAqC;AAE3E,IAAM,SAAS,CAAO,KAAgB,QAA0B;AAC9D,QAAM,QAAQ,IAAI,IAAI,GAAG;AACzB,MAAI,UAAU,OAAW,QAAO;AAChC,MAAI,OAAO,GAAG;AACd,MAAI,IAAI,KAAK,KAAK;AAClB,SAAO;AACT;AAEA,IAAM,SAAS,CAAO,KAAgB,KAAQ,OAAU,YAAoB;AAC1E,MAAI,IAAI,IAAI,GAAG,EAAG,KAAI,OAAO,GAAG;AAChC,MAAI,IAAI,KAAK,KAAK;AAClB,MAAI,IAAI,QAAQ,QAAS;AACzB,QAAM,YAAY,IAAI,KAAK,EAAE,KAAK,EAAE;AACpC,MAAI,cAAc,QAAW;AAC3B,QAAI,OAAO,SAAS;AAAA,EACtB;AACF;AAEA,IAAM,eAAe,CAAC,OAAyB;AAC7C,MAAI;AACF,WAAO,GAAG,SAAS;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAIO,IAAM,UAAU,CAAO,UAAyD;AACrF,MAAI,YAAY,KAAK,GAAG;AACtB,UAAM,QAAQ,eAAe,MAAM,KAAK;AACxC,UAAM,cAAc,MAAM,SAAS,IAAI,gBAAgB,KAAK,IAAI;AAEhE,UAAM,aAAa,MAAM;AACzB,UAAMC,YAA8B;AAAA,MAClC,YAAY,MAAM;AAAA,MAClB,UAAU,MAAM;AAAA,MAChB,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA,UAAAA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW;AACjB,QAAM,YACH,OAAQ,UAAkB,aAAa,YAAa,SAAiB,SAAS,SAAS,IACnF,SAAiB,WAClB,YACH,OAAQ,SAAiB,SAAS,YAAa,SAAiB,KAAK,SAAS,IAC1E,SAAiB,OAClB;AAEN,QAAM,gBAAmD,MAAM,QAAS,UAAkB,UAAU,IAC9F,SAAiB,WAAsC,OAAO,CAAC,MAAmB,OAAO,MAAM,QAAQ,IACzG;AAEJ,MAAI,iBAAiB,cAAc,SAAS,GAAG;AAC7C,UAAM,QAAQ,eAAe,aAAa;AAC1C,UAAM,cAAc,gBAAgB,KAAK;AACzC,UAAMC,cAAa,kBAAkB,EAAE,MAAM,SAAS,MAAM,CAAC;AAE7D,UAAMD,YAA8B;AAAA,MAClC,YAAAC;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA;AAAA,MAC
A,YAAY;AAAA,IACd;AAEA,WAAO;AAAA,MACL,YAAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA,UAAAD;AAAA,IACF;AAAA,EACF;AAEA,QAAM,aAAa,sBAAsB,IAAI,QAA+B;AAC5E,MAAI,YAAY;AACd,UAAMA,YAA8B;AAAA,MAClC,YAAY,WAAW;AAAA,MACvB;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV,OAAO,WAAW;AAAA,MAClB,aAAa,WAAW;AAAA,MACxB,YAAY,WAAW;AAAA,IACzB;AAEA,WAAO;AAAA,MACL,YAAY,WAAW;AAAA,MACvB;AAAA,MACA,OAAO,WAAW;AAAA,MAClB,QAAQ;AAAA,MACR,YAAY,WAAW;AAAA,MACvB,MAAM;AAAA,MACN,UAAU;AAAA,MACV,aAAa,WAAW;AAAA,MACxB,UAAAA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,aAAa,aAAa,QAA+B,EAAE,KAAK;AACtE,QAAM,iBAAiB,WAAW,SAAS,IAAI,OAAO,2BAA2B,UAAU,IAAI;AAC/F,MAAI,gBAAgB;AAClB,0BAAsB,IAAI,UAAiC,cAAc;AACzE,UAAMA,YAA8B;AAAA,MAClC,YAAY,eAAe;AAAA,MAC3B;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV,OAAO,eAAe;AAAA,MACtB,aAAa,eAAe;AAAA,MAC5B,YAAY,eAAe;AAAA,IAC7B;AAEA,WAAO;AAAA,MACL,YAAY,eAAe;AAAA,MAC3B;AAAA,MACA,OAAO,eAAe;AAAA,MACtB,QAAQ;AAAA,MACR,YAAY,eAAe;AAAA,MAC3B,MAAM;AAAA,MACN,UAAU;AAAA,MACV,aAAa,eAAe;AAAA,MAC5B,UAAAA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,WAAW,SAAS,IAAI,uBAAuB,UAAU,IAAI;AAE5E,MAAI,QAAQ,SAAS,QAAQ;AAC3B,UAAM,QAAQ,CAAC,OAAO,IAAI;AAC1B,UAAM,cAAc,gBAAgB,KAAK;AACzC,UAAMC,cAAa,kBAAkB,EAAE,MAAM,QAAQ,MAAM,OAAO,KAAK,CAAC;AACxE,UAAM,WAAoC,EAAE,YAAAA,aAAY,OAAO,aAAa,YAAY,WAAW;AACnG,0BAAsB,IAAI,UAAiC,QAAQ;AACnE,WAAO,2BAA2B,YAAY,UAAU,6BAA6B;AAErF,UAAMD,YAA8B;AAAA,MAClC,YAAAC;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA;AAAA,MACA,YAAY;AAAA,IACd;AAEA,WAAO;AAAA,MACL,YAAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA,UAAAD;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,SAAS,UAAU;AAC7B,UAAM,QAAQ,eAAe,OAAO,QAAQ,IAAI,CAAC,CAAC,EAAE,IAAI,MAAM,IAAI,CAAC;AACnE,UAAM,cAAc,gBAAgB,KAAK;AACzC,UAAMC,cAAa,kBAAkB,EAAE,MAAM,UAAU,SAAS,OAAO,QAAQ,CAAC;AAChF,UAAM,WAAoC,EAAE,YAAAA,aAAY,OAAO,aAAa,YAAY,gBAAgB;AACxG,0BAAsB,IAAI,UAAiC,QAAQ;AACnE,WAAO,2BAA2B,YAAY,UAAU,6BAA6B;AAErF,UAAMD,YAA8B;AAAA,MAClC,YAAAC;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AA
AA,MACA;AAAA,MACA,YAAY;AAAA,IACd;AAEA,WAAO;AAAA,MACL,YAAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,YAAY;AAAA,MACZ,MAAM;AAAA,MACN,UAAU;AAAA,MACV;AAAA,MACA,UAAAD;AAAA,IACF;AAAA,EACF;AAGA,QAAM,qBACJ,WAAW,SAAS,IAAI,KAAK,WAAW,WAAW,UAAU,IAAI,sBAAsB;AAEzF,QAAM,4BAA4B,WAAW,WAAW,KAAK,WAAW,SAAS,eAAe;AAChG,QAAM,iBACJ,CAAC,YAAY,4BAA4B,uBAAuB;AAElE,QAAM,aACJ,mBAAmB,uBACf,0BAA0B,QAA+B,IACzD,kBAAkB,EAAE,MAAM,WAAW,UAAU,KAAK,WAAW,CAAC;AACtE,QAAM,WAA8B;AAAA,IAClC;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,UAAU;AAAA,IACV;AAAA,IACA,YAAY;AAAA,EACd;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,OAAO,CAAC;AAAA,IACR,QAAQ;AAAA,IACR,YAAY;AAAA,IACZ,MAAM;AAAA,IACN,UAAU;AAAA,IACV;AAAA,IACA;AAAA,EACF;AACF;;;AC1fA,IAAAE,kBAA2B;;;AP6FpB,IAAM,oBAAN,cAAgC,2BAAW,QAA2C,EAAE,8BAA8B,EAAE;AAAC;AAEhI,IAAM,wBAAwB,2BAAW,UAAsC,+CAA+C;AAAA,EAC5H,cAAc,MAAM;AACtB,CAAC;AACD,IAAM,4BAA4B,2BAAW;AAAA,EAC3C;AAAA,EACA;AAAA,IACE,cAAc,MAAM;AAAA,EACtB;AACF;;;AQvGA,IAAAC,kBAAsC;;;ACAtC,IAAAC,kBAA4D;;;ACA5D,IAAAC,kBAAqC;;;ACArC,IAAAC,kBAA0C;AAsFnC,IAAM,uBAAN,cAAmC,2BAAW,QAGnD,EAAE,4BAA4B,EAAE;AAAC;AAiB5B,IAAM,sBAAN,cAAkC,2BAAW,QAGlD,EAAE,2BAA2B,EAAE;AAAC;;;AC7GlC,IAAAC,kBAAmC;AAqB5B,IAAM,MAAN,cAAkB,2BAAW,QAAsB,EAAE,mBAAmB,EAAE;AAAC;;;ACrBlF,IAAAC,kBAA4B;AA2B5B,IAAMC,SAAQ,uBAAO,UAAU,CAAC,UAAU,MAAM,iBAAiB;;;AC3BjE,IAAAC,kBAA2E;;;ACA3E,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAAuC;;;ACAvC,IAAAC,kBAAuB;;;ACAvB,IAAAC,kBAAoD;;;AhE8B7C,SAAS,KACd,SACA,OAC2B;AAC3B,SAAO,uBAAO,IAAI,aAAa;AAC7B,UAAM,UAAwD,CAAC;AAE/D,eAAW,CAAC,KAAKC,OAAM,KAAK,OAAO,QAAQ,OAAO,GAAG;AACnD,YAAM,UAAU,OAAO,uBAAO,QAAQA,OAAM,EAAE,KAAK,uBAAO,KAAK;AAE/D,cAAQ,GAAG,IAAI;AAAA,QACb,MAAM,CAAC,aAAkB,uBAAO,IAAI,QAAQ,UAAU,QAAQ;AAAA,QAC9D,SAAS,QAAQ;AAAA,QACjB,UAAU,QAAQ;AAAA,QAClB,UAAU,QAAQ;AAAA,QAClB,SAAS,IAAI;AAAA,UACX,CAAC;AAAA,UACD;AAAA,YACE,KAAK,CAAC,SAAS,SAAS,CAAC,YAAiB,QAAQ,SAAS,EAAE,MAAM,MAAgB,QAAQ,CAAC;AAAA,UAC9F;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,OAAO;AAAA,MACZ;AAAA,IAGF;AAAA,EACF,CAAC;AACH;;;AiErBO,IAAM,6BAA6B,CAAC,OACzC,eAAe,QAAQ,gBAAgB,EAAE,CAAC,CAAC;AActC,IAAM,0BAA0B,CAAC,UAAg
G;AAAA,EACtI,gBAAgB,2BAA2B,KAAK,EAAE;AAAA,EAClD,UAAU,QAAQ,KAAK,MAAM;AAAA,EAC7B,YAAY;AAAA,EACZ,MAAM;AAAA,EACN,IAAI,KAAK;AACX;;;AC/BO,IAAMC,WAAwB;;;AnEarC,IAAM,mBAAsD,CAAC,EAAE,MAAM,iBAAiB,eAAe,eAAe,CAAC;AAErH,IAAM,sBAAsB,CAAC,UAAoD;AAC/E,QAAM,OAAO,OAAO,UAAU,WAAY,EAAE,WAAW,MAAM,IAA8C;AAE3G,QAAM,YAAY,KAAK;AACvB,MAAI,OAAO,cAAc,YAAY,UAAU,WAAW,GAAG;AAC3D,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAEA,QAAM,WAAW,MAAM,QAAQ,KAAK,QAAQ,KAAK,KAAK,SAAS,SAAS,IAAI,KAAK,WAAW;AAE5F,QAAM,cAAc,KAAK,eAAgB,EAAE,MAAM,SAAS;AAC1D,QAAM,cAAc,KAAK,eAAgB,EAAE,MAAM,WAAW;AAC5D,QAAM,mBAAmB,KAAK,oBAAoB;AAElD,SAAO;AAAA,IACL;AAAA,IACA,MAAM,KAAK;AAAA,IACX,aAAa,KAAK;AAAA,IAClB,UAAU,KAAK;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,IAAMC,QAAO,CAAO,YAAmC,WACvD,WAAW,QAAQ;AAAA,EACtB,YAAY,oBAAoB,UAAU;AAAA,EAC1C,MAAM;AACR,CAAC;AAEI,IAAMC,iBAAgB,CAAC,WACvB,cAAc,MAAM;AAEpB,IAAMC,WAAU,CAAC,WAA6E,QAAQ,MAAM;AAE5G,IAAMC,cAAa,CAAO,QAAmC,SAC7D,WAAW,QAAQ,IAAI;AAc9B,IAAM,eAAe,CAAC,UACpB,QAAQ,KAAK,KACb,OAAO,UAAU,aACf,MAAuC,UAAU,eAChD,MAAuC,UAAU,aACpD,SAAU;AAEZ,IAAM,kBAAkB,CACtBC,YACsB;AACtB,MAAI,aAAaA,OAAM,GAAG;AACxB,WAAOA,QAAO;AAAA,EAChB;AACA,SAAOA;AACT;AAeO,SAAS,KACd,QACA,OACqB;AACrB,QAAM,SACJ,OAAO,MACP,CAAC,GAAG,OAAO,OAAO,EACf,IAAI,CAAC,MAAM,EAAE,EAAE,EACf,KAAK,EACL,KAAK,GAAG;AAEb,QAAM,gBAAgB,uBAAO,OAAO,IAAI;AACxC,QAAM,WAAW,oBAAI,IAAY;AAEjC,aAAWA,WAAU,OAAO,SAAS;AACnC,UAAM,MAAM,gBAAgBA,OAAM;AAClC,aAAS,IAAI,OAAO,IAAI,EAAE,CAAC;AAC1B,IAAC,cAAoE,IAAI,EAAE,IAAI;AAAA,EAIlF;AAEA,QAAM,SAAuB;AAAA,IAC3B;AAAA,IACA;AAAA,EACF;AAEA,QAAM,UAAU,uBAAO,IAAI,aAAa;AACtC,UAAM,QAAQ,OAAO,uBAAO,QAAc,uBAAuB,EAAE,KAAK,uBAAO,KAAK;AACpF,QAAI,UAAU,OAAO;AACnB,aAAa,OAAO;AAAA,QAClB,MAAM;AAAA,QACN,UAAU,QAAQ,MAAM;AAAA,QACxB,MAAM;AAAA,QACN,UAAU;AAAA,QACV,SACE;AAAA,QACF,MAAM;AAAA,QACN,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AACA,WAAO,OAAO;AAAA,EAChB,CAAC;AAED,QAAM,aAAgC;AAAA,IACpC,WAAW;AAAA,IACX,UAAU,MAAM,KAAK,QAAQ;AAAA,IAC7B,UAAU,CAAC,GAAG,kBAAkB,EAAE,MAAM,iBAAiB,eAAe,QAAQ,MAAM,GAAG,CAAC;AAAA,IAC1F,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,aAAa,EAAE,MAAM,WAAW;AAAA,I
AChC,kBAAkB;AAAA,EACpB;AAEA,SAAY,WAAW,SAAS;AAAA,IAC9B;AAAA,IACA,MAAM;AAAA,EACR,CAAC;AACH;AA4BO,SAAS,gBACd,QACAC,QAC6B;AAC7B,QAAM,SACJ,OAAO,MACP,CAAC,GAAG,OAAO,OAAO,EACf,IAAI,CAAC,MAAM,EAAE,EAAE,EACf,KAAK,EACL,KAAK,GAAG;AAEb,QAAM,gBAAgB,uBAAO,OAAO,IAAI;AACxC,QAAM,WAAW,oBAAI,IAAY;AAEjC,aAAWD,WAAU,OAAO,SAAS;AACnC,UAAM,MAAM,gBAAgBA,OAAM;AAClC,aAAS,IAAI,OAAO,IAAI,EAAE,CAAC;AAC1B,IAAC,cAAoE,IAAI,EAAE,IAAI;AAAA,EAIlF;AAEA,QAAM,UAAU,uBAAO,OAAO,IAAI;AAClC,aAAW,MAAM,OAAO,KAAK,aAAa,GAA8C;AACtF,UAAM,MAAO,cAAsB,EAAE;AACpC,IAAC,QAAgB,EAAE,IAAI;AAAA,MACtB,MAAM,CAAC,aAAiD;AACtD,cAAM,WAAqBE,SAAQ,QAAQ;AAC3C,cAAM,WAAW,SAAS;AAC1B,cAAM,KAAK,SAAS,SAAS,YAAY,SAAS,eAAe,QAAQ,SAAS,kBAAkB;AACpG,YAAI,CAAC,IAAI;AACP,gBAAM,IAAI;AAAA,YACR,iFAAiF,EAAE,gBAAgB,SAAS,UAAU,UAAU,SAAS,IAAI,oBAAoB,SAAS,cAAc;AAAA,UAE1L;AAAA,QACF;AACA,eAAO,EAAE,MAAM,YAAY,UAAU,IAAI,KAAK,WAAW,SAAS;AAAA,MACpE;AAAA,MACA,UAAU,CAAC,cAAsB;AAC/B,YAAI,OAAO,cAAc,YAAY,UAAU,WAAW,GAAG;AAC3D,gBAAM,IAAI,MAAM,gEAAgE;AAAA,QAClF;AACA,eAAO,EAAE,MAAM,gBAAgB,UAAU,IAAI,KAAK,UAAU;AAAA,MAC9D;AAAA,IACF;AAAA,EACF;AAEA,QAAM,QAAQD,OAAM,OAAO;AAC3B,MAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,UAAM,IAAI,MAAM,oEAAoE;AAAA,EACtF;AAEA,QAAM,aAAa,MAChB,OAAO,CAAC,MAAoC,QAAQ,KAAK,EAAE,QAAQ,EAAE,EAAE,CAAC,EACxE,IAAI,CAAC,MAAM;AACV,QAAI,EAAE,KAAK,SAAS,cAAc,EAAE,GAAG,SAAS,gBAAgB;AAC9D,YAAM,IAAI,MAAM,sEAAsE;AAAA,IACxF;AACA,WAAO;AAAA,EACT,CAAC,EACA,MAAM,EACN,KAAK,CAAC,GAAG,MAAM;AACd,UAAM,KAAK,GAAG,EAAE,KAAK,QAAQ,IAAI,EAAE,KAAK,UAAU,SAAS,UAAU,KAAK,EAAE,GAAG,QAAQ,IAAI,EAAE,GAAG,SAAS;AACzG,UAAM,KAAK,GAAG,EAAE,KAAK,QAAQ,IAAI,EAAE,KAAK,UAAU,SAAS,UAAU,KAAK,EAAE,GAAG,QAAQ,IAAI,EAAE,GAAG,SAAS;AACzG,WAAO,KAAK,KAAK,KAAK,KAAK,KAAK,IAAI;AAAA,EACtC,CAAC;AAEH,QAAM,UAA2C,uBAAO,IAAI,aAAa;AACvE,UAAM,UAAU,OAAO,uBAAO,QAAQ,yBAAyB,EAAE,KAAK,uBAAO,KAAK;AAClF,UAAM,aAAa,OAAO,uBAAO,QAAQ,iCAAiC,EAAE,KAAK,uBAAO,KAAK;AAE7F,UAAM,eAAe,oBAAI,IAA4C;AACrE,UAAM,iBAAiB,CAAC,QACtB,uBAAO,QAAQ,MAAM;AACnB,YAAM,SAAS,aAAa,IAAI,GAAG;AACnC,UAAI,OAAQ,QAAO,uBAAO,QAAQ,MAAM;AACxC,aAAO,uBAAO,QAAQ,GAAU,EAAE;AAAA,QAChC,uBAAO;
AAAA,UAAI,CAAC,OACV,uBAAO,KAAK,MAAM;AAChB,yBAAa,IAAI,KAAK,EAAE;AAAA,UAC1B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,CAAC;AAEH,UAAM,QAAoB,CAAC;AAC3B,UAAM,UAAsB,CAAC;AAC7B,UAAM,YAAwB,CAAC;AAC/B,UAAM,gBAA4B,CAAC;AAEnC,aAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK,GAAG;AAC7C,YAAM,OAAO,WAAW,CAAC;AACzB,YAAM,cAAc,OAAO,eAAe,KAAK,KAAK,GAAG;AACvD,YAAM,YAAY,OAAO,eAAe,KAAK,GAAG,GAAG;AAEnD,YAAM,UAAU,GAAG,YAAY,QAAQ,KAAK,YAAY,UAAU;AAClE,YAAM,aAAa,IAAI,CAAC;AACxB,YAAM,iBAAiB,IAAI,CAAC;AAE5B,YAAM,KAAK;AAAA,QACT,IAAI;AAAA,QACJ,MAAM;AAAA,QACN,UAAU,YAAY;AAAA,QACtB,aAAa,YAAY;AAAA,QACzB,WAAW,KAAK,KAAK,UAAU;AAAA,MACjC,CAAC;AACD,YAAM,KAAK;AAAA,QACT,IAAI;AAAA,QACJ,MAAM;AAAA,QACN,UAAU,UAAU;AAAA,QACpB,aAAa,UAAU;AAAA,QACvB,WAAW,KAAK,GAAG;AAAA,MACrB,CAAC;AAED,cAAQ,KAAK,EAAE,MAAM,YAAY,IAAI,eAAe,CAAC;AAErD,gBAAU,KAAK,EAAE,QAAQ,YAAY,mBAAmB,SAAS,WAAW,KAAK,KAAK,UAAU,CAAC;AACjG,oBAAc,KAAK;AAAA,QACjB,QAAQ;AAAA,QACR,UAAU,CAAC,YACR,UAAU,SAAS,EAAE,MAAM,KAAK,GAAG,WAAW,QAAQ,CAAQ,EAAU,KAAK,uBAAO,MAAM;AAAA,MAC/F,CAAC;AAAA,IACH;AAEA,UAAM,KAAwB;AAAA,MAC5B,SAAS;AAAA,MACT;AAAA,MACA,OAAO;AAAA,IACT;AAEA,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,UAAM,aAAa,QAAQ,wBAAwB,YAAmB;AAEtE,QAAI,WAAW,SAAS,GAAG;AACzB,YAAM,WAAW,wBAAwB,EAAE,QAAQ,GAAG,CAAC;AACvD,iBAAW,aAAa,YAAY;AAClC,kBAAU,SAAS,QAAe;AAAA,MACpC;AAAA,IACF;AAEA,WAAO,uBAAO;AAAA,MAAa,MACzB,uBAAO,KAAK,MAAM;AAChB,mBAAW;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO,uBAAO;AAAA,EAChB,CAAC;AAED,QAAM,aAAgC;AAAA,IACpC,WAAW,SAAS,MAAM;AAAA,IAC1B,UAAU,MAAM,KAAK,QAAQ;AAAA,IAC7B,UAAU;AAAA,IACV,aAAa,EAAE,MAAM,SAAS;AAAA,IAC9B,aAAa,EAAE,MAAM,WAAW;AAAA,IAChC,kBAAkB;AAAA,EACpB;AAEA,SAAY,WAAW,SAAS;AAAA,IAC9B;AAAA,IACA,MAAM;AAAA,EACR,CAAC;AACH;","names":["attachMeta","getDefinition","getMeta","make","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","link","import_effect","import_effect","import_effect","imp
ort_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","record","import_effect","import_effect","import_effect","import_effect","import_mutative","import_effect","import_mutative","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","SchemaAST","import_effect","import_effect","import_effect","import_effect","SchemaAST","import_effect","staticIr","selectorId","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","nowMs","import_effect","import_effect","import_effect","import_effect","import_effect","module","compile","make","getDefinition","getMeta","attachMeta","module","build","compile"]}
|