@logixjs/core 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/dist/{Bound-BN1DQ_lM.d.ts → Bound-CEa1ihvH.d.ts} +2 -2
- package/dist/{Bound-BPIfH9SS.d.cts → Bound-CNLNkC7c.d.cts} +2 -2
- package/dist/Bound.cjs +620 -163
- package/dist/Bound.cjs.map +1 -1
- package/dist/Bound.d.cts +3 -3
- package/dist/Bound.d.ts +3 -3
- package/dist/Bound.js +14 -14
- package/dist/{Debug-Bq8Sqjcr.d.cts → Debug-BhMYr-1i.d.cts} +3 -3
- package/dist/{Debug-B5q5Bkzx.d.ts → Debug-ByM7m4Ft.d.ts} +3 -3
- package/dist/Debug.cjs +553 -32
- package/dist/Debug.cjs.map +1 -1
- package/dist/Debug.d.cts +10 -8
- package/dist/Debug.d.ts +10 -8
- package/dist/Debug.js +12 -10
- package/dist/EffectOp.cjs.map +1 -1
- package/dist/EffectOp.js +2 -3
- package/dist/EffectOp.js.map +1 -1
- package/dist/Env.cjs +664 -6
- package/dist/Env.cjs.map +1 -1
- package/dist/Env.js +5 -2
- package/dist/ExternalStore-BAz83PVq.d.cts +60 -0
- package/dist/ExternalStore-BYWPbYs8.d.ts +60 -0
- package/dist/ExternalStore.cjs +746 -0
- package/dist/ExternalStore.cjs.map +1 -0
- package/dist/ExternalStore.d.cts +4 -0
- package/dist/ExternalStore.d.ts +4 -0
- package/dist/ExternalStore.js +19 -0
- package/dist/ExternalStore.js.map +1 -0
- package/dist/{Flow-1fZT8MpX.d.cts → Flow-BlSoMmhV.d.cts} +2 -2
- package/dist/{Flow-BhpjE22E.d.ts → Flow-CQSGve5c.d.ts} +2 -2
- package/dist/Flow.cjs +2 -2
- package/dist/Flow.cjs.map +1 -1
- package/dist/Flow.d.cts +4 -4
- package/dist/Flow.d.ts +4 -4
- package/dist/Flow.js +7 -8
- package/dist/{Handle-D_cLW1Z3.d.ts → Handle-B7PSmsrY.d.ts} +1 -1
- package/dist/{Handle-D8D1zPb_.d.cts → Handle-ByovhL-c.d.cts} +1 -1
- package/dist/Handle.d.cts +3 -3
- package/dist/Handle.d.ts +3 -3
- package/dist/{Kernel-CnGE1Fyk.d.ts → Kernel-DGSpS4GM.d.ts} +2 -2
- package/dist/{Kernel-8kC-jOda.d.cts → Kernel-DZAk-Mrn.d.cts} +2 -2
- package/dist/Kernel.cjs +680 -22
- package/dist/Kernel.cjs.map +1 -1
- package/dist/Kernel.d.cts +10 -8
- package/dist/Kernel.d.ts +10 -8
- package/dist/Kernel.js +7 -4
- package/dist/{Link-Db7975nU.d.ts → Link-Cm4eR9n0.d.ts} +10 -3
- package/dist/{Link-fX8x1eCK.d.cts → Link-DF8i8iWR.d.cts} +10 -3
- package/dist/Link.cjs +1128 -86
- package/dist/Link.cjs.map +1 -1
- package/dist/Link.d.cts +3 -3
- package/dist/Link.d.ts +3 -3
- package/dist/Link.js +30 -25
- package/dist/{Logic-DRh4sDZj.d.cts → Logic-BcQA0AvE.d.cts} +1 -1
- package/dist/{Logic-BRjEMr-W.d.ts → Logic-OotSE1xw.d.ts} +1 -1
- package/dist/Logic.d.cts +3 -3
- package/dist/Logic.d.ts +3 -3
- package/dist/{MatchBuilder-CJk5oCkR.d.cts → MatchBuilder-BNDJ8waF.d.ts} +1 -1
- package/dist/{MatchBuilder-0QOc-nlU.d.ts → MatchBuilder-CvZ5WY1B.d.cts} +1 -1
- package/dist/MatchBuilder.d.cts +4 -4
- package/dist/MatchBuilder.d.ts +4 -4
- package/dist/Middleware-D8tUDLv_.d.cts +100 -0
- package/dist/Middleware-DS7CbTTN.d.ts +100 -0
- package/dist/Middleware.cjs +461 -13
- package/dist/Middleware.cjs.map +1 -1
- package/dist/Middleware.d.cts +2 -86
- package/dist/Middleware.d.ts +2 -86
- package/dist/Middleware.js +13 -11
- package/dist/{Module-DnzluX2J.d.ts → Module-CFj0I2yE.d.ts} +45 -18
- package/dist/{Module-B_0xRDMR.d.cts → Module-DpXPW9EQ.d.cts} +45 -18
- package/dist/Module.cjs +8583 -5741
- package/dist/Module.cjs.map +1 -1
- package/dist/Module.d.cts +5 -4
- package/dist/Module.d.ts +5 -4
- package/dist/Module.js +32 -27
- package/dist/ModuleTag-BcVF6z7B.d.ts +113 -0
- package/dist/ModuleTag-DuZXo_NS.d.cts +113 -0
- package/dist/ModuleTag.cjs +2609 -1232
- package/dist/ModuleTag.cjs.map +1 -1
- package/dist/ModuleTag.d.cts +4 -4
- package/dist/ModuleTag.d.ts +4 -4
- package/dist/ModuleTag.js +28 -25
- package/dist/{Observability-cY4kLn0S.d.ts → Observability-D-ZWeEVb.d.ts} +22 -15
- package/dist/{Observability-COqEvp2C.d.cts → Observability-V7sRMYTh.d.cts} +22 -15
- package/dist/Observability.cjs +1938 -640
- package/dist/Observability.cjs.map +1 -1
- package/dist/Observability.d.cts +4 -4
- package/dist/Observability.d.ts +4 -4
- package/dist/Observability.js +21 -19
- package/dist/{Process-mL8fHDSB.d.cts → Process-B55aJMFk.d.cts} +29 -4
- package/dist/{Process-CM9xbMdP.d.ts → Process-DvhFEwUS.d.ts} +29 -4
- package/dist/Process.cjs +1122 -85
- package/dist/Process.cjs.map +1 -1
- package/dist/Process.d.cts +4 -3
- package/dist/Process.d.ts +4 -3
- package/dist/Process.js +27 -22
- package/dist/{ReadQuery-BlMwhe-F.d.ts → ReadQuery-C4vZ8Prc.d.ts} +2 -2
- package/dist/{ReadQuery-SinbStGF.d.ts → ReadQuery-CafjlJQo.d.cts} +1 -1
- package/dist/{ReadQuery-SinbStGF.d.cts → ReadQuery-CafjlJQo.d.ts} +1 -1
- package/dist/{ReadQuery-CL5XlXts.d.cts → ReadQuery-mc0NgrFV.d.cts} +2 -2
- package/dist/ReadQuery.cjs +3 -3
- package/dist/ReadQuery.cjs.map +1 -1
- package/dist/ReadQuery.d.cts +2 -2
- package/dist/ReadQuery.d.ts +2 -2
- package/dist/ReadQuery.js +3 -3
- package/dist/{Reflection-CQnKwPXj.d.ts → Reflection-C8xZ267q.d.ts} +88 -7
- package/dist/{Reflection-Kabo1mlU.d.cts → Reflection-DP7Vsv3f.d.cts} +88 -7
- package/dist/Reflection.cjs +2934 -1553
- package/dist/Reflection.cjs.map +1 -1
- package/dist/Reflection.d.cts +14 -11
- package/dist/Reflection.d.ts +14 -11
- package/dist/Reflection.js +26 -21
- package/dist/Resource.cjs +670 -12
- package/dist/Resource.cjs.map +1 -1
- package/dist/Resource.js +6 -3
- package/dist/Root.cjs +675 -17
- package/dist/Root.cjs.map +1 -1
- package/dist/Root.js +7 -3
- package/dist/{Runtime-CtyzZG4i.d.ts → Runtime-BWc9YfUB.d.ts} +37 -7
- package/dist/{Runtime-B-aL-f29.d.cts → Runtime-PShIC4DW.d.cts} +37 -7
- package/dist/Runtime.cjs +1899 -809
- package/dist/Runtime.cjs.map +1 -1
- package/dist/Runtime.d.cts +14 -11
- package/dist/Runtime.d.ts +14 -11
- package/dist/Runtime.js +33 -28
- package/dist/ScopeRegistry.cjs +668 -10
- package/dist/ScopeRegistry.cjs.map +1 -1
- package/dist/ScopeRegistry.js +6 -3
- package/dist/{StateTrait-OWhbj12c.d.cts → StateTrait-CQsDlXJm.d.cts} +23 -6
- package/dist/{StateTrait-BGsZghTz.d.ts → StateTrait-YvJzVDKl.d.ts} +23 -6
- package/dist/StateTrait.cjs +1475 -370
- package/dist/StateTrait.cjs.map +1 -1
- package/dist/StateTrait.d.cts +7 -5
- package/dist/StateTrait.d.ts +7 -5
- package/dist/StateTrait.js +17 -14
- package/dist/{TraitLifecycle-LdIWmKlg.d.ts → TraitLifecycle-CjIBICAA.d.ts} +2 -2
- package/dist/{TraitLifecycle-CwV5WPFX.d.cts → TraitLifecycle-NmqGiXPC.d.cts} +2 -2
- package/dist/TraitLifecycle.cjs +489 -32
- package/dist/TraitLifecycle.cjs.map +1 -1
- package/dist/TraitLifecycle.d.cts +4 -4
- package/dist/TraitLifecycle.d.ts +4 -4
- package/dist/TraitLifecycle.js +7 -7
- package/dist/Workflow-BlFG_20_.d.cts +414 -0
- package/dist/Workflow-CW9S_aAP.d.ts +414 -0
- package/dist/Workflow.cjs +2977 -0
- package/dist/Workflow.cjs.map +1 -0
- package/dist/Workflow.d.cts +7 -0
- package/dist/Workflow.d.ts +7 -0
- package/dist/Workflow.js +55 -0
- package/dist/Workflow.js.map +1 -0
- package/dist/{chunk-G5ZBFPNU.js → chunk-2A4UKO2D.js} +2 -2
- package/dist/chunk-2DE6D42I.js +248 -0
- package/dist/chunk-2DE6D42I.js.map +1 -0
- package/dist/{chunk-ANLBCBDC.js → chunk-2DVLMSOE.js} +6 -6
- package/dist/{chunk-BE3HW4FY.js → chunk-34CF6OGE.js} +14 -16
- package/dist/chunk-34CF6OGE.js.map +1 -0
- package/dist/{chunk-ZFY7U2FR.js → chunk-3LPIXG56.js} +43 -3
- package/dist/chunk-3LPIXG56.js.map +1 -0
- package/dist/chunk-3VZYDNXZ.js +10 -0
- package/dist/chunk-3VZYDNXZ.js.map +1 -0
- package/dist/{chunk-3TMODYZV.js → chunk-3XO4HR6V.js} +2 -2
- package/dist/chunk-46FGVWRF.js +817 -0
- package/dist/chunk-46FGVWRF.js.map +1 -0
- package/dist/chunk-4LODUXFI.js +288 -0
- package/dist/chunk-4LODUXFI.js.map +1 -0
- package/dist/{chunk-GMPEOUP2.js → chunk-4MZ7BT3R.js} +2 -2
- package/dist/chunk-4MZ7BT3R.js.map +1 -0
- package/dist/{chunk-TKZ7MEIA.js → chunk-53GVPGSM.js} +2 -2
- package/dist/{chunk-KP7MUZNX.js → chunk-5W2V2NVJ.js} +2 -2
- package/dist/chunk-5W2V2NVJ.js.map +1 -0
- package/dist/chunk-6DACKW3D.js +613 -0
- package/dist/chunk-6DACKW3D.js.map +1 -0
- package/dist/chunk-AQ7L2QZ5.js +1395 -0
- package/dist/chunk-AQ7L2QZ5.js.map +1 -0
- package/dist/{chunk-NZJKFF45.js → chunk-C2UZZQ76.js} +2 -2
- package/dist/chunk-CCKP5Z6F.js +701 -0
- package/dist/chunk-CCKP5Z6F.js.map +1 -0
- package/dist/chunk-CUKM2XUW.js +27 -0
- package/dist/{chunk-QCHIQWAJ.js.map → chunk-CUKM2XUW.js.map} +1 -1
- package/dist/{chunk-M2RGJPXX.js → chunk-DBD6Q6JH.js} +3 -3
- package/dist/{chunk-ZGDVUPTM.js → chunk-EB4RGQO3.js} +2 -2
- package/dist/{chunk-PAYXCY6A.js → chunk-G7ESIQTI.js} +12 -14
- package/dist/chunk-G7ESIQTI.js.map +1 -0
- package/dist/chunk-GPBAZQ23.js +348 -0
- package/dist/chunk-GPBAZQ23.js.map +1 -0
- package/dist/{chunk-OFADUJWJ.js → chunk-I4LCE5OY.js} +3 -5
- package/dist/{chunk-OFADUJWJ.js.map → chunk-I4LCE5OY.js.map} +1 -1
- package/dist/{chunk-DFNM3WX2.js → chunk-IMCC6TBN.js} +158 -39
- package/dist/chunk-IMCC6TBN.js.map +1 -0
- package/dist/{chunk-76WT3HOR.js → chunk-IROZNQAF.js} +22 -21
- package/dist/chunk-IROZNQAF.js.map +1 -0
- package/dist/{chunk-TAAPQVZN.js → chunk-ISKNULNH.js} +2 -2
- package/dist/chunk-J3CWXIPV.js +242 -0
- package/dist/chunk-J3CWXIPV.js.map +1 -0
- package/dist/{chunk-PYOE4VSI.js → chunk-JBKYRTCS.js} +224 -161
- package/dist/chunk-JBKYRTCS.js.map +1 -0
- package/dist/{chunk-66ALHVEX.js → chunk-KKIAYH4X.js} +3 -3
- package/dist/{chunk-3RMKLXHX.js → chunk-KLDVG3SY.js} +2 -2
- package/dist/{chunk-BABLDP24.js → chunk-KSZQYSEH.js} +3 -3
- package/dist/chunk-KSZQYSEH.js.map +1 -0
- package/dist/{chunk-CW6T36TN.js → chunk-M3M7JFAH.js} +4 -4
- package/dist/chunk-M3M7JFAH.js.map +1 -0
- package/dist/{chunk-THATMZXD.js → chunk-MLB253V2.js} +2 -2
- package/dist/{chunk-THATMZXD.js.map → chunk-MLB253V2.js.map} +1 -1
- package/dist/{chunk-JGIWG6SR.js → chunk-MS77U77X.js} +664 -550
- package/dist/chunk-MS77U77X.js.map +1 -0
- package/dist/chunk-MW4FA3MW.js +23 -0
- package/dist/chunk-MW4FA3MW.js.map +1 -0
- package/dist/chunk-MYKNINNN.js +228 -0
- package/dist/chunk-MYKNINNN.js.map +1 -0
- package/dist/{chunk-4CQAV7YB.js → chunk-O6TTQXTY.js} +2 -2
- package/dist/{chunk-NBD3KUOZ.js → chunk-OJDJ4VDQ.js} +35 -24
- package/dist/chunk-OJDJ4VDQ.js.map +1 -0
- package/dist/{chunk-NQZ2OSGR.js → chunk-PVZEMNJY.js} +9 -9
- package/dist/chunk-PVZEMNJY.js.map +1 -0
- package/dist/chunk-RN26DV2M.js +271 -0
- package/dist/chunk-RN26DV2M.js.map +1 -0
- package/dist/{chunk-JCXGZRMU.js → chunk-RQQW3IQC.js} +3 -3
- package/dist/chunk-RQQW3IQC.js.map +1 -0
- package/dist/{chunk-24VULZ7A.js → chunk-TKOGZDD6.js} +3 -3
- package/dist/{chunk-EGK3KN7B.js → chunk-TQYLVXGY.js} +70 -39
- package/dist/chunk-TQYLVXGY.js.map +1 -0
- package/dist/{chunk-QMM6O4CD.js → chunk-UACD2CL2.js} +15 -3
- package/dist/{chunk-QMM6O4CD.js.map → chunk-UACD2CL2.js.map} +1 -1
- package/dist/{chunk-M3WTHJHJ.js → chunk-VH575UTV.js} +30 -34
- package/dist/chunk-VH575UTV.js.map +1 -0
- package/dist/{chunk-AUIR5O6W.js → chunk-WWBMC24F.js} +9 -15
- package/dist/chunk-WWBMC24F.js.map +1 -0
- package/dist/{chunk-JWOYLO27.js → chunk-WYJUJV4L.js} +80 -7
- package/dist/chunk-WYJUJV4L.js.map +1 -0
- package/dist/{chunk-EY4NZKDR.js → chunk-XFMMPYNU.js} +2 -2
- package/dist/chunk-Y4VRBIS6.js +35 -0
- package/dist/chunk-Y4VRBIS6.js.map +1 -0
- package/dist/{chunk-DMBALCE2.js → chunk-ZC7MSQ5U.js} +77 -4
- package/dist/chunk-ZC7MSQ5U.js.map +1 -0
- package/dist/{chunk-OGWBVHB3.js → chunk-ZCK6SCOE.js} +67 -8
- package/dist/chunk-ZCK6SCOE.js.map +1 -0
- package/dist/{chunk-IHVBV5C2.js → chunk-ZTFTABXV.js} +2 -1
- package/dist/chunk-ZTFTABXV.js.map +1 -0
- package/dist/index.cjs +9532 -5017
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +163 -27
- package/dist/index.d.ts +163 -27
- package/dist/index.js +119 -56
- package/dist/index.js.map +1 -1
- package/dist/{ir-BMP7yxJJ.d.cts → ir-C-Zm_GlZ.d.cts} +1 -1
- package/dist/{ir-DUOz6H-5.d.ts → ir-DGyGiwVe.d.ts} +1 -1
- package/dist/{module-k7m3txak.d.ts → module-DqQ1U-Me.d.ts} +129 -100
- package/dist/{module-B8CBqIZ_.d.cts → module-doenaCsZ.d.cts} +129 -100
- package/package.json +12 -1
- package/dist/ModuleTag-C8FHY_sY.d.ts +0 -93
- package/dist/ModuleTag-EGbgBMpZ.d.cts +0 -93
- package/dist/chunk-3QMIVH35.js +0 -43
- package/dist/chunk-3QMIVH35.js.map +0 -1
- package/dist/chunk-76WT3HOR.js.map +0 -1
- package/dist/chunk-AUIR5O6W.js.map +0 -1
- package/dist/chunk-BABLDP24.js.map +0 -1
- package/dist/chunk-BE3HW4FY.js.map +0 -1
- package/dist/chunk-CW6T36TN.js.map +0 -1
- package/dist/chunk-DFNM3WX2.js.map +0 -1
- package/dist/chunk-DMBALCE2.js.map +0 -1
- package/dist/chunk-EGK3KN7B.js.map +0 -1
- package/dist/chunk-GMPEOUP2.js.map +0 -1
- package/dist/chunk-IHVBV5C2.js.map +0 -1
- package/dist/chunk-JCXGZRMU.js.map +0 -1
- package/dist/chunk-JGIWG6SR.js.map +0 -1
- package/dist/chunk-JWOYLO27.js.map +0 -1
- package/dist/chunk-KIXAU3GM.js +0 -137
- package/dist/chunk-KIXAU3GM.js.map +0 -1
- package/dist/chunk-KP7MUZNX.js.map +0 -1
- package/dist/chunk-M3WTHJHJ.js.map +0 -1
- package/dist/chunk-M7IYCTJV.js +0 -79
- package/dist/chunk-M7IYCTJV.js.map +0 -1
- package/dist/chunk-NBD3KUOZ.js.map +0 -1
- package/dist/chunk-NQZ2OSGR.js.map +0 -1
- package/dist/chunk-OGWBVHB3.js.map +0 -1
- package/dist/chunk-PAYXCY6A.js.map +0 -1
- package/dist/chunk-PYOE4VSI.js.map +0 -1
- package/dist/chunk-QCHIQWAJ.js +0 -21
- package/dist/chunk-VZB726PE.js +0 -93
- package/dist/chunk-VZB726PE.js.map +0 -1
- package/dist/chunk-W3TEWHLO.js +0 -568
- package/dist/chunk-W3TEWHLO.js.map +0 -1
- package/dist/chunk-ZFLHVFUC.js +0 -192
- package/dist/chunk-ZFLHVFUC.js.map +0 -1
- package/dist/chunk-ZFY7U2FR.js.map +0 -1
- /package/dist/{chunk-G5ZBFPNU.js.map → chunk-2A4UKO2D.js.map} +0 -0
- /package/dist/{chunk-ANLBCBDC.js.map → chunk-2DVLMSOE.js.map} +0 -0
- /package/dist/{chunk-3TMODYZV.js.map → chunk-3XO4HR6V.js.map} +0 -0
- /package/dist/{chunk-TKZ7MEIA.js.map → chunk-53GVPGSM.js.map} +0 -0
- /package/dist/{chunk-NZJKFF45.js.map → chunk-C2UZZQ76.js.map} +0 -0
- /package/dist/{chunk-M2RGJPXX.js.map → chunk-DBD6Q6JH.js.map} +0 -0
- /package/dist/{chunk-ZGDVUPTM.js.map → chunk-EB4RGQO3.js.map} +0 -0
- /package/dist/{chunk-TAAPQVZN.js.map → chunk-ISKNULNH.js.map} +0 -0
- /package/dist/{chunk-66ALHVEX.js.map → chunk-KKIAYH4X.js.map} +0 -0
- /package/dist/{chunk-3RMKLXHX.js.map → chunk-KLDVG3SY.js.map} +0 -0
- /package/dist/{chunk-4CQAV7YB.js.map → chunk-O6TTQXTY.js.map} +0 -0
- /package/dist/{chunk-24VULZ7A.js.map → chunk-TKOGZDD6.js.map} +0 -0
- /package/dist/{chunk-EY4NZKDR.js.map → chunk-XFMMPYNU.js.map} +0 -0
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/TraitLifecycle.ts","../src/internal/trait-lifecycle/index.ts","../src/internal/runtime/core/TaskRunner.ts","../src/internal/runtime/core/DebugSink.ts","../src/internal/observability/jsonValue.ts","../src/internal/runtime/core/errorSummary.ts","../src/internal/runtime/core/EffectOpCore.ts","../src/internal/runtime/core/env.ts","../src/internal/runtime/core/ReplayLog.ts","../src/internal/state-trait/rowid.ts","../src/internal/runtime/core/runtimeInternalsAccessor.ts"],"sourcesContent":["// TraitLifecycle: shared lower-level interface for Form/Query (@logixjs/core/TraitLifecycle, Phase 2 placeholder).\n//\n// - Exposes serializable FieldRef and unified request protocols.\n// - Concrete implementation lives in internal/trait-lifecycle/*; this module provides public API + type exports.\n\nimport type { BoundApi } from './Bound.js'\nimport { Effect } from 'effect'\nimport * as Internal from './internal/trait-lifecycle/index.js'\n\nexport type FieldRef = Internal.FieldRef\nexport type ValidateMode = Internal.ValidateRequest['mode']\nexport type ValidateRequest = Internal.ValidateRequest\nexport type ExecuteRequest = Internal.ExecuteRequest\nexport type CleanupRequest = Internal.CleanupRequest\nexport type SourceWiring = ReturnType<typeof Internal.makeSourceWiring>\n\nexport const Ref = Internal.Ref\n\nexport const scopedValidate = (bound: BoundApi<any, any>, request: ValidateRequest): Effect.Effect<void, never, any> =>\n Internal.scopedValidate(bound as any, request)\n\nexport const scopedExecute = (bound: BoundApi<any, any>, request: ExecuteRequest): Effect.Effect<void, never, any> =>\n Internal.scopedExecute(bound as any, request)\n\nexport const cleanup = (bound: BoundApi<any, any>, request: CleanupRequest): Effect.Effect<void, never, any> =>\n Internal.cleanup(bound as any, request)\n\nexport const makeSourceWiring = (bound: BoundApi<any, any>, module: unknown): SourceWiring =>\n Internal.makeSourceWiring(bound as any, module)\n\nexport 
const install = (bound: BoundApi<any, any>): Effect.Effect<void, never, any> => Internal.install(bound as any)\n","import { Effect, FiberRef, Option } from 'effect'\nimport type { BoundApi } from '../runtime/core/module.js'\nimport * as TaskRunner from '../runtime/core/TaskRunner.js'\nimport * as ReplayLog from '../runtime/core/ReplayLog.js'\nimport type { CleanupRequest, ExecuteRequest, FieldRef, ValidateRequest } from './model.js'\nimport type * as StateTraitValidate from '../state-trait/validate.js'\nimport * as RowId from '../state-trait/rowid.js'\nimport { getBoundInternals, getModuleTraitsProgram } from '../runtime/core/runtimeInternalsAccessor.js'\n\nexport type { CleanupRequest, ExecuteRequest, FieldRef, ValidateRequest }\n\n/**\n * Ref: builders for FieldRef (serializable and comparable).\n *\n * Notes:\n * - Phase 2 provides only minimal constructors.\n * - Array optimizations (RowID/trackBy) and richer ref semantics will land in later phases.\n */\nexport const Ref = {\n field: (path: string): FieldRef => ({ kind: 'field', path }),\n list: (path: string, listIndexPath?: ReadonlyArray<number>): FieldRef => ({\n kind: 'list',\n path,\n listIndexPath,\n }),\n item: (\n path: string,\n index: number,\n options?: { readonly listIndexPath?: ReadonlyArray<number>; readonly field?: string },\n ): FieldRef => ({\n kind: 'item',\n path,\n index,\n listIndexPath: options?.listIndexPath,\n field: options?.field,\n }),\n /**\n * fromValuePath:\n * - Parse a valuePath (e.g. 
\"items.0.warehouseId\") into a FieldRef.\n * - Supports a single index (required) and multi-level indices via listIndexPath (optional but recommended).\n *\n * Conventions:\n * - If numeric segments are present, return an item ref (the last index becomes item.index; preceding indices go into listIndexPath).\n * - If no numeric segments are present:\n * - \"items[]\" (pattern list root) returns a list ref.\n * - Otherwise return a field ref (including pattern fields like \"items[].x\").\n * - \"$root\" / empty string returns the root ref.\n */\n fromValuePath: (valuePath: string): FieldRef => {\n const raw = typeof valuePath === 'string' ? valuePath.trim() : ''\n if (!raw || raw === '$root') return Ref.root()\n\n const segments = raw.split('.').filter(Boolean)\n if (segments.length === 0) return Ref.root()\n\n const isIndex = (seg: string): boolean => /^[0-9]+$/.test(seg)\n const stripPattern = (seg: string): string => (seg.endsWith('[]') ? seg.slice(0, -2) : seg)\n\n const indexPositions: Array<{ readonly pos: number; readonly index: number }> = []\n for (let i = 0; i < segments.length; i++) {\n const seg = segments[i]!\n if (!isIndex(seg)) continue\n const index = Number(seg)\n if (!Number.isFinite(index) || index < 0) return Ref.field(raw)\n indexPositions.push({ pos: i, index })\n }\n\n // valuePath with indices => item ref\n if (indexPositions.length > 0) {\n const last = indexPositions[indexPositions.length - 1]!\n const listIndexPath = indexPositions.slice(0, -1).map((p) => p.index)\n\n const listPath = segments\n .slice(0, last.pos)\n .filter((seg) => !isIndex(seg))\n .map(stripPattern)\n .join('.')\n\n if (!listPath) return Ref.field(raw)\n\n const fieldRest = segments\n .slice(last.pos + 1)\n .map(stripPattern)\n .join('.')\n\n return Ref.item(listPath, last.index, {\n listIndexPath: listIndexPath.length > 0 ? listIndexPath : undefined,\n field: fieldRest ? 
fieldRest : undefined,\n })\n }\n\n // pattern list root => list ref\n if (segments.some((s) => s.includes('[]'))) {\n const last = segments[segments.length - 1]!\n if (last.endsWith('[]')) {\n return Ref.list(segments.map(stripPattern).join('.'))\n }\n }\n\n return Ref.field(raw)\n },\n root: (): FieldRef => ({ kind: 'root' }),\n} as const\n\n/**\n * scopedValidate (placeholder): in later phases this will compute the minimal set via ReverseClosure and write back into the error tree.\n */\nexport const scopedValidate = (bound: BoundApi<any, any>, request: ValidateRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n let internals: ReturnType<typeof getBoundInternals> | undefined\n try {\n internals = getBoundInternals(bound as any)\n } catch {\n return\n }\n\n const enqueue = internals.traits.enqueueStateTraitValidateRequest as unknown as\n | ((r: StateTraitValidate.ScopedValidateRequest) => void)\n | undefined\n\n if (!enqueue) return\n\n const toTarget = (ref: FieldRef): StateTraitValidate.ValidateTarget => {\n if (ref.kind === 'root') return { kind: 'root' }\n if (ref.kind === 'field') return { kind: 'field', path: ref.path }\n if (ref.kind === 'list') {\n return {\n kind: 'list',\n path: ref.path,\n ...(ref.listIndexPath ? { listIndexPath: ref.listIndexPath } : {}),\n }\n }\n return {\n kind: 'item',\n path: ref.path,\n index: ref.index,\n ...(ref.listIndexPath ? 
{ listIndexPath: ref.listIndexPath } : {}),\n field: ref.field,\n }\n }\n\n const internal: StateTraitValidate.ScopedValidateRequest = {\n mode: request.mode as any,\n target: toTarget(request.target),\n }\n\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n enqueue(internal)\n return\n }\n\n yield* internals.txn.runWithStateTransaction({ kind: 'trait', name: 'scopedValidate', details: request }, () =>\n Effect.sync(() => {\n enqueue(internal)\n }),\n )\n })\n\n/**\n * scopedExecute (placeholder): a unified execution entrypoint for query/resource actions (refresh/invalidate, etc.).\n */\nexport const scopedExecute = (bound: BoundApi<any, any>, request: ExecuteRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (request.kind === 'source:refresh') {\n const toFieldPath = (ref: FieldRef): string | undefined => {\n if (ref.kind === 'root') return undefined\n if (ref.kind === 'field' || ref.kind === 'list') return ref.path\n const base = `${ref.path}[]`\n return ref.field ? 
`${base}.${ref.field}` : base\n }\n\n const fieldPath = toFieldPath(request.target)\n if (!fieldPath) {\n return\n }\n\n yield* bound.traits.source.refresh(fieldPath) as Effect.Effect<void, never, any>\n return\n }\n\n if (request.kind !== 'query:invalidate') {\n return\n }\n\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog)\n if (Option.isNone(replayLogOpt)) {\n return\n }\n\n let moduleId: string | undefined\n let instanceId: string | undefined\n try {\n const internals = getBoundInternals(bound as any)\n moduleId = internals.moduleId\n instanceId = internals.instanceId\n } catch {\n moduleId = undefined\n instanceId = undefined\n }\n\n yield* replayLogOpt.value.record({\n _tag: 'InvalidateRequest',\n timestamp: Date.now(),\n moduleId,\n instanceId,\n kind: 'query',\n target: 'query',\n meta: request.request,\n })\n })\n\n/**\n * cleanup (placeholder): deterministic cleanup under structural changes (errors/ui/resources).\n */\nexport const cleanup = (bound: BoundApi<any, any>, request: CleanupRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const apply = () =>\n bound.state.mutate((draft) => {\n const clearAt = (root: 'errors' | 'ui', path: string): void => {\n if (!path) return\n RowId.unsetAtPathMutating(draft, `${root}.${path}`)\n }\n\n if (request.kind === 'field:unregister') {\n const target = request.target\n if (target.kind !== 'field') return\n clearAt('errors', target.path)\n clearAt('ui', target.path)\n return\n }\n\n if (request.kind === 'list:item:remove') {\n const target = request.target\n if (target.kind !== 'item') return\n const base = `${target.path}.${target.index}`\n const path = target.field ? 
`${base}.${target.field}` : base\n clearAt('errors', path)\n clearAt('ui', path)\n return\n }\n\n if (request.kind === 'list:reorder') {\n // Reorder does not change the external index semantics by itself.\n // Alignment of errors/ui should be handled by domain reducers or higher-level logic; keep it a no-op here.\n return\n }\n })\n\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n return yield* apply()\n }\n\n let runWithTxn:\n | ((\n origin: { readonly kind: string; readonly name?: string; readonly details?: unknown },\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>)\n | undefined\n\n try {\n const internals = getBoundInternals(bound as any)\n runWithTxn = (origin, body) => internals.txn.runWithStateTransaction(origin as any, body)\n } catch {\n runWithTxn = undefined\n }\n\n if (!runWithTxn) {\n return yield* apply()\n }\n\n return yield* runWithTxn({ kind: 'trait', name: 'cleanup', details: request }, apply)\n })\n\ntype SourceWiring = {\n readonly setup: Effect.Effect<void, never, any>\n readonly refreshOnKeyChange: (changedPath: string) => Effect.Effect<void, never, any>\n}\n\nconst isAuxRootPath = (path: string): boolean =>\n path === 'errors' || path === 'ui' || path.startsWith('errors.') || path.startsWith('ui.')\n\nconst toPatternPath = (path: string): string => {\n if (!path) return path\n const segments = path.split('.').filter(Boolean)\n return segments\n .map((seg) => (/^[0-9]+$/.test(seg) ? '[]' : seg))\n .join('.')\n .replace(/\\.\\[\\]/g, '[]')\n}\n\nconst isDepAffectedByChange = (dep: string, changed: string): boolean => {\n if (!dep || !changed) return false\n if (dep === changed) return true\n if (changed.startsWith(`${dep}.`)) return true\n if (changed.startsWith(`${dep}[]`)) return true\n // list root structural change (e.g. changed=\"items\") should affect any list-item deps (e.g. 
dep=\"items[].x\").\n if (dep.startsWith(`${changed}[]`)) return true\n return false\n}\n\nexport const makeSourceWiring = (bound: BoundApi<any, any>, module: unknown): SourceWiring => {\n const program = getModuleTraitsProgram(module as any) as { readonly entries?: ReadonlyArray<any> } | undefined\n\n const sources = program?.entries?.filter((e) => e && e.kind === 'source') ?? []\n\n const sourceOnMount = sources.filter((e: any) =>\n Array.isArray(e?.meta?.triggers) ? e.meta.triggers.includes('onMount') : false,\n )\n\n const sourceOnKeyChange = sources.filter((e: any) =>\n Array.isArray(e?.meta?.triggers) ? e.meta.triggers.includes('onKeyChange') : false,\n )\n\n const setup = Effect.sync(() => {\n if (sourceOnMount.length === 0) return\n bound.lifecycle.onStart(\n Effect.forEach(sourceOnMount, (entry: any) => bound.traits.source.refresh(entry.fieldPath), {\n concurrency: 'unbounded',\n }).pipe(Effect.asVoid),\n )\n })\n\n const refreshOnKeyChange = (changedPath: string): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (!changedPath || isAuxRootPath(changedPath)) return\n if (sourceOnKeyChange.length === 0) return\n\n const changedPattern = toPatternPath(changedPath)\n\n yield* Effect.forEach(\n sourceOnKeyChange,\n (entry: any) => {\n const deps = (entry?.meta?.deps ?? 
[]) as ReadonlyArray<string>\n const affected = deps.some((dep) => isDepAffectedByChange(dep, changedPattern))\n if (!affected) return Effect.void\n return bound.traits.source.refresh(entry.fieldPath)\n },\n { concurrency: 'unbounded' },\n )\n }).pipe(Effect.asVoid)\n\n return { setup, refreshOnKeyChange }\n}\n\n/**\n * install (placeholder): the default wiring entrypoint for TraitLifecycle.\n *\n * Notes:\n * - Phase 2 only provides an entrypoint that feature packages can depend on.\n * - Concrete wiring for \"domain event → request → in-transaction execution\" will be composed by Form/Query default logics in later phases.\n */\nexport const install = (_bound: BoundApi<any, any>): Effect.Effect<void, never, any> => Effect.void\n","import { Cause, Effect, Fiber, FiberRef, Ref, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport type { AnyModuleShape } from './module.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\nimport type { StateTxnOrigin } from './StateTransaction.js'\n\n/**\n * Prevents calling run*Task inside a \"synchronous transaction execution fiber\" (it would deadlock the txnQueue).\n *\n * - ModuleRuntime locally marks it as true while executing each transaction (dispatch/source-refresh/devtools/...).\n * - run*Task checks the flag on start: when true, it emits diagnostics only in dev/test and then no-ops.\n */\nexport const inSyncTransactionFiber = FiberRef.unsafeMake(false)\n\n/**\n * Force source.refresh:\n * - Default: when snapshot keyHash is unchanged and a non-idle snapshot already exists, refresh SHOULD be a no-op\n * (avoid redundant IO/writeback).\n * - Exception: explicit refresh (manual refresh) / invalidation-driven refresh needs to \"re-fetch even with the same keyHash\".\n *\n * Note: use a FiberRef to locally pass \"whether this refresh is forced\", avoiding expanding the source refresh 
handler signature.\n */\nexport const forceSourceRefresh = FiberRef.unsafeMake(false)\n\n/**\n * Synchronous transaction window (process-level) marker:\n * - Used as a hard guard in \"non-Effect API\" entry points (e.g. Promise/async functions).\n * - FiberRef cannot reliably read the \"current fiber\" in such entry points, so we need a synchronous callstack-level marker.\n *\n * Note: if a transaction body incorrectly crosses async boundaries, this marker will be held longer; that is a severe violation.\n */\nlet inSyncTransactionGlobalDepth = 0\n\nexport const enterSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth += 1\n}\n\nexport const exitSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth = Math.max(0, inSyncTransactionGlobalDepth - 1)\n}\n\nexport const isInSyncTransaction = (): boolean => inSyncTransactionGlobalDepth > 0\n\nexport type TaskRunnerMode =\n | 'task' // sequential\n | 'parallel'\n | 'latest'\n | 'exhaust'\n\nexport type TaskStatus = 'idle' | 'pending' | 'running' | 'success' | 'failure' | 'interrupted'\n\nexport interface TaskExecution {\n readonly taskId: number\n readonly status: TaskStatus\n readonly acceptedAt: number\n readonly startedAt?: number\n readonly endedAt?: number\n}\n\nexport interface TaskRunnerOrigins {\n readonly pending?: StateTxnOrigin\n readonly success?: StateTxnOrigin\n readonly failure?: StateTxnOrigin\n}\n\ntype TaskHandler<Payload, Sh extends AnyModuleShape, R> =\n | Logic.Of<Sh, R, void, never>\n | ((payload: Payload) => Logic.Of<Sh, R, void, never>)\n\ntype TaskEffect<Payload, Sh extends AnyModuleShape, R, A, E> =\n | Logic.Of<Sh, R, A, E>\n | ((payload: Payload) => Logic.Of<Sh, R, A, E>)\n\nexport interface TaskRunnerConfig<Payload, Sh extends AnyModuleShape, R, A = void, E = never> {\n /**\n * Optional: trigger source name (e.g. 
actionTag / fieldPath), used as the default pending origin.name.\n * - BoundApiRuntime may fill this in for onAction(\"xxx\") / traits.source.refresh(\"field\"), etc.\n * - Other callers are not required to provide it.\n */\n readonly triggerName?: string\n\n /**\n * pending: synchronous state writes (loading=true / clearing errors, etc.), always a separate transaction entry.\n * - Only executed for tasks that are accepted and actually started (ignored triggers in runExhaustTask do not run pending).\n */\n readonly pending?: TaskHandler<Payload, Sh, R>\n\n /**\n * effect: real IO / async work (must run outside the transaction window).\n */\n readonly effect: TaskEffect<Payload, Sh, R, A, E>\n\n /**\n * success: success writeback (separate transaction entry).\n */\n readonly success?: (result: A, payload: Payload) => Logic.Of<Sh, R, void, never>\n\n /**\n * failure: failure writeback (separate transaction entry).\n *\n * Note: takes a Cause to preserve defect/interrupt semantics; interrupts do not trigger failure writeback by default.\n */\n readonly failure?: (cause: Cause.Cause<E>, payload: Payload) => Logic.Of<Sh, R, void, never>\n\n /**\n * origin: optional override for the three transaction origins.\n * - Default: pending.kind=\"task:pending\"; success/failure.kind=\"service-callback\".\n */\n readonly origin?: TaskRunnerOrigins\n\n /**\n * priority: reserved for future debugging/sorting; does not change transaction boundaries or concurrency semantics.\n */\n readonly priority?: number\n}\n\nexport interface TaskRunnerRuntime {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runWithStateTransaction: (\n origin: StateTxnOrigin,\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>\n readonly resolveConcurrencyPolicy?: () => Effect.Effect<RuntimeInternalsResolvedConcurrencyPolicy, never, any>\n}\n\nconst resolve = <Payload, Sh extends AnyModuleShape, R, A, E>(\n eff: TaskEffect<Payload, Sh, R, A, E> | 
TaskHandler<Payload, Sh, R>,\n payload: Payload,\n): any => (typeof eff === 'function' ? (eff as any)(payload) : eff)\n\nconst defaultOrigins = (triggerName: string | undefined): Required<TaskRunnerOrigins> => ({\n pending: {\n kind: 'task:pending',\n name: triggerName,\n },\n success: {\n kind: 'service-callback',\n name: 'task:success',\n },\n failure: {\n kind: 'service-callback',\n name: 'task:failure',\n },\n})\n\nexport const shouldNoopInSyncTransactionFiber = (options: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n}): Effect.Effect<boolean> =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(inSyncTransactionFiber)\n if (!inTxn) {\n return false\n }\n // Always no-op regardless of env (otherwise we may deadlock); diagnostics are emitted only in dev/test.\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: options.moduleId,\n instanceId: options.instanceId,\n code: options.code,\n severity: options.severity,\n message: options.message,\n hint: options.hint,\n actionTag: options.actionTag,\n kind: options.kind,\n })\n }\n return true\n })\n\nconst resolveConcurrencyLimit = (runtime: TaskRunnerRuntime): Effect.Effect<number | 'unbounded', never, any> =>\n runtime.resolveConcurrencyPolicy\n ? 
runtime.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\nconst runTaskLifecycle = <Payload, Sh extends AnyModuleShape, R, A, E>(\n payload: Payload,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n getCanWriteBack?: Effect.Effect<boolean>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> =>\n Effect.gen(function* () {\n const noop = yield* shouldNoopInSyncTransactionFiber({\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_usage',\n severity: 'error',\n message: 'run*Task is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n 'Call run*Task from the run section of a watcher (e.g. $.onAction/$.onState/$.on); ' +\n 'do not call it directly inside a reducer / trait.run / synchronous transaction body. For long-lived flows, use a multi-entry pattern (pending → IO → writeback).',\n kind: 'run_task_in_transaction',\n })\n if (noop) {\n return\n }\n\n const defaults = defaultOrigins(config.triggerName)\n const origins: Required<TaskRunnerOrigins> = {\n pending: config.origin?.pending ?? defaults.pending,\n success: config.origin?.success ?? defaults.success,\n failure: config.origin?.failure ?? 
defaults.failure,\n }\n\n // 1) pending: separate transaction entry; once started it should not be interrupted by runLatest.\n const pending = config.pending\n if (pending) {\n yield* Effect.uninterruptible(\n runtime.runWithStateTransaction(origins.pending, () => Effect.asVoid(resolve(pending, payload))),\n )\n }\n\n // 2) IO: runs outside the transaction window.\n const io = resolve(config.effect, payload) as Effect.Effect<A, E, Logic.Env<Sh, R>>\n const exit = yield* Effect.exit(io)\n\n // 3) writeback: use the guard to confirm it's still the current task (runLatestTask).\n if (getCanWriteBack) {\n const ok = yield* getCanWriteBack\n if (!ok) {\n return\n }\n }\n\n if (exit._tag === 'Success') {\n const success = config.success\n if (success) {\n yield* runtime.runWithStateTransaction(origins.success, () => Effect.asVoid(success(exit.value, payload)))\n }\n return\n }\n\n // Failure: interruptions do not trigger failure writeback (e.g. runLatestTask cancellation, Scope ending).\n const cause = exit.cause as Cause.Cause<E>\n if (Cause.isInterrupted(cause)) {\n return\n }\n\n const failure = config.failure\n if (failure) {\n yield* runtime.runWithStateTransaction(origins.failure, () => Effect.asVoid(failure(cause, payload)))\n }\n }).pipe(\n // Watchers must not crash as a whole due to a single task failure: swallow errors, but keep them diagnosable.\n Effect.catchAllCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'task_runner::unhandled_failure',\n severity: 'error',\n message: 'TaskRunner encountered an unhandled failure (pending/IO/writeback).',\n hint: 'Add a failure writeback for this task or handle errors explicitly upstream; avoid fire-and-forget swallowing errors.',\n actionTag: config.triggerName,\n kind: 'task_runner_unhandled_failure',\n trigger: {\n kind: 'task',\n name: config.triggerName,\n },\n }).pipe(Effect.zipRight(Effect.logError('TaskRunner error', cause))),\n ),\n 
) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n\n/**\n * makeTaskRunner:\n * - Reuses FlowRuntime concurrency semantics (sequential/parallel/latest/exhaust).\n * - Splits a single trigger into: pending (separate txn) → IO → success/failure (separate txn).\n */\nexport const makeTaskRunner = <Payload, Sh extends AnyModuleShape, R, A = void, E = never>(\n stream: Stream.Stream<Payload>,\n mode: TaskRunnerMode,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> => {\n if (mode === 'latest') {\n return Effect.gen(function* () {\n const taskIdRef = yield* Ref.make(0)\n const currentFiberRef = yield* Ref.make<Fiber.RuntimeFiber<void, never> | undefined>(undefined)\n\n const start = (payload: Payload) =>\n Effect.gen(function* () {\n const taskId = yield* Ref.updateAndGet(taskIdRef, (n) => n + 1)\n\n const prev = yield* Ref.get(currentFiberRef)\n if (prev) {\n // Do not wait for the old fiber to fully end (avoid blocking new triggers); writeback is guarded by taskId.\n yield* Fiber.interruptFork(prev)\n }\n\n const canWriteBack = Ref.get(taskIdRef).pipe(Effect.map((current) => current === taskId))\n\n const fiber = yield* Effect.fork(\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config, canWriteBack),\n )\n\n yield* Ref.set(currentFiberRef, fiber)\n })\n\n return yield* Stream.runForEach(stream, start)\n })\n }\n\n if (mode === 'exhaust') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n const busyRef = yield* Ref.make(false)\n\n const mapper = (payload: Payload) =>\n Effect.gen(function* () {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n // Ignore trigger: no pending transaction is produced.\n return\n }\n try {\n yield* runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config)\n } finally {\n yield* Ref.set(busyRef, false)\n }\n })\n\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(mapper, { concurrency })))\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n if (mode === 'parallel') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n\n return yield* Stream.runDrain(\n stream.pipe(\n Stream.mapEffect((payload) => runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config), {\n concurrency,\n }),\n ),\n )\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n // mode === \"task\"(sequential)\n return Stream.runForEach(stream, (payload) =>\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config),\n ) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n}\n","import { Cause, Effect, FiberRef, Layer, Logger } from 'effect'\nimport {\n projectJsonValue,\n type DowngradeReason as JsonDowngradeReason,\n type JsonValue,\n type JsonValueProjectionStats,\n} from '../../observability/jsonValue.js'\nimport type * as ReplayLog from './ReplayLog.js'\nimport {\n toSerializableErrorSummary,\n type DowngradeReason as ErrorDowngradeReason,\n type SerializableErrorSummary,\n} from './errorSummary.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport type * as ProcessProtocol from './process/protocol.js'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\n\nexport interface TriggerRef {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\ntype TraceEventType = `trace:${string}`\ntype GenericTraceEventType = Exclude<\n TraceEventType,\n 'trace:trait:converge' | 'trace:trait:check' | 'trace:trait:validate'\n>\n\n/**\n * ReplayEventRef:\n * - Replay event structure referenced from 
Debug events.\n * - Based on ReplayLog.Event, enriched with txn/trigger association fields for Devtools aggregation and explanation.\n */\nexport type ReplayEventRef = ReplayLog.ReplayLogEvent & {\n readonly txnId?: string\n readonly trigger?: TriggerRef\n}\n\nexport type Event =\n | {\n readonly type: 'module:init'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'module:destroy'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:phase'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly phase: 'init' | 'run' | 'destroy' | 'platform'\n readonly name: string\n readonly payload?: unknown\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'action:dispatch'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly action: unknown\n readonly actionTag?: string\n readonly unknownAction?: boolean\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'state:update'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly state: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * Optional: Static IR digest aligned with FieldPathId/StepId (for consumer-side reverse-mapping & alignment).\n * - When missing or mismatched, consumers must not attempt to reverse-map rootIds -> rootPaths (avoid wrong UI).\n * - Allowed to be omitted on near-zero-cost diagnostics=off paths.\n */\n readonly staticIrDigest?: string\n /**\n * Optional: the affected scope aggregated by this commit (field-level dirty-set).\n * - Populated by Runtime at commit time;\n * - Must stay slim and serializable;\n * - Devtools can use it to explain \"why converge/validate ran / why it degraded to full\".\n */\n 
readonly dirtySet?: unknown\n /**\n * Optional: patch count aggregated by this commit (from StateTransaction).\n * - Populated by Runtime only on transaction paths.\n * - Devtools can use it as a lightweight transaction summary metric.\n */\n readonly patchCount?: number\n /**\n * Optional: whether patch records were truncated (bounded) under full instrumentation.\n */\n readonly patchesTruncated?: boolean\n /**\n * Optional: truncation reason code (stable enum).\n */\n readonly patchesTruncatedReason?: 'max_patches'\n /**\n * Optional: commit mode (normal/batched/low-priority, etc).\n * - Populated by Runtime;\n * - Default is chosen by the caller (typically \"normal\").\n */\n readonly commitMode?: string\n /**\n * Optional: external visibility priority (normal/low).\n * - Populated by Runtime.\n * - Mainly used by React external subscription scheduling (avoid unnecessary renders).\n */\n readonly priority?: string\n /**\n * Optional: transaction origin kind (origin.kind) that triggered this state commit:\n * - e.g. \"action\" / \"source-refresh\" / \"service-callback\" / \"devtools\".\n * - Populated by Runtime only on StateTransaction-based paths.\n * - Devtools can distinguish app transactions vs devtools time-travel operations.\n */\n readonly originKind?: string\n /**\n * Optional: transaction origin name (origin.name) that triggered this state commit:\n * - e.g. 
action dispatch / fieldPath / task:success/task:failure, etc.\n * - Populated by Runtime only on StateTransaction-based paths.\n */\n readonly originName?: string\n /**\n * Reserved: Trait converge summary (for Devtools window-level stats / TopN costs / degrade reasons, etc.).\n * - Phase 2: field slot only; structure is not fixed.\n * - Later phases will align with the Trait/Replay event model into an explainable structure.\n */\n readonly traitSummary?: unknown\n /**\n * Reserved: replay event associated with this transaction (re-emit source of truth from ReplayLog).\n * - Phase 2: field slot only.\n * - Later phases will align with ReplayLog.Event structure.\n */\n readonly replayEvent?: ReplayEventRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly identity: ProcessProtocol.ProcessInstanceIdentity\n readonly severity: 'info' | 'warning' | 'error'\n readonly eventSeq: number\n readonly timestampMs: number\n readonly trigger?: ProcessProtocol.ProcessTrigger\n readonly dispatch?: {\n readonly moduleId: string\n readonly instanceId: string\n readonly actionId: string\n }\n readonly error?: ProcessProtocol.SerializableErrorSummary\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly cause: unknown\n readonly phase?: 'init' | 'run' | 'destroy' | 'platform'\n readonly hook?: 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n readonly taskId?: string\n readonly opSeq?: number\n readonly origin?: string\n readonly txnSeq?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 
'diagnostic'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n readonly txnSeq?: number\n readonly txnId?: string\n readonly trigger?: TriggerRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n /**\n * trace:* events:\n * - Extension hook for runtime tracing / Playground / Alignment Lab.\n * - Only the type prefix and moduleId are standardized; payload shape is defined by higher layers (e.g. spanId/attributes in data).\n */\n | {\n readonly type: 'trace:trait:converge'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:check'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'trace:trait:validate'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data: JsonValue\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: GenericTraceEventType\n readonly moduleId?: string\n readonly instanceId?: string\n readonly data?: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n\nexport interface Sink {\n readonly record: (event: Event) => Effect.Effect<void>\n}\nexport const currentDebugSinks = FiberRef.unsafeMake<ReadonlyArray<Sink>>([])\nexport const currentRuntimeLabel = FiberRef.unsafeMake<string | undefined>(undefined)\nexport const currentTxnId = FiberRef.unsafeMake<string | undefined>(undefined)\nexport const 
currentOpSeq = FiberRef.unsafeMake<number | undefined>(undefined)\nexport type DiagnosticsLevel = 'off' | 'light' | 'sampled' | 'full'\nexport const currentDiagnosticsLevel = FiberRef.unsafeMake<DiagnosticsLevel>('off')\n\nexport const diagnosticsLevel = (level: DiagnosticsLevel): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentDiagnosticsLevel as any, () => level) as Layer.Layer<any, never, never>\n\nexport interface TraitConvergeDiagnosticsSamplingConfig {\n /**\n * Sample once every N txns (deterministic, based on stable txnSeq).\n * - 1: sample every txn (timing granularity similar to full, while keeping payload slim)\n */\n readonly sampleEveryN: number\n /**\n * Max number of TopK hotspots to output (recommended ≤ 3).\n */\n readonly topK: number\n}\n\nexport const currentTraitConvergeDiagnosticsSampling = FiberRef.unsafeMake<TraitConvergeDiagnosticsSamplingConfig>({\n sampleEveryN: 32,\n topK: 3,\n})\n\nexport const traitConvergeDiagnosticsSampling = (\n config: TraitConvergeDiagnosticsSamplingConfig,\n): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentTraitConvergeDiagnosticsSampling as any, () => config) as Layer.Layer<\n any,\n never,\n never\n >\n\nexport const appendSinks = (sinks: ReadonlyArray<Sink>): Layer.Layer<any, never, never> =>\n Layer.fiberRefLocallyScopedWith(currentDebugSinks, (current) => [...current, ...sinks]) as Layer.Layer<\n any,\n never,\n never\n >\n\nexport type RuntimeDebugEventKind =\n | 'action'\n | 'state'\n | 'service'\n | 'process'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'lifecycle'\n | 'react-render'\n | 'devtools'\n | 'diagnostic'\n | (string & {})\n\nexport interface RuntimeDebugEventRef {\n readonly eventId: string\n readonly eventSeq: number\n readonly moduleId: string\n readonly instanceId: string\n readonly runtimeLabel?: string\n readonly txnSeq: number\n readonly txnId?: string\n /**\n * linkId:\n * - Current operation chain id (shared by 
boundary ops in the same chain).\n * - Created by Runtime at the boundary root and propagated via FiberRef across nested/cross-module chains.\n */\n readonly linkId?: string\n readonly timestamp: number\n readonly kind: RuntimeDebugEventKind\n readonly label: string\n readonly meta?: JsonValue\n readonly errorSummary?: SerializableErrorSummary\n readonly downgrade?: {\n readonly reason?: 'non_serializable' | 'oversized' | 'unknown'\n }\n}\n\nexport type TxnLaneEvidenceReason =\n | 'disabled'\n | 'forced_off'\n | 'forced_sync'\n | 'queued_non_urgent'\n | 'preempted_by_urgent'\n | 'budget_yield'\n | 'coalesced'\n | 'canceled'\n | 'max_lag_forced'\n | 'starvation_protection'\n\nexport type TxnLaneNonUrgentYieldReason = 'none' | 'input_pending' | 'budget_exceeded' | 'forced_frame_yield'\n\nexport type TxnLaneEvidence = {\n readonly anchor: {\n readonly moduleId: string\n readonly instanceId: string\n readonly txnSeq: number\n readonly opSeq?: number\n }\n readonly lane: 'urgent' | 'nonUrgent'\n readonly kind: string\n readonly policy: {\n readonly enabled: boolean\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n readonly configScope: 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'\n readonly budgetMs: number\n readonly debounceMs: number\n readonly maxLagMs: number\n readonly allowCoalesce: boolean\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n readonly queueMode?: 'fifo' | 'lanes'\n }\n readonly backlog: {\n readonly pendingCount: number\n readonly ageMs?: number\n readonly coalescedCount?: number\n readonly canceledCount?: number\n }\n readonly budget?: {\n readonly budgetMs?: number\n readonly sliceDurationMs?: number\n readonly yieldCount?: number\n readonly yielded?: boolean\n readonly yieldReason?: TxnLaneNonUrgentYieldReason\n }\n readonly starvation?: {\n readonly triggered?: boolean\n readonly reason?: string\n }\n readonly reasons: ReadonlyArray<TxnLaneEvidenceReason>\n}\n\nlet nextGlobalEventSeq = 0\n\nexport const 
clearRuntimeDebugEventSeq = (): void => {\n nextGlobalEventSeq = 0\n}\n\nconst nextEventSeq = (): number => {\n nextGlobalEventSeq += 1\n return nextGlobalEventSeq\n}\n\nconst makeEventId = (instanceId: string, eventSeq: number): string => `${instanceId}::e${eventSeq}`\n\ntype DowngradeReason = JsonDowngradeReason | ErrorDowngradeReason\n\nconst mergeDowngrade = (\n current: DowngradeReason | undefined,\n next: DowngradeReason | undefined,\n): DowngradeReason | undefined => {\n if (!current) return next\n if (!next) return current\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\n// In browsers, to reduce duplicated noise caused by React StrictMode, etc.,\n// de-duplicate lifecycle:error and diagnostic events: print the same moduleId+payload only once.\nconst browserLifecycleSeen = new Set<string>()\nconst browserDiagnosticSeen = new Set<string>()\n\n// Align trace:react-render events with the most recent state:update txn (UI-only association).\nconst lastTxnByInstance = new Map<string, { readonly txnId: string; readonly txnSeq: number }>()\n\n// trace:react-render / trace:react-selector may enter the sink before state:update (reordering due to concurrency/scheduling).\n// To provide usable txn anchors in Devtools/UI, we allow a one-time backfill for refs missing txn fields.\nconst pendingTxnAlignmentByInstance = new Map<string, Array<RuntimeDebugEventRef>>()\n\nconst enqueuePendingTxnAlignment = (instanceId: string, ref: RuntimeDebugEventRef): void => {\n const list = pendingTxnAlignmentByInstance.get(instanceId)\n if (!list) {\n pendingTxnAlignmentByInstance.set(instanceId, [ref])\n return\n }\n list.push(ref)\n if (list.length > 64) {\n list.shift()\n }\n}\n\nconst backfillPendingTxnAlignment = (\n instanceId: string,\n txn: { readonly txnId: string; readonly txnSeq: number },\n): void => {\n const pending = 
pendingTxnAlignmentByInstance.get(instanceId)\n if (!pending || pending.length === 0) {\n pendingTxnAlignmentByInstance.delete(instanceId)\n return\n }\n\n for (const ref of pending) {\n const anyRef: any = ref as any\n if (anyRef.txnId == null) {\n anyRef.txnId = txn.txnId\n }\n if (typeof anyRef.txnSeq !== 'number' || anyRef.txnSeq <= 0) {\n anyRef.txnSeq = txn.txnSeq\n }\n }\n\n pendingTxnAlignmentByInstance.delete(instanceId)\n}\n\nconst lifecycleErrorLog = (event: Extract<Event, { readonly type: 'lifecycle:error' }>) => {\n const moduleId = event.moduleId ?? 'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>, {\n renderErrorCause: true,\n })\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const message = `[Logix][module=${moduleId}] lifecycle:error\\n${causePretty}`\n\n return Effect.logError(message).pipe(\n Effect.annotateLogs({\n 'logix.moduleId': moduleId,\n 'logix.event': 'lifecycle:error',\n 'logix.cause': causePretty,\n }),\n )\n}\n\nconst diagnosticLog = (event: Extract<Event, { readonly type: 'diagnostic' }>) => {\n const moduleId = event.moduleId ?? 'unknown'\n const header = `[Logix][module=${moduleId}] diagnostic(${event.severity})`\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n const msg = `${header}\\n${detail}`\n\n const base =\n event.severity === 'warning'\n ? Effect.logWarning(msg)\n : event.severity === 'info'\n ? 
Effect.logInfo(msg)\n : Effect.logError(msg)\n\n const annotations: Record<string, unknown> = {\n 'logix.moduleId': moduleId,\n 'logix.event': `diagnostic(${event.severity})`,\n 'logix.diagnostic.code': event.code,\n 'logix.diagnostic.message': event.message,\n }\n if (event.hint) {\n annotations['logix.diagnostic.hint'] = event.hint\n }\n if (event.actionTag) {\n annotations['logix.diagnostic.actionTag'] = event.actionTag\n }\n\n return base.pipe(Effect.annotateLogs(annotations))\n}\n\n/**\n * Default Layer composition based on FiberRef.currentDebugSinks:\n * - Uses Layer.locallyScoped to inject Debug sinks via FiberRef state.\n * - Avoids misusing FiberRef as a Context.Tag.\n */\nexport const noopLayer = Layer.locallyScoped(currentDebugSinks, [])\n\n/**\n * errorOnlyLayer:\n * - Default DebugSink implementation that only cares about lifecycle:error events.\n * - Suitable as a \"minimum observability\" layer so fatal errors don't silently disappear.\n * - Other events (module:init/destroy, action:dispatch, state:update) are not recorded by default.\n */\nconst errorOnlySink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? diagnosticLog(event)\n : Effect.void,\n}\n\nexport const errorOnlyLayer = Layer.locallyScoped(currentDebugSinks, [errorOnlySink])\n\nexport const isErrorOnlyOnlySinks = (sinks: ReadonlyArray<Sink>): boolean => sinks.length === 1 && sinks[0] === errorOnlySink\n\n/**\n * consoleLayer:\n * - Full debug layer that logs all Debug events via Effect logs (logfmt / structured).\n * - Suitable as an observability layer for general environments (Node / tests).\n */\nconst consoleSink: Sink = {\n record: (event: Event) =>\n event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? 
diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event }),\n}\n\nexport const consoleLayer = Layer.locallyScoped(currentDebugSinks, [consoleSink])\n\nconst isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined'\n\n// Shared browser console rendering logic used by the default DebugSink and browserConsoleLayer.\nconst renderBrowserConsoleEvent = (event: Event): Effect.Effect<void> => {\n // trace:* events: shown as separate groups in browsers for Playground / DevTools observation.\n if (typeof (event as any).type === 'string' && (event as any).type.startsWith('trace:')) {\n const moduleId = (event as any).moduleId ?? 'unknown'\n const type = (event as any).type\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c trace %c' + moduleId + '%c ' + String(type),\n 'color:#6b7280;font-weight:bold', // tag\n 'color:#3b82f6', // label\n 'color:#9ca3af', // module id\n 'color:#6b7280', // type\n )\n // eslint-disable-next-line no-console\n console.log(event)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'lifecycle:error') {\n const moduleId = event.moduleId ?? 
'unknown'\n const causePretty = (() => {\n try {\n return Cause.pretty(event.cause as Cause.Cause<unknown>, { renderErrorCause: true })\n } catch {\n try {\n return JSON.stringify(event.cause, null, 2)\n } catch {\n return String(event.cause)\n }\n }\n })()\n\n const key = `${moduleId}|${causePretty}`\n if (browserLifecycleSeen.has(key)) {\n return Effect.void\n }\n browserLifecycleSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c lifecycle:error %c' + moduleId,\n 'color:#ef4444;font-weight:bold', // tag\n 'color:#ef4444', // label\n 'color:#9ca3af', // module id\n )\n // eslint-disable-next-line no-console\n console.error(causePretty)\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n if (event.type === 'diagnostic') {\n const moduleId = event.moduleId ?? 'unknown'\n const detail = `code=${event.code} message=${event.message}${\n event.actionTag ? ` action=${event.actionTag}` : ''\n }${event.hint ? `\\nhint: ${event.hint}` : ''}`\n\n const color =\n event.severity === 'warning' ? 'color:#d97706' : event.severity === 'info' ? 'color:#3b82f6' : 'color:#ef4444'\n\n const label =\n event.severity === 'warning'\n ? 'diagnostic(warning)'\n : event.severity === 'info'\n ? 
'diagnostic(info)'\n : 'diagnostic(error)'\n\n const key = `${moduleId}|${event.code}|${event.message}`\n if (browserDiagnosticSeen.has(key)) {\n return Effect.void\n }\n browserDiagnosticSeen.add(key)\n\n return Effect.sync(() => {\n // eslint-disable-next-line no-console\n console.groupCollapsed(\n '%c[Logix]%c ' + label + '%c module=' + moduleId,\n 'color:#6b7280;font-weight:bold',\n color,\n 'color:#9ca3af',\n )\n if (event.severity === 'warning') {\n // eslint-disable-next-line no-console\n console.warn(detail)\n } else if (event.severity === 'info') {\n // eslint-disable-next-line no-console\n console.info(detail)\n } else {\n // eslint-disable-next-line no-console\n console.error(detail)\n }\n // eslint-disable-next-line no-console\n console.groupEnd()\n })\n }\n\n // Other events are not printed to the browser console by default to avoid being too noisy during development.\n // For internal debug events, use a custom Debug sink or use consoleLayer in Node.\n return Effect.void\n}\n\n/**\n * Browser console debug layer:\n * - In browsers, uses console.groupCollapsed + colored labels to simulate pretty logger grouping.\n * - In non-browser environments, falls back to consoleLayer's Effect logging implementation.\n */\nconst browserConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n // Non-browser: fall back to consoleLayer behavior (Effect.log*).\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic'\n ? 
diagnosticLog(event)\n : Effect.logDebug({ debugEvent: event })\n }\n\n return renderBrowserConsoleEvent(event)\n },\n}\n\nexport const browserConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserConsoleSink])\n\n/**\n * Browser diagnostic-only debug layer:\n * - In browsers, prints only lifecycle:error + diagnostic(warning/error) via console.groupCollapsed.\n * - Drops trace:* and other high-frequency events from the browser console (use DevtoolsHub instead).\n * - In non-browser environments, behaves like errorOnlySink (Effect.log*).\n */\nconst browserDiagnosticConsoleSink: Sink = {\n record: (event: Event) => {\n if (!isBrowser) {\n return event.type === 'lifecycle:error'\n ? lifecycleErrorLog(event)\n : event.type === 'diagnostic' && event.severity !== 'info'\n ? diagnosticLog(event)\n : Effect.void\n }\n\n return event.type === 'lifecycle:error' || (event.type === 'diagnostic' && event.severity !== 'info')\n ? renderBrowserConsoleEvent(event)\n : Effect.void\n },\n}\n\nexport const browserDiagnosticConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserDiagnosticConsoleSink])\n\n/**\n * Browser-friendly Logger layer: replaces the default logger with Effect's pretty logger (browser mode).\n * - Avoids hand-written console styles; reuses Effect's colored/grouped formatting.\n * - Safely degrades to the default logger in server environments.\n */\nexport const browserPrettyLoggerLayer = Logger.replace(\n Logger.defaultLogger,\n Logger.prettyLogger({ mode: 'browser', colors: true }),\n)\n\n/**\n * defaultLayer:\n * - Public default layer; currently equivalent to errorOnlyLayer.\n * - Records lifecycle:error only, avoiding a large volume of action/state logs by default.\n */\nexport const defaultLayer = errorOnlyLayer\n\nexport const record = (event: Event) =>\n Effect.gen(function* () {\n const sinks = yield* FiberRef.get(currentDebugSinks)\n\n // Fast path: production default installs errorOnlyLayer (sinks=1).\n // Avoid paying diagnostics 
FiberRef + enrichment costs for high-frequency events that are always dropped by errorOnly.\n if (isErrorOnlyOnlySinks(sinks)) {\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic') {\n if (event.severity !== 'info') {\n yield* diagnosticLog(event)\n } else {\n yield* Effect.void\n }\n return\n }\n yield* Effect.void\n return\n }\n\n // Fast path: when no sinks are installed, only a small subset of events are ever surfaced.\n // Avoid paying per-event FiberRef + enrichment costs for high-frequency events like state:update.\n if (sinks.length === 0) {\n if (isBrowser) {\n if (event.type === 'lifecycle:error' || event.type === 'diagnostic') {\n yield* renderBrowserConsoleEvent(event)\n return\n }\n yield* Effect.void\n return\n }\n\n if (event.type === 'lifecycle:error') {\n yield* lifecycleErrorLog(event)\n return\n }\n if (event.type === 'diagnostic') {\n yield* diagnosticLog(event)\n return\n }\n yield* Effect.void\n return\n }\n\n const enriched = event as Event\n\n const diagnosticsLevel = yield* FiberRef.get(currentDiagnosticsLevel)\n\n // Enrich Debug.Event with basic fields (enabled only when diagnosticsLevel!=off):\n // - timestamp: for Devtools/Timeline/Overview time aggregation; avoids UI-side \"first observed time\" distortion.\n // - runtimeLabel: from FiberRef for grouping by runtime (injected only when not already provided by the event).\n let now: number | undefined\n const getNow = (): number => {\n if (now === undefined) now = Date.now()\n return now\n }\n\n // diagnostics=off: keep near-zero cost; do not add timestamp for high-frequency events (avoid extra Date.now()).\n // Low-frequency events (lifecycle:error/diagnostic) may still get timestamp for easier debugging.\n if (\n enriched.timestamp === undefined &&\n (diagnosticsLevel !== 'off' || enriched.type === 'lifecycle:error' || enriched.type === 'diagnostic')\n ) {\n ;(enriched as any).timestamp = getNow()\n }\n if 
(diagnosticsLevel !== 'off' && enriched.runtimeLabel === undefined) {\n const runtimeLabel = yield* FiberRef.get(currentRuntimeLabel)\n if (runtimeLabel) {\n ;(enriched as any).runtimeLabel = runtimeLabel\n }\n }\n\n if (enriched.type === 'diagnostic' && (enriched as any).txnId === undefined) {\n const txnId = yield* FiberRef.get(currentTxnId)\n if (txnId) {\n ;(enriched as any).txnId = txnId\n }\n }\n // linkId is meaningful only for EffectOp events: avoid extra FiberRef reads on high-frequency events (state:update, etc.).\n if (\n diagnosticsLevel !== 'off' &&\n (enriched as any).type === 'trace:effectop' &&\n (enriched as any).linkId === undefined\n ) {\n const linkId = yield* FiberRef.get(EffectOpCore.currentLinkId)\n if (linkId) {\n ;(enriched as any).linkId = linkId\n }\n }\n\n if (sinks.length === 1) {\n yield* sinks[0]!.record(enriched)\n return\n }\n\n yield* Effect.forEach(sinks, (sink) => sink.record(enriched), { discard: true })\n })\n\n/**\n * Normalizes internal Debug.Event into RuntimeDebugEventRef:\n * - Allows Devtools / Runtime to consume Debug events uniformly.\n * - Does not change DebugSink behavior; provides a structured view only.\n */\nexport const toRuntimeDebugEventRef = (\n event: Event,\n options?: {\n readonly diagnosticsLevel?: DiagnosticsLevel\n readonly eventSeq?: number\n readonly resolveConvergeStaticIr?: (staticIrDigest: string) => ConvergeStaticIrExport | undefined\n readonly onMetaProjection?: (projection: {\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: JsonDowngradeReason\n }) => void\n },\n): RuntimeDebugEventRef | undefined => {\n const diagnosticsLevel = options?.diagnosticsLevel ?? 'full'\n if (diagnosticsLevel === 'off') {\n return undefined\n }\n\n const isLightLike = diagnosticsLevel === 'light' || diagnosticsLevel === 'sampled'\n\n const timestamp =\n typeof event.timestamp === 'number' && Number.isFinite(event.timestamp) ? 
event.timestamp : Date.now()\n\n const moduleIdRaw = (event as any).moduleId\n const moduleId = typeof moduleIdRaw === 'string' && moduleIdRaw.length > 0 ? moduleIdRaw : 'unknown'\n\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n\n const runtimeLabelRaw = (event as any).runtimeLabel\n const runtimeLabel = typeof runtimeLabelRaw === 'string' && runtimeLabelRaw.length > 0 ? runtimeLabelRaw : undefined\n\n const txnSeqRaw = (event as any).txnSeq\n const txnSeq =\n typeof txnSeqRaw === 'number' && Number.isFinite(txnSeqRaw) && txnSeqRaw >= 0 ? Math.floor(txnSeqRaw) : 0\n\n const txnIdRaw = (event as any).txnId\n const txnId =\n typeof txnIdRaw === 'string' && txnIdRaw.length > 0\n ? txnIdRaw\n : txnSeq > 0\n ? `${instanceId}::t${txnSeq}`\n : undefined\n\n const linkId = (() => {\n const linkIdRaw = (event as any).linkId\n if (typeof linkIdRaw === 'string' && linkIdRaw.length > 0) return linkIdRaw\n\n // trace:*: allow fallback extraction from data.meta.linkId (avoid UI diving into deep meta).\n if (typeof (event as any).type !== 'string' || !(event as any).type.startsWith('trace:')) {\n return undefined\n }\n\n const data: any = (event as any).data\n const meta: any = data?.meta\n const linkIdFromMeta = meta?.linkId\n if (typeof linkIdFromMeta === 'string' && linkIdFromMeta.length > 0) return linkIdFromMeta\n\n return undefined\n })()\n\n const eventSeqRaw = options?.eventSeq\n const eventSeq =\n typeof eventSeqRaw === 'number' && Number.isFinite(eventSeqRaw) && eventSeqRaw > 0\n ? 
Math.floor(eventSeqRaw)\n : nextEventSeq()\n const eventId = makeEventId(instanceId, eventSeq)\n\n const base = {\n eventId,\n eventSeq,\n moduleId,\n instanceId,\n runtimeLabel,\n txnSeq,\n txnId,\n linkId,\n timestamp,\n } as const\n\n let downgrade: DowngradeReason | undefined\n\n const withDowngrade = (ref: Omit<RuntimeDebugEventRef, 'downgrade'>): RuntimeDebugEventRef => {\n if (!downgrade) return ref\n return { ...ref, downgrade: { reason: downgrade } }\n }\n\n switch (event.type) {\n case 'module:init':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:init',\n })\n case 'module:destroy':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:destroy',\n })\n case 'lifecycle:phase': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:phase' }>\n const metaInput = isLightLike\n ? { type: 'lifecycle:phase', phase: e.phase, name: e.name }\n : { type: 'lifecycle:phase', phase: e.phase, name: e.name, payload: e.payload }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: e.name,\n meta: metaProjection.value,\n })\n }\n case 'action:dispatch': {\n const action: any = (event as any).action\n const actionTagRaw = (event as any).actionTag\n const tag = typeof actionTagRaw === 'string' && actionTagRaw.length > 0 ? actionTagRaw : (action?._tag ?? action?.type)\n const label = String(tag ?? 'action:dispatch')\n const labelNormalized = label.length > 0 ? label : 'unknown'\n const unknownAction = (event as any).unknownAction === true ? true : undefined\n const metaInput = isLightLike\n ? { actionTag: labelNormalized, ...(unknownAction ? { unknownAction: true } : {}) }\n : { action, ...(unknownAction ? 
{ unknownAction: true } : {}) }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (unknownAction) {\n downgrade = mergeDowngrade(downgrade, 'unknown')\n }\n return withDowngrade({\n ...base,\n kind: 'action',\n label: labelNormalized,\n meta: metaProjection.value,\n })\n }\n case 'state:update': {\n const e = event as Extract<Event, { readonly type: 'state:update' }>\n\n const resolveDirtySetRootPaths = (): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n\n const digest = e.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return undefined\n\n const rootIds = dirtySet.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const rawId of rootIds) {\n if (typeof rawId !== 'number' || !Number.isFinite(rawId)) continue\n const id = Math.floor(rawId)\n if (id < 0) continue\n const path = (fieldPaths as any)[id] as unknown\n if (!Array.isArray(path) || path.length === 0) continue\n if (!path.every((seg) => typeof seg === 'string' && seg.length > 0)) continue\n out.push(path as any)\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const dirtySetWithRootPaths = (() => {\n const rootPaths = resolveDirtySetRootPaths()\n if (!rootPaths) return e.dirtySet\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return e.dirtySet\n return { ...dirtySet, rootPaths }\n })()\n\n const metaInput = isLightLike\n ? {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n }\n : {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n traitSummary: e.traitSummary,\n replayEvent: e.replayEvent,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (txnId) {\n lastTxnByInstance.set(instanceId, { txnId, txnSeq })\n backfillPendingTxnAlignment(instanceId, { txnId, txnSeq })\n }\n return withDowngrade({\n ...base,\n kind: 'state',\n label: 'state:update',\n meta: metaProjection.value,\n })\n }\n case 'process:start':\n case 'process:stop':\n case 'process:restart':\n case 'process:trigger':\n case 'process:dispatch':\n case 'process:error': {\n const e = event as Extract<\n Event,\n {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n }\n >\n\n const ts2 = typeof e.timestampMs === 'number' && Number.isFinite(e.timestampMs) ? 
e.timestampMs : timestamp\n\n const metaInput = {\n identity: e.identity,\n severity: e.severity,\n eventSeq: e.eventSeq,\n timestampMs: e.timestampMs,\n trigger: e.trigger,\n dispatch: e.dispatch,\n error: e.error,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const errorSummary =\n e.type === 'process:error' || e.type === 'process:restart'\n ? (e.error as any as SerializableErrorSummary | undefined)\n : undefined\n\n return withDowngrade({\n ...base,\n timestamp: ts2,\n kind: 'process',\n label: e.type,\n meta: metaProjection.value,\n errorSummary,\n })\n }\n case 'lifecycle:error': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:error' }>\n const summary = toSerializableErrorSummary(e.cause)\n downgrade = mergeDowngrade(downgrade, summary.downgrade)\n const metaInput = isLightLike\n ? 
{ type: 'lifecycle:error', phase: e.phase, name: e.hook }\n : {\n type: 'lifecycle:error',\n phase: e.phase,\n name: e.hook,\n hook: e.hook,\n taskId: e.taskId,\n origin: e.origin,\n txnSeq: e.txnSeq,\n opSeq: e.opSeq,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'lifecycle:error',\n meta: metaProjection.value,\n errorSummary: summary.errorSummary,\n })\n }\n case 'diagnostic': {\n const e = event as Extract<Event, { readonly type: 'diagnostic' }>\n const metaInput = {\n code: e.code,\n severity: e.severity,\n message: e.message,\n hint: e.hint,\n actionTag: e.actionTag,\n kind: e.kind,\n trigger: e.trigger,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.code,\n meta: metaProjection.value,\n })\n }\n default: {\n if (typeof event.type !== 'string' || !event.type.startsWith('trace:')) {\n return undefined\n }\n\n // trace:txn-lane: slim evidence for Txn Lanes (lane/backlog/reasons), used for Devtools summary and offline export.\n if (event.type === 'trace:txn-lane') {\n const data: any = (event as any).data\n const evidence = data?.evidence ?? data\n\n const metaProjection = projectJsonValue(evidence)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const label =\n typeof evidence?.kind === 'string' && evidence.kind.length > 0 ? 
String(evidence.kind) : 'txn-lane'\n\n return withDowngrade({\n ...base,\n kind: 'txn-lane',\n label,\n meta: metaProjection.value,\n })\n }\n\n // trace:react-render / trace:react-selector: keep slim meta only (field trimming is handled by JsonValue projection).\n if (event.type === 'trace:react-render' || event.type === 'trace:react-selector') {\n const data: any = (event as any).data\n const metaProjection = projectJsonValue(\n isLightLike\n ? {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n }\n : {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n meta: data?.meta,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n const label =\n typeof data?.componentLabel === 'string' && data.componentLabel.length > 0\n ? data.componentLabel\n : event.type === 'trace:react-selector'\n ? 'react-selector'\n : 'react-render'\n const last = lastTxnByInstance.get(instanceId)\n const txnSeqFromMeta =\n typeof data?.meta?.txnSeq === 'number' && Number.isFinite(data.meta.txnSeq) && data.meta.txnSeq >= 0\n ? Math.floor(data.meta.txnSeq)\n : undefined\n const txnIdFromMeta =\n typeof data?.meta?.txnId === 'string' && data.meta.txnId.length > 0 ? data.meta.txnId : undefined\n const txnIdAligned = txnIdFromMeta ?? base.txnId ?? last?.txnId\n const txnSeqAligned = txnSeqFromMeta ?? 
(base.txnSeq > 0 ? base.txnSeq : (last?.txnSeq ?? base.txnSeq))\n const ref = withDowngrade({\n ...base,\n txnId: txnIdAligned,\n txnSeq: txnSeqAligned,\n kind: event.type === 'trace:react-selector' ? 'react-selector' : 'react-render',\n label,\n meta: metaProjection.value,\n })\n\n if (instanceId !== 'unknown' && (ref.txnId == null || ref.txnSeq <= 0)) {\n enqueuePendingTxnAlignment(instanceId, ref)\n }\n\n return ref\n }\n\n // trace:selector:eval: SelectorGraph evaluation evidence within commit (used for txn→selector→render causal chain).\n if (event.type === 'trace:selector:eval') {\n const data: any = (event as any).data\n const metaInput = {\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n changed: data?.changed,\n evalMs: data?.evalMs,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:exec-vm: Exec VM hit/miss evidence (049). In light tier we keep minimal summary fields.\n if (event.type === 'trace:exec-vm') {\n const data: any = (event as any).data\n const metaInput = {\n version: data?.version,\n stage: data?.stage,\n hit: data?.hit,\n reasonCode: data?.reasonCode ?? 
data?.reason,\n reasonDetail: data?.reasonDetail,\n execIrVersion: data?.execIrVersion,\n execIrHash: data?.execIrHash,\n serviceId: data?.serviceId,\n implId: data?.implId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:converge: converge evidence must be exportable (JsonValue hard gate) and trims heavy fields in light tier.\n if (event.type === 'trace:trait:converge') {\n const resolveDirtyRootPaths = (args: {\n readonly staticIrDigest: unknown\n readonly rootIds: unknown\n }): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n const digest = args.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const rootIds = args.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const id of rootIds) {\n if (typeof id !== 'number' || !Number.isFinite(id)) continue\n const idx = Math.floor(id)\n if (idx < 0 || idx >= fieldPaths.length) continue\n const path = fieldPaths[idx]\n if (Array.isArray(path)) {\n out.push(path as any)\n }\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const enrichDirtyRootPaths = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n if (!dirty || typeof dirty !== 'object' || Array.isArray(dirty)) return value\n\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n if (!dirtyRootPaths) return value\n\n return {\n ...anyValue,\n dirty: {\n ...(dirty as any),\n rootPaths: dirtyRootPaths,\n },\n } as JsonValue\n }\n\n const stripHeavyLight = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? { reason: (dirty as any).reason } : null),\n ...(Array.isArray((dirty as any).rootIds) ? { rootIds: (dirty as any).rootIds } : null),\n ...(typeof (dirty as any).rootIdsTruncated === 'boolean'\n ? { rootIdsTruncated: (dirty as any).rootIdsTruncated }\n : null),\n ...(dirtyRootPaths ? { rootPaths: dirtyRootPaths } : null),\n }\n : undefined\n\n const { top3, dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const stripHeavySampled = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? 
{ reason: (dirty as any).reason } : null),\n }\n : undefined\n\n const { dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:converge' }>).data\n const metaInput =\n diagnosticsLevel === 'light'\n ? stripHeavyLight(data)\n : diagnosticsLevel === 'sampled'\n ? stripHeavySampled(data)\n : enrichDirtyRootPaths(data)\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:converge',\n label: 'trait:converge',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:check: validation diagnostics must be exportable and stay slim in light tier (keep key fields).\n if (event.type === 'trace:trait:check') {\n const stripHeavy = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n const anyValue = value as any\n const degraded = anyValue.degraded\n const degradedSlim =\n degraded && typeof degraded === 'object' && !Array.isArray(degraded)\n ? { kind: (degraded as any).kind }\n : undefined\n\n const { degraded: _degraded, ...rest } = anyValue\n return (degradedSlim ? { ...rest, degraded: degradedSlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:check' }>).data\n const metaInput = isLightLike ? 
stripHeavy(data) : data\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:check',\n label: 'trait:check',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:validate: validation decision summary must be exportable and slim in light tier (no heavy fields by default).\n if (event.type === 'trace:trait:validate') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:validate' }>).data\n const metaProjection = projectJsonValue(data)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:validate',\n label: 'trait:validate',\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits: final traits snapshot must be exportable and slim in light tier (digest/count).\n if (event.type === 'trace:module:traits') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n digest: data?.digest,\n count: data?.count,\n }\n : {\n digest: data?.digest,\n count: data?.count,\n traits: data?.traits,\n provenanceIndex: data?.provenanceIndex,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits:conflict: conflict details must be exportable; avoid relying on truncated lifecycle:error messages.\n if (event.type === 'trace:module:traits:conflict') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n conflictCount: data?.conflictCount,\n traitIds: data?.traitIds,\n }\n : {\n conflictCount: data?.conflictCount,\n conflicts: data?.conflicts,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:descriptor: keep key anchors even in light tier (avoid data being fully trimmed).\n if (event.type === 'trace:module:descriptor') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n id: data?.id,\n traits: data?.traits,\n source: data?.source,\n }\n : { data }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:effectop: keep slim op meta and prefer EffectOp.meta.moduleId when present.\n if (event.type === 'trace:effectop') {\n const data: any = (event as any).data\n const opMeta: any = data?.meta\n const opKind = (data?.kind ?? 'service') as RuntimeDebugEventKind\n const label = typeof data?.name === 'string' ? data.name : 'effectop'\n const moduleId2 = typeof opMeta?.moduleId === 'string' ? opMeta.moduleId : moduleId\n const txnId2 = typeof opMeta?.txnId === 'string' && opMeta.txnId.length > 0 ? opMeta.txnId : base.txnId\n const txnSeq2 =\n typeof opMeta?.txnSeq === 'number' && Number.isFinite(opMeta.txnSeq) && opMeta.txnSeq >= 0\n ? Math.floor(opMeta.txnSeq)\n : base.txnSeq\n\n const metaInput = isLightLike\n ? 
{\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n meta: opMeta,\n }\n : {\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n payload: data?.payload,\n meta: opMeta,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n moduleId: moduleId2,\n txnId: txnId2,\n txnSeq: txnSeq2,\n kind: opKind,\n label,\n meta: metaProjection.value,\n })\n }\n\n // Other trace:* events: categorize as devtools and trim meta by tier.\n const metaProjection = projectJsonValue(\n isLightLike\n ? {\n data: undefined,\n }\n : {\n data: (event as any).data,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n }\n}\n","export type JsonValue =\n | null\n | boolean\n | number\n | string\n | ReadonlyArray<JsonValue>\n | { readonly [key: string]: JsonValue }\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport const isJsonValue = (input: unknown): input is JsonValue => {\n const seen = new WeakSet<object>()\n\n const loop = (value: unknown, depth: number): value is JsonValue => {\n if (depth > 64) return false\n if (value === null) return true\n\n switch (typeof value) {\n case 'string':\n case 'boolean':\n return true\n case 'number':\n return Number.isFinite(value)\n case 'object': {\n if (Array.isArray(value)) {\n if (seen.has(value)) return false\n seen.add(value)\n for (const item of value) {\n if (!loop(item, depth + 1)) return false\n }\n return true\n }\n\n if (!isPlainRecord(value)) return false\n if (seen.has(value)) return false\n seen.add(value)\n\n for (const v of 
Object.values(value)) {\n if (!loop(v, depth + 1)) return false\n }\n\n return true\n }\n default:\n return false\n }\n }\n\n return loop(input, 0)\n}\n\nexport interface JsonValueProjectionStats {\n readonly dropped: number\n readonly oversized: number\n readonly nonSerializable: number\n}\n\nexport interface JsonValueProjection {\n readonly value: JsonValue\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: DowngradeReason\n}\n\nexport interface JsonValueProjectOptions {\n readonly maxDepth?: number\n readonly maxObjectKeys?: number\n readonly maxArrayLength?: number\n readonly maxStringLength?: number\n readonly maxJsonBytes?: number\n readonly oversizedPreviewBytes?: number\n}\n\nconst defaultOptions: Required<JsonValueProjectOptions> = {\n maxDepth: 6,\n maxObjectKeys: 32,\n maxArrayLength: 32,\n maxStringLength: 256,\n maxJsonBytes: 4 * 1024,\n oversizedPreviewBytes: 256,\n}\n\nconst truncateString = (value: string, maxLen: number, stats: MutableStats): string => {\n if (value.length <= maxLen) return value\n stats.oversized += 1\n return value.slice(0, maxLen)\n}\n\ntype MutableStats = {\n dropped: number\n oversized: number\n nonSerializable: number\n}\n\nconst mergeDowngrade = (current: DowngradeReason | undefined, next: DowngradeReason): DowngradeReason => {\n if (!current) return next\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\nfunction isPlainRecord(value: unknown): value is Record<string, unknown> {\n if (typeof value !== 'object' || value === null) return false\n const proto = Object.getPrototypeOf(value)\n return proto === Object.prototype || proto === null\n}\n\nconst asNumber = (value: number, stats: MutableStats): JsonValue => {\n if (Number.isFinite(value)) return value\n stats.nonSerializable += 1\n return String(value)\n}\n\nconst toJsonValueInternal = (\n input: unknown,\n options: 
Required<JsonValueProjectOptions>,\n stats: MutableStats,\n seen: WeakSet<object>,\n depth: number,\n): JsonValue => {\n if (input === null) return null\n\n switch (typeof input) {\n case 'string':\n return truncateString(input, options.maxStringLength, stats)\n case 'number':\n return asNumber(input, stats)\n case 'boolean':\n return input\n case 'bigint':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'symbol':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'function':\n stats.nonSerializable += 1\n return '[Function]'\n case 'undefined':\n stats.dropped += 1\n return null\n }\n\n // object\n if (depth >= options.maxDepth) {\n stats.oversized += 1\n return '[Truncated]'\n }\n\n if (input instanceof Date) {\n return input.toISOString()\n }\n\n if (input instanceof Error) {\n stats.nonSerializable += 1\n return {\n name: truncateString(input.name, options.maxStringLength, stats),\n message: truncateString(input.message, options.maxStringLength, stats),\n }\n }\n\n if (typeof input === 'object') {\n if (seen.has(input)) {\n stats.nonSerializable += 1\n return '[Circular]'\n }\n seen.add(input)\n }\n\n if (Array.isArray(input)) {\n const out: Array<JsonValue> = []\n const limit = Math.min(input.length, options.maxArrayLength)\n for (let i = 0; i < limit; i++) {\n out.push(toJsonValueInternal(input[i], options, stats, seen, depth + 1))\n }\n if (input.length > limit) {\n stats.oversized += 1\n out.push(`[...${input.length - limit} more]`)\n }\n return out\n }\n\n if (!isPlainRecord(input)) {\n stats.nonSerializable += 1\n return truncateString(String(input), options.maxStringLength, stats)\n }\n\n const entries = Object.entries(input)\n const limit = Math.min(entries.length, options.maxObjectKeys)\n const out: Record<string, JsonValue> = {}\n\n for (let i = 0; i < limit; i++) {\n const [rawKey, rawValue] = entries[i]!\n const key = 
truncateString(rawKey, options.maxStringLength, stats)\n if (rawValue === undefined) {\n stats.dropped += 1\n continue\n }\n out[key] = toJsonValueInternal(rawValue, options, stats, seen, depth + 1)\n }\n\n if (entries.length > limit) {\n stats.oversized += 1\n out.__truncatedKeys = entries.length - limit\n }\n\n return out\n}\n\nexport const projectJsonValue = (input: unknown, options?: JsonValueProjectOptions): JsonValueProjection => {\n const resolved: Required<JsonValueProjectOptions> = { ...defaultOptions, ...(options ?? {}) }\n const stats: MutableStats = { dropped: 0, oversized: 0, nonSerializable: 0 }\n const seen = new WeakSet<object>()\n\n let downgrade: DowngradeReason | undefined\n const value = toJsonValueInternal(input, resolved, stats, seen, 0)\n\n if (stats.nonSerializable > 0) {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n }\n if (stats.oversized > 0) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n }\n\n // Hard gate: ensure JSON.stringify never throws and respect the max byte budget.\n try {\n const json = JSON.stringify(value)\n if (json.length > resolved.maxJsonBytes) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n const preview = json.slice(0, Math.min(resolved.oversizedPreviewBytes, resolved.maxJsonBytes))\n return {\n value: {\n _tag: 'oversized',\n bytes: json.length,\n preview,\n },\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized + 1,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n }\n } catch {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n return {\n value: '[Unserializable]',\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable + 1,\n },\n downgrade,\n }\n }\n\n return {\n value,\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n}\n","import { Cause } from 'effect'\n\nexport type DowngradeReason = 'non_serializable' | 
'oversized' | 'unknown'\n\nexport interface SerializableErrorSummary {\n readonly message: string\n readonly name?: string\n readonly code?: string\n readonly hint?: string\n}\n\nexport interface ErrorSummaryResult {\n readonly errorSummary: SerializableErrorSummary\n readonly downgrade?: DowngradeReason\n}\n\nconst truncate = (value: string, maxLen: number): { readonly value: string; readonly truncated: boolean } => {\n if (value.length <= maxLen) return { value, truncated: false }\n return { value: value.slice(0, maxLen), truncated: true }\n}\n\nconst safeStringify = (value: unknown): { readonly ok: true; readonly json: string } | { readonly ok: false } => {\n try {\n return { ok: true, json: JSON.stringify(value) }\n } catch {\n return { ok: false }\n }\n}\n\nconst getMessageFromUnknown = (cause: unknown): string => {\n if (typeof cause === 'string') return cause\n if (typeof cause === 'number' || typeof cause === 'boolean' || typeof cause === 'bigint') return String(cause)\n if (cause instanceof Error) return cause.message || cause.name || 'Error'\n if (cause && typeof cause === 'object' && 'message' in (cause as any) && typeof (cause as any).message === 'string') {\n return (cause as any).message as string\n }\n\n // Try Effect Cause pretty (best-effort). This may include more details than needed,\n // so callers MUST still treat it as an untrusted/oversized string and truncate.\n try {\n const pretty = Cause.pretty(cause as Cause.Cause<unknown>, { renderErrorCause: true })\n if (typeof pretty === 'string' && pretty.length > 0) return pretty\n } catch {\n // ignore\n }\n\n return 'Unknown error'\n}\n\nexport const toSerializableErrorSummary = (\n cause: unknown,\n options?: {\n readonly maxMessageLength?: number\n },\n): ErrorSummaryResult => {\n const maxMessageLength = options?.maxMessageLength ?? 
256\n\n const messageRaw = getMessageFromUnknown(cause)\n const { value: message, truncated } = truncate(messageRaw, maxMessageLength)\n\n const summary: { message: string; name?: string; code?: string; hint?: string } = {\n message,\n }\n\n if (cause instanceof Error) {\n if (cause.name && cause.name !== 'Error') summary.name = cause.name\n const anyCause = cause as any\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n else if (typeof anyCause.code === 'number' && Number.isFinite(anyCause.code)) summary.code = String(anyCause.code)\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n return {\n errorSummary: summary,\n downgrade: truncated ? 'oversized' : undefined,\n }\n }\n\n if (cause && typeof cause === 'object') {\n const anyCause = cause as any\n if (typeof anyCause.name === 'string' && anyCause.name.length > 0) summary.name = anyCause.name\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n }\n\n // If the original cause isn't JSON-serializable, mark it explicitly.\n const stringifyResult = safeStringify(cause)\n if (!stringifyResult.ok) {\n return {\n errorSummary: summary,\n downgrade: 'non_serializable',\n }\n }\n\n if (truncated) {\n return {\n errorSummary: summary,\n downgrade: 'oversized',\n }\n }\n\n if (message === 'Unknown error') {\n return {\n errorSummary: summary,\n downgrade: 'unknown',\n }\n }\n\n return { errorSummary: summary }\n}\n","// EffectOp core model and middleware composition logic.\n// For higher-level Runtime / Devtools integration, see:\n// specs/000-module-traits-runtime/references/effectop-and-middleware.md\n\nimport { Context, Effect, FiberRef } from 'effect'\n\n/**\n * currentLinkId:\n * - Stores the current operation chain id (linkId) in a FiberRef.\n * - Used to correlate multiple 
boundary ops within the same chain (can be shared across modules via the same FiberRef).\n */\nexport const currentLinkId = FiberRef.unsafeMake<string | undefined>(undefined)\n\n/**\n * OperationPolicy:\n * - Local policy markers (intent only; no rule logic attached).\n *\n * Constraints (enforced by Runtime/middleware together):\n * - Only observation-only capabilities (Observer) may be disabled; global guards must not be disabled.\n */\nexport interface OperationPolicy {\n readonly disableObservers?: boolean\n}\n\n/**\n * OperationRejected:\n * - Unified failure result when a guard rejects execution.\n * - Semantics: explicit failure with no business side effects (rejection must happen before user code executes).\n */\nexport interface OperationRejected {\n readonly _tag: 'OperationRejected'\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}\n\n/**\n * OperationError:\n * - Any boundary operation executed via EffectOp may be explicitly rejected by Guard middleware.\n * - Therefore, the middleware error channel must allow OperationRejected to be added.\n */\nexport type OperationError<E> = E | OperationRejected\n\nexport const makeOperationRejected = (params: {\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}): OperationRejected => ({\n _tag: 'OperationRejected',\n message: params.message,\n kind: params.kind,\n name: params.name,\n linkId: params.linkId,\n details: params.details,\n})\n\n/**\n * EffectOp: a unified representation of an Effect execution at an \"observable boundary\".\n *\n * - Out / Err / Env are the generic parameters of the underlying Effect.\n * - meta carries structured context needed by Devtools / Middleware.\n */\nexport interface EffectOp<Out = unknown, Err = unknown, Env = unknown> {\n readonly id: string\n readonly kind:\n | 'action'\n | 'flow'\n | 
'state'\n | 'service'\n | 'lifecycle'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'devtools'\n readonly name: string\n readonly payload?: unknown\n readonly meta?: {\n /**\n * linkId:\n * - Operation chain id: multiple boundary ops in the same chain must share it.\n * - Runtime ensures this field is populated on all boundary ops.\n */\n linkId?: string\n moduleId?: string\n instanceId?: string\n runtimeLabel?: string\n txnId?: string\n txnSeq?: number\n opSeq?: number\n fieldPath?: string\n deps?: ReadonlyArray<string>\n from?: string\n to?: string\n traitNodeId?: string\n stepId?: string\n resourceId?: string\n key?: unknown\n trace?: ReadonlyArray<string>\n tags?: ReadonlyArray<string>\n policy?: OperationPolicy\n // Reserved extension slot for middleware/devtools to attach extra information.\n readonly [k: string]: unknown\n }\n readonly effect: Effect.Effect<Out, Err, Env>\n}\n\n/**\n * Middleware: the general middleware model for observing / wrapping / guarding EffectOps.\n */\nexport type Middleware = <A, E, R>(op: EffectOp<A, E, R>) => Effect.Effect<A, OperationError<E>, R>\n\nexport type MiddlewareStack = ReadonlyArray<Middleware>\n\n/**\n * EffectOpMiddlewareEnv:\n * - A Service in Effect Env that carries the current Runtime's MiddlewareStack.\n * - Injected by Runtime.ts when constructing a ManagedRuntime.\n * - Runtime code (e.g. 
StateTrait.install) uses this Service to decide which MiddlewareStack to use.\n */\nexport interface EffectOpMiddlewareEnv {\n readonly stack: MiddlewareStack\n}\n\nexport class EffectOpMiddlewareTag extends Context.Tag('Logix/EffectOpMiddleware')<\n EffectOpMiddlewareTag,\n EffectOpMiddlewareEnv\n>() {}\n\n/**\n * composeMiddleware:\n * - Composes Middleware from \"outer to inner\" in declaration order:\n * - stack = [mw1, mw2] => mw1 -> mw2 -> effect -> mw2 -> mw1\n * - Matches the reduceRight example in the reference docs.\n */\nexport const composeMiddleware = (stack: MiddlewareStack): Middleware => {\n return <A, E, R>(op: EffectOp<A, E, R>): Effect.Effect<A, OperationError<E>, R> =>\n stack.reduceRight<Effect.Effect<A, OperationError<E>, R>>(\n (eff, mw) => mw({ ...op, effect: eff } as any) as any,\n op.effect as Effect.Effect<A, OperationError<E>, R>,\n )\n}\n\n/**\n * runWithMiddleware:\n * - Executes a given EffectOp with a MiddlewareStack according to the composition rules.\n * - If the stack is empty, returns op.effect directly.\n */\nexport const runWithMiddleware = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> => {\n return Effect.gen(function* () {\n const existing = yield* FiberRef.get(currentLinkId)\n const metaLinkId = (op.meta as any)?.linkId\n const linkId = typeof metaLinkId === 'string' && metaLinkId.length > 0 ? metaLinkId : (existing ?? op.id)\n\n const nextOp: EffectOp<A, E, R> = {\n ...op,\n meta: {\n ...(op.meta ?? {}),\n linkId,\n },\n }\n\n const program = stack.length ? 
composeMiddleware(stack)(nextOp) : nextOp.effect\n\n // linkId is created at the boundary root and reused for nested ops (the FiberRef is the global single source of truth).\n // NOTE: middleware may explicitly reject with OperationRejected.\n return yield* Effect.locally(currentLinkId, linkId)(program as any)\n }) as Effect.Effect<A, E, R>\n}\n","import { Context, Layer } from 'effect'\nimport type { TraitConvergeRequestedMode } from '../../state-trait/model.js'\nimport type { ReadQueryStrictGateConfig } from './ReadQuery.js'\n\n// Unified runtime env detection, avoiding bundlers inlining NODE_ENV at build time.\nexport const getNodeEnv = (): string | undefined => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const env = (globalThis as any)?.process?.env\n return typeof env?.NODE_ENV === 'string' ? env.NODE_ENV : undefined\n } catch {\n return undefined\n }\n}\n\nexport const isDevEnv = (): boolean => getNodeEnv() !== 'production'\n\nexport type StateTransactionInstrumentation = 'full' | 'light'\n\n/**\n * getDefaultStateTxnInstrumentation:\n * - Currently chooses default instrumentation by NODE_ENV:\n * - dev / test: full (keep patches and snapshots for debugging).\n * - production: light (keep minimal semantics to reduce overhead).\n * - May evolve with finer-grained overrides in Runtime.make / Module.make.\n */\nexport const getDefaultStateTxnInstrumentation = (): StateTransactionInstrumentation => (isDevEnv() ? 
'full' : 'light')\n\n/**\n * Runtime-level StateTransaction config Service:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime.make can read runtime-level defaults from Env.\n *\n * Notes:\n * - instrumentation is only a runtime-level default.\n * - Explicit instrumentation in ModuleImpl / ModuleRuntimeOptions has higher priority.\n */\nexport interface StateTransactionRuntimeConfig {\n readonly instrumentation?: StateTransactionInstrumentation\n /**\n * StateTrait derived converge budget (ms):\n * - Exceeding the budget triggers a soft degrade (freeze derived fields, preserve base writes and 0/1 commit semantics).\n * - Default is 200ms (aligned with the 007 spec threshold).\n */\n readonly traitConvergeBudgetMs?: number\n /**\n * Auto-mode decision budget (ms):\n * - Only used during the decision phase when requestedMode=\"auto\".\n * - Exceeding the budget must immediately fall back to full (and record evidence).\n */\n readonly traitConvergeDecisionBudgetMs?: number\n /**\n * StateTrait converge scheduling strategy:\n * - full: full topo execution (current default; safest).\n * - dirty: minimal triggering based on dirtyPaths + deps in the txn window (requires accurate deps).\n */\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n /**\n * 043: Trait converge time-slicing (explicit opt-in). Disabled by default.\n */\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n /**\n * 060: Txn Lanes (priority scheduling for transaction follow-up work). 
Enabled by default since 062.\n */\n readonly txnLanes?: TxnLanesPatch\n /**\n * Runtime-level per-module overrides (hotfix path):\n * - Only affects converge behavior for the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /**\n * 060: Txn Lanes runtime_module overrides (hotfix / gradual tuning).\n * - Only affects the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionConfigTagImpl extends Context.Tag('@logixjs/core/StateTransactionRuntimeConfig')<\n StateTransactionConfigTagImpl,\n StateTransactionRuntimeConfig\n>() {}\n\nexport const StateTransactionConfigTag = StateTransactionConfigTagImpl\n\nexport type ReadQueryStrictGateRuntimeConfig = ReadQueryStrictGateConfig\n\nclass ReadQueryStrictGateConfigTagImpl extends Context.Tag('@logixjs/core/ReadQueryStrictGateRuntimeConfig')<\n ReadQueryStrictGateConfigTagImpl,\n ReadQueryStrictGateRuntimeConfig\n>() {}\n\nexport const ReadQueryStrictGateConfigTag = ReadQueryStrictGateConfigTagImpl\n\nexport type ReplayMode = 'live' | 'replay'\n\nexport interface ReplayModeConfig {\n readonly mode: ReplayMode\n}\n\nclass ReplayModeConfigTagImpl extends Context.Tag('@logixjs/core/ReplayModeConfig')<\n ReplayModeConfigTagImpl,\n ReplayModeConfig\n>() {}\n\nexport const ReplayModeConfigTag = ReplayModeConfigTagImpl\n\nexport const replayModeLayer = (mode: ReplayMode): Layer.Layer<ReplayModeConfigTagImpl, never, never> =>\n Layer.succeed(ReplayModeConfigTag, { mode })\n\nexport interface StateTransactionTraitConvergeOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n}\n\nexport interface 
TxnLanesPatch {\n /**\n * enabled: whether Txn Lanes is enabled (default on since 062).\n * - undefined: default enabled (when not explicitly configured)\n * - false: disabled (returns to baseline behavior)\n * - true: enabled (only affects scheduling of follow-up work outside the transaction; transactions remain synchronous)\n */\n readonly enabled?: boolean\n /**\n * overrideMode: runtime temporary override (for debugging/rollback/comparison).\n * - forced_off: forcibly disables Txn Lanes (returns to baseline behavior).\n * - forced_sync: forces fully synchronous execution (ignores non-urgent deferral and time-slicing; used for comparisons).\n *\n * Notes:\n * - Override precedence follows StateTransactionOverrides: provider > runtime_module > runtime_default > builtin.\n * - Overrides must be explainable by evidence (see 060 LaneEvidence reasons).\n */\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n /** non-urgent work loop slice budget (ms). */\n readonly budgetMs?: number\n /** Non-urgent backlog coalescing window (ms). */\n readonly debounceMs?: number\n /** Max lag upper bound (ms): exceeding it triggers an explainable starvation protection (forced catch-up). */\n readonly maxLagMs?: number\n /** Whether to allow coalescing/canceling intermediate non-urgent work (must preserve eventual consistency). 
*/\n readonly allowCoalesce?: boolean\n /**\n * Yield strategy for the non-urgent work loop (progressive enhancement).\n * - baseline: uses only time budget + hard upper bound\n * - inputPending: when supported by browsers, also consults `navigator.scheduling.isInputPending`\n */\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n}\n\nexport interface TraitConvergeTimeSlicingPatch {\n /**\n * enabled:\n * - false/undefined: disabled (default)\n * - true: enables time-slicing (only affects computed/link explicitly marked as deferred)\n */\n readonly enabled?: boolean\n /**\n * debounceMs: coalescing interval (ms) for the deferral window; merges high-frequency inputs into one deferred flush.\n */\n readonly debounceMs?: number\n /**\n * maxLagMs: max lag upper bound (ms); exceeding it triggers an explainable forced flush (starvation protection).\n */\n readonly maxLagMs?: number\n}\n\n/**\n * Provider-scoped StateTransactionOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface StateTransactionOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /** 060: Txn Lanes provider-level overrides (delta overrides). */\n readonly txnLanes?: TxnLanesPatch\n /** 060: Txn Lanes provider_module overrides (by moduleId). 
*/\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionOverridesTagImpl extends Context.Tag('@logixjs/core/StateTransactionOverrides')<\n StateTransactionOverridesTagImpl,\n StateTransactionOverrides\n>() {}\n\nexport const StateTransactionOverridesTag = StateTransactionOverridesTagImpl\n\nexport type ConcurrencyLimit = number | 'unbounded'\n\nexport interface ConcurrencyPolicyPatch {\n readonly concurrencyLimit?: ConcurrencyLimit\n readonly losslessBackpressureCapacity?: number\n readonly allowUnbounded?: boolean\n readonly pressureWarningThreshold?: {\n readonly backlogCount?: number\n readonly backlogDurationMs?: number\n }\n readonly warningCooldownMs?: number\n}\n\n/**\n * Runtime-level ConcurrencyPolicy:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime merges sources via a resolver (builtin/runtime_module/provider, etc.).\n *\n * Notes:\n * - overridesByModuleId is used for runtime_module hot-switching (hotfix / gradual tuning) and is lower priority than provider overrides.\n */\nexport interface ConcurrencyPolicy extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyTagImpl extends Context.Tag('@logixjs/core/ConcurrencyPolicy')<\n ConcurrencyPolicyTagImpl,\n ConcurrencyPolicy\n>() {}\n\nexport const ConcurrencyPolicyTag = ConcurrencyPolicyTagImpl\n\n/**\n * Provider-scoped ConcurrencyPolicyOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface ConcurrencyPolicyOverrides extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyOverridesTagImpl extends 
Context.Tag('@logixjs/core/ConcurrencyPolicyOverrides')<\n ConcurrencyPolicyOverridesTagImpl,\n ConcurrencyPolicyOverrides\n>() {}\n\nexport const ConcurrencyPolicyOverridesTag = ConcurrencyPolicyOverridesTagImpl\n","import { Context, Effect, Layer } from 'effect'\n\nexport type ResourceSnapshotPhase = 'idle' | 'loading' | 'success' | 'error'\n\nexport type ReplayLogEvent =\n | {\n readonly _tag: 'ResourceSnapshot'\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n /**\n * Optional: source concurrency policy (e.g. \"switch\" / \"exhaust-trailing\").\n * - Must remain slim & serializable.\n * - Used by Devtools/replay to explain why old results are dropped / why trailing happens.\n */\n readonly concurrency?: string\n readonly phase: ResourceSnapshotPhase\n readonly snapshot: unknown\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n }\n | {\n readonly _tag: 'InvalidateRequest'\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'resource' | 'query'\n readonly target: string\n readonly meta?: unknown\n }\n\nexport type ResourceSnapshotEvent = Extract<ReplayLogEvent, { readonly _tag: 'ResourceSnapshot' }>\n\nexport interface ReplayLogService {\n readonly record: (event: ReplayLogEvent) => Effect.Effect<void>\n readonly snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>>\n readonly resetCursor: Effect.Effect<void>\n readonly consumeNext: (predicate: (event: ReplayLogEvent) => boolean) => Effect.Effect<ReplayLogEvent | undefined>\n readonly consumeNextResourceSnapshot: (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }) => Effect.Effect<ResourceSnapshotEvent | undefined>\n}\n\nexport class ReplayLog extends Context.Tag('@logixjs/core/ReplayLog')<ReplayLog, ReplayLogService>() {}\n\nexport const make = (initial?: ReadonlyArray<ReplayLogEvent>): 
ReplayLogService => {\n const events: Array<ReplayLogEvent> = initial ? Array.from(initial) : []\n let cursor = 0\n\n const consumeNext = (predicate: (event: ReplayLogEvent) => boolean): Effect.Effect<ReplayLogEvent | undefined> =>\n Effect.sync(() => {\n for (let i = cursor; i < events.length; i++) {\n const event = events[i]\n if (!predicate(event)) continue\n cursor = i + 1\n return event\n }\n return undefined\n })\n\n const consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }): Effect.Effect<ResourceSnapshotEvent | undefined> =>\n consumeNext((event): event is ResourceSnapshotEvent => {\n if (event._tag !== 'ResourceSnapshot') return false\n if (event.resourceId !== params.resourceId) return false\n if (event.fieldPath !== params.fieldPath) return false\n if (params.keyHash !== undefined && event.keyHash !== params.keyHash) {\n return false\n }\n if (params.phase !== undefined && event.phase !== params.phase) {\n return false\n }\n return true\n }).pipe(Effect.map((event) => event as ResourceSnapshotEvent | undefined))\n\n return {\n record: (event) => Effect.sync(() => events.push(event)),\n snapshot: Effect.sync(() => events.slice()),\n resetCursor: Effect.sync(() => {\n cursor = 0\n }),\n consumeNext,\n consumeNextResourceSnapshot,\n }\n}\n\nexport const layer = (initial?: ReadonlyArray<ReplayLogEvent>): Layer.Layer<ReplayLog, never, never> =>\n Layer.succeed(ReplayLog, make(initial))\n\nexport const record = (event: ReplayLogEvent): Effect.Effect<void, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.record(event)\n })\n\nexport const snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.snapshot\n})\n\nexport const resetCursor: Effect.Effect<void, never, ReplayLog> = Effect.gen(function* () {\n 
const log = yield* ReplayLog\n yield* log.resetCursor\n})\n\nexport const consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n}): Effect.Effect<ResourceSnapshotEvent | undefined, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.consumeNextResourceSnapshot(params)\n })\n","export type RowId = string\n\nexport interface ListConfig {\n readonly path: string\n readonly trackBy?: string\n}\n\nexport type ListRemovalListener = (rowId: RowId) => void\n\ntype Segment = string | number\n\nconst parseSegments = (path: string): ReadonlyArray<Segment> => {\n if (!path) return []\n return path.split('.').map((seg) => (/^[0-9]+$/.test(seg) ? Number(seg) : seg))\n}\n\nexport const getAtPath = (state: any, path: string): any => {\n if (!path || state == null) return state\n const segments = parseSegments(path)\n let current: any = state\n for (const seg of segments) {\n if (current == null) return undefined\n if (typeof seg === 'number') {\n current = Array.isArray(current) ? current[seg] : current[String(seg)]\n continue\n }\n current = current[seg]\n }\n return current\n}\n\nexport const setAtPathMutating = (draft: unknown, path: string, value: unknown): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const nextKey = segments[i + 1]!\n\n const next = current?.[key as any]\n if (next == null || typeof next !== 'object') {\n current[key as any] = typeof nextKey === 'number' ? 
[] : {}\n }\n current = current[key as any]\n }\n\n const last = segments[segments.length - 1]!\n current[last as any] = value\n}\n\nexport const unsetAtPathMutating = (draft: unknown, path: string): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const next = current?.[key as any]\n if (next == null || typeof next !== 'object') {\n return\n }\n current = next\n }\n\n const last = segments[segments.length - 1]!\n if (Array.isArray(current) && typeof last === 'number') {\n current[last] = undefined\n return\n }\n\n if (current && typeof current === 'object') {\n delete current[last as any]\n }\n}\n\nexport const joinPath = (prefix: string, suffix: string): string => {\n if (!prefix) return suffix\n if (!suffix) return prefix\n return `${prefix}.${suffix}`\n}\n\nexport interface ListItemFieldPath {\n readonly listPath: string\n readonly itemPath: string\n}\n\n/**\n * parseListItemFieldPath:\n * - Recognize a list.item field path like \"items[].profileResource\".\n * - Supports nested arrays: returns listPath/itemPath for the innermost list.\n */\nexport const parseListItemFieldPath = (fieldPath: string): ListItemFieldPath | undefined => {\n const raw = typeof fieldPath === 'string' ? fieldPath.trim() : ''\n if (!raw) return undefined\n\n const segments = raw.split('.').filter(Boolean)\n let lastListSeg = -1\n for (let i = 0; i < segments.length; i++) {\n if (segments[i]!.endsWith('[]')) lastListSeg = i\n }\n if (lastListSeg < 0) return undefined\n\n const strip = (seg: string): string => (seg.endsWith('[]') ? 
seg.slice(0, -2) : seg)\n\n const listPath = segments\n .slice(0, lastListSeg + 1)\n .map(strip)\n .join('.')\n\n const itemPath = segments\n .slice(lastListSeg + 1)\n .map(strip)\n .join('.')\n\n return { listPath, itemPath }\n}\n\nexport const toListItemValuePath = (listPath: string, index: number, itemPath: string): string =>\n itemPath ? `${listPath}.${index}.${itemPath}` : `${listPath}.${index}`\n\ntype ListState = {\n readonly listPath: string\n readonly parentRowId?: RowId\n readonly itemsRef: ReadonlyArray<unknown>\n readonly ids: ReadonlyArray<RowId>\n readonly indexById: ReadonlyMap<RowId, number>\n readonly trackBy?: string\n}\n\nconst readTrackBy = (item: unknown, trackBy: string): unknown => {\n if (!item || typeof item !== 'object') return undefined\n const segments = trackBy.split('.')\n let current: any = item\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n}\n\nconst didReorderByReference = (prevItems: ReadonlyArray<unknown>, nextItems: ReadonlyArray<unknown>): boolean => {\n const buckets = new Map<unknown, Array<number>>()\n for (let i = 0; i < prevItems.length; i++) {\n const item = prevItems[i]\n const list = buckets.get(item) ?? 
[]\n list.push(i)\n buckets.set(item, list)\n }\n\n for (let nextIndex = 0; nextIndex < nextItems.length; nextIndex++) {\n const item = nextItems[nextIndex]\n const q = buckets.get(item)\n if (!q || q.length === 0) continue\n const prevIndex = q.shift()!\n if (prevIndex !== nextIndex) {\n return true\n }\n }\n\n return false\n}\n\nconst hasStableTrackByKeys = (items: ReadonlyArray<unknown>, trackBy: string): boolean =>\n items.every((item) => readTrackBy(item, trackBy) !== undefined)\n\nconst isSameTrackBySequence = (\n prevItems: ReadonlyArray<unknown>,\n nextItems: ReadonlyArray<unknown>,\n trackBy: string,\n): boolean => {\n if (prevItems.length !== nextItems.length) return false\n for (let i = 0; i < prevItems.length; i++) {\n if (!Object.is(readTrackBy(prevItems[i], trackBy), readTrackBy(nextItems[i], trackBy))) {\n return false\n }\n }\n return true\n}\n\nconst reconcileIds = (\n prev: ListState | undefined,\n nextItems: ReadonlyArray<unknown>,\n trackBy?: string,\n makeRowId?: () => RowId,\n): { readonly ids: ReadonlyArray<RowId>; readonly removed: ReadonlyArray<RowId> } => {\n const nextRowId =\n makeRowId ??\n (() => {\n let rowSeq = 0\n return () => {\n rowSeq += 1\n return `r${rowSeq}`\n }\n })()\n\n if (!prev) {\n return {\n ids: nextItems.map(() => nextRowId()),\n removed: [],\n }\n }\n\n if (prev.itemsRef === nextItems) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n\n // Important: keep RowId stable under \"non-structural changes\" (field updates / object clones only),\n // otherwise in-flight state and caches would be invalidated needlessly.\n const sameLength = prev.itemsRef.length === nextItems.length\n if (sameLength) {\n // trackBy case: if the key sequence is identical, we can reuse old ids;\n // otherwise we must reconcile by key (avoid misclassifying clone+reorder as \"no reorder\").\n if (trackBy) {\n const canUseKeys = hasStableTrackByKeys(prev.itemsRef, trackBy) && hasStableTrackByKeys(nextItems, trackBy)\n if (canUseKeys) {\n if 
(isSameTrackBySequence(prev.itemsRef, nextItems, trackBy)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n } else {\n // If trackBy keys are not available, fall back to reference-level detection (keep stability for \"cloned but not reordered\").\n if (!didReorderByReference(prev.itemsRef, nextItems)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n }\n } else if (!didReorderByReference(prev.itemsRef, nextItems)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n }\n\n const keyOf = (item: unknown): unknown => {\n if (!trackBy) return item\n const k = readTrackBy(item, trackBy)\n return k !== undefined ? k : item\n }\n\n const buckets = new Map<unknown, Array<RowId>>()\n for (let i = 0; i < prev.itemsRef.length; i++) {\n const key = keyOf(prev.itemsRef[i])\n const list = buckets.get(key) ?? []\n list.push(prev.ids[i]!)\n buckets.set(key, list)\n }\n\n const ids: Array<RowId> = []\n for (let i = 0; i < nextItems.length; i++) {\n const key = keyOf(nextItems[i])\n const list = buckets.get(key)\n if (list && list.length > 0) {\n ids.push(list.shift()!)\n } else {\n ids.push(nextRowId())\n }\n }\n\n const removed: Array<RowId> = []\n for (const list of buckets.values()) {\n removed.push(...list)\n }\n\n return { ids, removed }\n}\n\nconst buildIndexById = (ids: ReadonlyArray<RowId>): ReadonlyMap<RowId, number> => {\n const map = new Map<RowId, number>()\n for (let i = 0; i < ids.length; i++) {\n map.set(ids[i]!, i)\n }\n return map\n}\n\nexport class RowIdStore {\n private readonly lists = new Map<string, ListState>()\n private readonly removalListeners = new Map<string, Set<ListRemovalListener>>()\n private readonly rowIdIndex = new Map<\n RowId,\n { readonly key: string; readonly listPath: string; readonly index: number }\n >()\n private nextRowSeq = 0\n\n constructor(private readonly instanceId?: string) {}\n\n private listKey = (listPath: string, parentRowId?: RowId): string =>\n parentRowId ? 
`${listPath}@@${parentRowId}` : listPath\n\n private makeRowId = (): RowId => {\n this.nextRowSeq += 1\n return this.instanceId ? `${this.instanceId}::r${this.nextRowSeq}` : `r${this.nextRowSeq}`\n }\n\n private notifyRemoved(listPath: string, rowId: RowId): void {\n const listeners = this.removalListeners.get(listPath)\n if (!listeners || listeners.size === 0) return\n for (const fn of listeners) {\n try {\n fn(rowId)\n } catch {\n // listener failures should never break runtime behavior\n }\n }\n }\n\n private removeDescendants(parentRowId: RowId): void {\n const keys: Array<string> = []\n for (const [k, st] of this.lists.entries()) {\n if (st.parentRowId === parentRowId) keys.push(k)\n }\n\n for (const key of keys) {\n const st = this.lists.get(key)\n if (!st) continue\n this.lists.delete(key)\n for (let i = 0; i < st.ids.length; i++) {\n const rowId = st.ids[i]!\n this.rowIdIndex.delete(rowId)\n this.notifyRemoved(st.listPath, rowId)\n this.removeDescendants(rowId)\n }\n }\n }\n\n onRemoved(listPath: string, listener: ListRemovalListener): () => void {\n const set = this.removalListeners.get(listPath) ?? new Set<ListRemovalListener>()\n set.add(listener)\n this.removalListeners.set(listPath, set)\n return () => {\n const current = this.removalListeners.get(listPath)\n if (!current) return\n current.delete(listener)\n if (current.size === 0) this.removalListeners.delete(listPath)\n }\n }\n\n getRowId(listPath: string, index: number, parentRowId?: RowId): RowId | undefined {\n const state = this.lists.get(this.listKey(listPath, parentRowId))\n return state ? 
state.ids[index] : undefined\n }\n\n getIndex(listPath: string, rowId: RowId): number | undefined {\n const info = this.rowIdIndex.get(rowId)\n if (!info) return undefined\n if (info.listPath !== listPath) return undefined\n return info.index\n }\n\n /**\n * ensureList:\n * - Align RowID mapping for the given listPath with the current items.\n * - Returns the latest ids (index -> RowId).\n */\n ensureList(\n listPath: string,\n items: ReadonlyArray<unknown>,\n trackBy?: string,\n parentRowId?: RowId,\n ): ReadonlyArray<RowId> {\n const key = this.listKey(listPath, parentRowId)\n const prev = this.lists.get(key)\n const { ids, removed } = reconcileIds(prev, items, trackBy ?? prev?.trackBy, this.makeRowId)\n\n const next: ListState = {\n listPath,\n parentRowId,\n itemsRef: items,\n ids,\n indexById: buildIndexById(ids),\n trackBy: trackBy ?? prev?.trackBy,\n }\n this.lists.set(key, next)\n\n if (removed.length > 0) {\n for (const rowId of removed) {\n this.rowIdIndex.delete(rowId)\n this.notifyRemoved(listPath, rowId)\n this.removeDescendants(rowId)\n }\n }\n\n // Refresh the reverse rowId -> index mapping (index changes are allowed).\n for (let i = 0; i < ids.length; i++) {\n const rowId = ids[i]!\n this.rowIdIndex.set(rowId, { key, listPath, index: i })\n }\n\n return ids\n }\n\n /**\n * updateAll:\n * - After each commit, align RowID mappings for all known lists.\n * - configs come from list declarations in StateTraitProgram.spec (may include trackBy).\n */\n updateAll(state: unknown, configs: ReadonlyArray<ListConfig>): void {\n const cfgByPath = new Map<string, ListConfig>()\n const paths: Array<string> = []\n for (const cfg of configs) {\n if (!cfg || typeof cfg.path !== 'string') continue\n const p = cfg.path.trim()\n if (!p) continue\n cfgByPath.set(p, cfg)\n paths.push(p)\n }\n\n const pathSet = new Set(paths)\n\n const parentOf = (path: string): string | undefined => {\n const segments = path.split('.').filter(Boolean)\n let best: string | undefined\n for 
(let i = 1; i < segments.length; i++) {\n const prefix = segments.slice(0, i).join('.')\n if (pathSet.has(prefix)) best = prefix\n }\n return best\n }\n\n const parentByPath = new Map<string, string | undefined>()\n const suffixByPath = new Map<string, string>()\n const childrenByParent = new Map<string | undefined, Array<string>>()\n\n for (const path of paths) {\n const parent = parentOf(path)\n parentByPath.set(path, parent)\n const suffix = parent ? path.slice(parent.length + 1) : path\n suffixByPath.set(path, suffix)\n const list = childrenByParent.get(parent) ?? []\n list.push(path)\n childrenByParent.set(parent, list)\n }\n\n // roots first (and deterministic traversal)\n const roots = (childrenByParent.get(undefined) ?? []).slice().sort((a, b) => a.localeCompare(b))\n\n const visit = (listPath: string, parentRowId: RowId | undefined, listValue: unknown): void => {\n const cfg = cfgByPath.get(listPath)\n const items = Array.isArray(listValue) ? (listValue as ReadonlyArray<unknown>) : []\n const ids = this.ensureList(listPath, items, cfg?.trackBy, parentRowId)\n\n const children = (childrenByParent.get(listPath) ?? []).slice().sort((a, b) => a.localeCompare(b))\n if (children.length === 0) return\n\n for (let i = 0; i < items.length; i++) {\n const row = items[i]\n const rowId = ids[i]\n if (!rowId) continue\n for (const childPath of children) {\n const suffix = suffixByPath.get(childPath) ?? ''\n const childValue = suffix ? 
getAtPath(row as any, suffix) : undefined\n visit(childPath, rowId, childValue)\n }\n }\n }\n\n for (const root of roots) {\n const value = getAtPath(state as any, root)\n visit(root, undefined, value)\n }\n }\n}\n\nexport const collectListConfigs = (spec: Record<string, unknown>): ReadonlyArray<ListConfig> => {\n const configs: Array<ListConfig> = []\n for (const key in spec) {\n if (!Object.prototype.hasOwnProperty.call(spec, key)) continue\n const raw = spec[key]\n if (!raw || typeof raw !== 'object') continue\n const tag = (raw as any)._tag\n if (tag !== 'StateTraitList') continue\n const trackBy = (raw as any).identityHint?.trackBy\n configs.push({\n path: key,\n trackBy: typeof trackBy === 'string' ? trackBy : undefined,\n })\n }\n return configs\n}\n","import { isDevEnv } from './env.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\n\nconst RUNTIME_INTERNALS = Symbol.for('@logixjs/core/runtimeInternals')\nconst BOUND_INTERNALS = Symbol.for('@logixjs/core/boundInternals')\nconst MODULE_TRAITS_PROGRAM = Symbol.for('@logixjs/core/moduleTraitsProgram')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nexport const setRuntimeInternals = (runtime: object, internals: RuntimeInternals): void => {\n defineHidden(runtime, RUNTIME_INTERNALS, internals)\n}\n\nexport const setBoundInternals = (bound: object, internals: RuntimeInternals): void => {\n defineHidden(bound, BOUND_INTERNALS, internals)\n}\n\n/**\n * ModuleTraitsProgram(StateTraitProgram):\n * - Attaches a traits Program to a module definition object (used by TraitLifecycle/Debug).\n * - Uses Symbol + non-enumerable properties to avoid spreading `.__*` magic fields.\n *\n * Note: this is a \"module-definition-side\" internal slot, not RuntimeInternals (instance-level); the semantics differ.\n */\nexport const setModuleTraitsProgram = (module: 
object, program: unknown): void => {\n defineHidden(module, MODULE_TRAITS_PROGRAM, program)\n}\n\nexport const getModuleTraitsProgram = (module: unknown): unknown | undefined => {\n if (!module) return undefined\n if (typeof module !== 'object' && typeof module !== 'function') return undefined\n return (module as any)[MODULE_TRAITS_PROGRAM] as unknown | undefined\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const getRuntimeInternals = (runtime: object): RuntimeInternals => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const internals = (runtime as any)[RUNTIME_INTERNALS] as RuntimeInternals | undefined\n if (!internals) {\n const msg = isDevEnv()\n ? [\n '[MissingRuntimeInternals] Runtime internals not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make calls internalHooks.installInternalHooks (020 foundation).',\n '- If you created a mock runtime for tests, attach internals or avoid calling internal-only APIs.',\n ].join('\\n')\n : 'Runtime internals not installed'\n throw new Error(msg)\n }\n\n const runtimeInstanceId = scope.instanceId\n if (\n typeof runtimeInstanceId === 'string' &&\n runtimeInstanceId.length > 0 &&\n runtimeInstanceId !== internals.instanceId\n ) {\n throw new Error(\n isDevEnv()\n ? 
[\n '[InconsistentRuntimeInternals] Runtime internals instanceId mismatch.',\n `runtime: ${formatScope(scope.moduleId, runtimeInstanceId)}`,\n `internals: ${formatScope(internals.moduleId, internals.instanceId)}`,\n ].join('\\n')\n : 'Runtime internals mismatch',\n )\n }\n\n return internals\n}\n\nexport const getBoundInternals = (bound: object): RuntimeInternals => {\n const internals = (bound as any)[BOUND_INTERNALS] as RuntimeInternals | undefined\n if (!internals) {\n const msg = isDevEnv()\n ? [\n '[MissingBoundInternals] Bound internals not installed on Bound API instance.',\n 'fix:',\n '- Ensure BoundApiRuntime attaches internals (020 foundation).',\n '- If you created a mock bound for tests, attach internals or avoid calling internal-only APIs.',\n ].join('\\n')\n : 'Bound internals not installed'\n throw new Error(msg)\n }\n\n return internals\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA,aAAAA;AAAA,EAAA,eAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA,wBAAAC;AAAA,EAAA,qBAAAC;AAAA,EAAA,sBAAAC;AAAA;AAAA;;;ACAA,IAAAC,iBAAyC;;;ACAzC,IAAAC,iBAA4D;;;ACA5D,IAAAC,iBAAuD;;;ACwEvD,IAAM,iBAAoD;AAAA,EACxD,UAAU;AAAA,EACV,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,cAAc,IAAI;AAAA,EAClB,uBAAuB;AACzB;;;AC/EA,oBAAsB;;;ACItB,IAAAC,iBAA0C;AAOnC,IAAM,gBAAgB,wBAAS,WAA+B,MAAS;AAoHvE,IAAM,wBAAN,cAAoC,uBAAQ,IAAI,0BAA0B,EAG/E,EAAE;AAAC;;;AH6HE,IAAM,oBAAoB,wBAAS,WAAgC,CAAC,CAAC;AACrE,IAAM,sBAAsB,wBAAS,WAA+B,MAAS;AAC7E,IAAM,eAAe,wBAAS,WAA+B,MAAS;AACtE,IAAM,eAAe,wBAAS,WAA+B,MAAS;AAEtE,IAAM,0BAA0B,wBAAS,WAA6B,KAAK;AAiB3E,IAAM,0CAA0C,wBAAS,WAAmD;AAAA,EACjH,cAAc;AAAA,EACd,MAAM;AACR,CAAC;AA0ID,IAAM,uBAAuB,oBAAI,IAAY;AAC7C,IAAM,wBAAwB,oBAAI,IAAY;AA4C9C,IAAM,oBAAoB,CAAC,UAAgE;AACzF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,eAAe,MAAM;AACzB,QAAI;AACF,aAAO,qBAAM,OAAO,MAAM,OAA+B;AAAA,QACvD,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH,QAAQ;AACN,UAAI;AACF,eAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,MAC5C,QAAQ;AACN,eAAO,OAAO,MAAM,KAAK;AAAA,MAC3B;AAAA,IACF;AAAA,EACF,GAAG;AAEH,QAAM,UAAU,kBAAkB,QAAQ;AAAA,EAAsB,WAAW;AAE3E,SAAO,s
BAAO,SAAS,OAAO,EAAE;AAAA,IAC9B,sBAAO,aAAa;AAAA,MAClB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,CAAC,UAA2D;AAChF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,SAAS,kBAAkB,QAAQ,gBAAgB,MAAM,QAAQ;AACvE,QAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAC5C,QAAM,MAAM,GAAG,MAAM;AAAA,EAAK,MAAM;AAEhC,QAAM,OACJ,MAAM,aAAa,YACf,sBAAO,WAAW,GAAG,IACrB,MAAM,aAAa,SACjB,sBAAO,QAAQ,GAAG,IAClB,sBAAO,SAAS,GAAG;AAE3B,QAAM,cAAuC;AAAA,IAC3C,kBAAkB;AAAA,IAClB,eAAe,cAAc,MAAM,QAAQ;AAAA,IAC3C,yBAAyB,MAAM;AAAA,IAC/B,4BAA4B,MAAM;AAAA,EACpC;AACA,MAAI,MAAM,MAAM;AACd,gBAAY,uBAAuB,IAAI,MAAM;AAAA,EAC/C;AACA,MAAI,MAAM,WAAW;AACnB,gBAAY,4BAA4B,IAAI,MAAM;AAAA,EACpD;AAEA,SAAO,KAAK,KAAK,sBAAO,aAAa,WAAW,CAAC;AACnD;AAOO,IAAM,YAAY,qBAAM,cAAc,mBAAmB,CAAC,CAAC;AAQlE,IAAM,gBAAsB;AAAA,EAC1B,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AACjB;AAEO,IAAM,iBAAiB,qBAAM,cAAc,mBAAmB,CAAC,aAAa,CAAC;AASpF,IAAM,cAAoB;AAAA,EACxB,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAC/C;AAEO,IAAM,eAAe,qBAAM,cAAc,mBAAmB,CAAC,WAAW,CAAC;AAEhF,IAAM,YAAY,OAAO,WAAW,eAAe,OAAO,aAAa;AAGvE,IAAM,4BAA4B,CAAC,UAAsC;AAEvE,MAAI,OAAQ,MAAc,SAAS,YAAa,MAAc,KAAK,WAAW,QAAQ,GAAG;AACvF,UAAM,WAAY,MAAc,YAAY;AAC5C,UAAM,OAAQ,MAAc;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,yBAAyB,WAAW,QAAQ,OAAO,IAAI;AAAA,QACvD;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,IAAI,KAAK;AAEjB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,mBAAmB;AACpC,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,eAAe,MAAM;AACzB,UAAI;AACF,eAAO,qBAAM,OAAO,MAAM,OAA+B,EAAE,kBAAkB,KAAK,CAAC;AAAA,MACrF,QAAQ;AACN,YAAI;AACF,iBAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,QAC5C,QAAQ;AACN,iBAAO,OAAO,MAAM,KAAK;AAAA,QAC3B;AAAA,MACF;AAAA,IACF,GAAG;AAEH,UAAM,MAAM,GAAG,QAAQ,IAAI,WAAW;AACtC,QAAI,qBAAqB,IAAI,GAAG,GAAG;AACjC,aAAO,sBAAO;AAAA,IAChB;AACA,yBAAqB,IAAI,GAAG;A
AE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,mCAAmC;AAAA,QACnC;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,MAAM,WAAW;AAEzB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,cAAc;AAC/B,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAE5C,UAAM,QACJ,MAAM,aAAa,YAAY,kBAAkB,MAAM,aAAa,SAAS,kBAAkB;AAEjG,UAAM,QACJ,MAAM,aAAa,YACf,wBACA,MAAM,aAAa,SACjB,qBACA;AAER,UAAM,MAAM,GAAG,QAAQ,IAAI,MAAM,IAAI,IAAI,MAAM,OAAO;AACtD,QAAI,sBAAsB,IAAI,GAAG,GAAG;AAClC,aAAO,sBAAO;AAAA,IAChB;AACA,0BAAsB,IAAI,GAAG;AAE7B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,iBAAiB,QAAQ,eAAe;AAAA,QACxC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,UAAI,MAAM,aAAa,WAAW;AAEhC,gBAAQ,KAAK,MAAM;AAAA,MACrB,WAAW,MAAM,aAAa,QAAQ;AAEpC,gBAAQ,KAAK,MAAM;AAAA,MACrB,OAAO;AAEL,gBAAQ,MAAM,MAAM;AAAA,MACtB;AAEA,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAIA,SAAO,sBAAO;AAChB;AAOA,IAAM,qBAA2B;AAAA,EAC/B,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AAEd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAAA,IAC7C;AAEA,WAAO,0BAA0B,KAAK;AAAA,EACxC;AACF;AAEO,IAAM,sBAAsB,qBAAM,cAAc,mBAAmB,CAAC,kBAAkB,CAAC;AAQ9F,IAAM,+BAAqC;AAAA,EACzC,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AACd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AAAA,IACf;AAEA,WAAO,MAAM,SAAS,qBAAsB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAC1F,0BAA0B,KAAK,IAC/B,sBAAO;AAAA,EACb;AACF;AAEO,IAAM,gCAAgC,qBAAM,cAAc,mBAAmB,CAAC,4BAA4B,CAAC;AAO3G,IAAM,2BAA2B,sBAAO;AAAA,EAC7C,sBAAO;AAAA,EACP,sBAAO,aAAa,EAAE,MAAM,WAAW,QAAQ,KAAK,CAAC;AACvD;;;AIvtBA,IAAAC,iBAA+B;AAKxB,IAAM,aAAa,MAA0B;AAClD,MAAI;AAEF,UAAM,MAAO,YAAoB,SAAS;AAC1C,WAAO,OAAO,KAAK,aAAa,WAAW,IAAI,WAAW;AAAA,EAC5D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,IAAM,WAAW,MAAe,WAAW,MAAM;AAgExD,IAAM,gCAAN,cAA4C,uBAAQ,IAAI,6CAA6C,EAGnG,EAAE;AAAC;AAML,IAAM,mCAAN,cAA+C,uBAAQ,IAAI,gDAAgD,EAGzG,EAAE;AAAC;AAUL,IAAM,0BAAN,cAAsC,uBAAQ,IAAI,gCAAgC,EAGhF,EAAE;AAAC;AAkFL,IAA
M,mCAAN,cAA+C,uBAAQ,IAAI,yCAAyC,EAGlG,EAAE;AAAC;AA6BL,IAAM,2BAAN,cAAuC,uBAAQ,IAAI,iCAAiC,EAGlF,EAAE;AAAC;AAaL,IAAM,oCAAN,cAAgD,uBAAQ,IAAI,0CAA0C,EAGpG,EAAE;AAAC;;;AL/NE,IAAM,yBAAyB,wBAAS,WAAW,KAAK;AAUxD,IAAM,qBAAqB,wBAAS,WAAW,KAAK;;;AMxB3D,IAAAC,iBAAuC;AA+ChC,IAAM,YAAN,cAAwB,uBAAQ,IAAI,yBAAyB,EAA+B,EAAE;AAAC;AAwD/F,IAAM,WAA2E,sBAAO,IAAI,aAAa;AAC9G,QAAM,MAAM,OAAO;AACnB,SAAO,OAAO,IAAI;AACpB,CAAC;AAEM,IAAM,cAAqD,sBAAO,IAAI,aAAa;AACxF,QAAM,MAAM,OAAO;AACnB,SAAO,IAAI;AACb,CAAC;;;ACpGD,IAAM,gBAAgB,CAAC,SAAyC;AAC9D,MAAI,CAAC,KAAM,QAAO,CAAC;AACnB,SAAO,KAAK,MAAM,GAAG,EAAE,IAAI,CAAC,QAAS,WAAW,KAAK,GAAG,IAAI,OAAO,GAAG,IAAI,GAAI;AAChF;AAsCO,IAAM,sBAAsB,CAAC,OAAgB,SAAuB;AACzE,MAAI,CAAC,KAAM;AACX,QAAM,WAAW,cAAc,IAAI;AACnC,MAAI,SAAS,WAAW,EAAG;AAE3B,MAAI,UAAe;AACnB,WAAS,IAAI,GAAG,IAAI,SAAS,SAAS,GAAG,KAAK;AAC5C,UAAM,MAAM,SAAS,CAAC;AACtB,UAAM,OAAO,UAAU,GAAU;AACjC,QAAI,QAAQ,QAAQ,OAAO,SAAS,UAAU;AAC5C;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,QAAM,OAAO,SAAS,SAAS,SAAS,CAAC;AACzC,MAAI,MAAM,QAAQ,OAAO,KAAK,OAAO,SAAS,UAAU;AACtD,YAAQ,IAAI,IAAI;AAChB;AAAA,EACF;AAEA,MAAI,WAAW,OAAO,YAAY,UAAU;AAC1C,WAAO,QAAQ,IAAW;AAAA,EAC5B;AACF;;;ACxEA,IAAM,kBAAkB,uBAAO,IAAI,8BAA8B;AACjE,IAAM,wBAAwB,uBAAO,IAAI,mCAAmC;AA8BrE,IAAM,yBAAyB,CAACC,YAAyC;AAC9E,MAAI,CAACA,QAAQ,QAAO;AACpB,MAAI,OAAOA,YAAW,YAAY,OAAOA,YAAW,WAAY,QAAO;AACvE,SAAQA,QAAe,qBAAqB;AAC9C;AA4CO,IAAM,oBAAoB,CAAC,UAAoC;AACpE,QAAM,YAAa,MAAc,eAAe;AAChD,MAAI,CAAC,WAAW;AACd,UAAM,MAAM,SAAS,IACjB;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,KAAK,IAAI,IACX;AACJ,UAAM,IAAI,MAAM,GAAG;AAAA,EACrB;AAEA,SAAO;AACT;;;AThFO,IAAMC,OAAM;AAAA,EACjB,OAAO,CAAC,UAA4B,EAAE,MAAM,SAAS,KAAK;AAAA,EAC1D,MAAM,CAAC,MAAc,mBAAqD;AAAA,IACxE,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AAAA,EACA,MAAM,CACJ,MACA,OACA,aACc;AAAA,IACd,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,eAAe,SAAS;AAAA,IACxB,OAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,eAAe,CAAC,cAAgC;AAC9C,UAAM,MAAM,OAAO,cAAc,WAAW,UAAU,KAAK,IAAI;AAC/D,QAAI,CAAC,OAAO,QAAQ,QAAS,QAAOA,KAAI,KAAK;AAE7C,UAAM,WA
AW,IAAI,MAAM,GAAG,EAAE,OAAO,OAAO;AAC9C,QAAI,SAAS,WAAW,EAAG,QAAOA,KAAI,KAAK;AAE3C,UAAM,UAAU,CAAC,QAAyB,WAAW,KAAK,GAAG;AAC7D,UAAM,eAAe,CAAC,QAAyB,IAAI,SAAS,IAAI,IAAI,IAAI,MAAM,GAAG,EAAE,IAAI;AAEvF,UAAM,iBAA0E,CAAC;AACjF,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,MAAM,SAAS,CAAC;AACtB,UAAI,CAAC,QAAQ,GAAG,EAAG;AACnB,YAAM,QAAQ,OAAO,GAAG;AACxB,UAAI,CAAC,OAAO,SAAS,KAAK,KAAK,QAAQ,EAAG,QAAOA,KAAI,MAAM,GAAG;AAC9D,qBAAe,KAAK,EAAE,KAAK,GAAG,MAAM,CAAC;AAAA,IACvC;AAGA,QAAI,eAAe,SAAS,GAAG;AAC7B,YAAM,OAAO,eAAe,eAAe,SAAS,CAAC;AACrD,YAAM,gBAAgB,eAAe,MAAM,GAAG,EAAE,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK;AAEpE,YAAM,WAAW,SACd,MAAM,GAAG,KAAK,GAAG,EACjB,OAAO,CAAC,QAAQ,CAAC,QAAQ,GAAG,CAAC,EAC7B,IAAI,YAAY,EAChB,KAAK,GAAG;AAEX,UAAI,CAAC,SAAU,QAAOA,KAAI,MAAM,GAAG;AAEnC,YAAM,YAAY,SACf,MAAM,KAAK,MAAM,CAAC,EAClB,IAAI,YAAY,EAChB,KAAK,GAAG;AAEX,aAAOA,KAAI,KAAK,UAAU,KAAK,OAAO;AAAA,QACpC,eAAe,cAAc,SAAS,IAAI,gBAAgB;AAAA,QAC1D,OAAO,YAAY,YAAY;AAAA,MACjC,CAAC;AAAA,IACH;AAGA,QAAI,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,CAAC,GAAG;AAC1C,YAAM,OAAO,SAAS,SAAS,SAAS,CAAC;AACzC,UAAI,KAAK,SAAS,IAAI,GAAG;AACvB,eAAOA,KAAI,KAAK,SAAS,IAAI,YAAY,EAAE,KAAK,GAAG,CAAC;AAAA,MACtD;AAAA,IACF;AAEA,WAAOA,KAAI,MAAM,GAAG;AAAA,EACtB;AAAA,EACA,MAAM,OAAiB,EAAE,MAAM,OAAO;AACxC;AAKO,IAAM,iBAAiB,CAAC,OAA2B,YACxD,sBAAO,IAAI,aAAa;AACtB,MAAI;AACJ,MAAI;AACF,gBAAY,kBAAkB,KAAY;AAAA,EAC5C,QAAQ;AACN;AAAA,EACF;AAEA,QAAM,UAAU,UAAU,OAAO;AAIjC,MAAI,CAAC,QAAS;AAEd,QAAM,WAAW,CAAC,QAAqD;AACrE,QAAI,IAAI,SAAS,OAAQ,QAAO,EAAE,MAAM,OAAO;AAC/C,QAAI,IAAI,SAAS,QAAS,QAAO,EAAE,MAAM,SAAS,MAAM,IAAI,KAAK;AACjE,QAAI,IAAI,SAAS,QAAQ;AACvB,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM,IAAI;AAAA,QACV,GAAI,IAAI,gBAAgB,EAAE,eAAe,IAAI,cAAc,IAAI,CAAC;AAAA,MAClE;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,GAAI,IAAI,gBAAgB,EAAE,eAAe,IAAI,cAAc,IAAI,CAAC;AAAA,MAChE,OAAO,IAAI;AAAA,IACb;AAAA,EACF;AAEA,QAAM,WAAqD;AAAA,IACzD,MAAM,QAAQ;AAAA,IACd,QAAQ,SAAS,QAAQ,MAAM;AAAA,EACjC;AAEA,QAAM,QAAQ,OAAO,wBAAS,IAAe,sBAAsB;AACnE,MAAI,OAAO;AACT,YAAQ,QAAQ;AAChB;AAAA,EACF;AA
EA,SAAO,UAAU,IAAI;AAAA,IAAwB,EAAE,MAAM,SAAS,MAAM,kBAAkB,SAAS,QAAQ;AAAA,IAAG,MACxG,sBAAO,KAAK,MAAM;AAChB,cAAQ,QAAQ;AAAA,IAClB,CAAC;AAAA,EACH;AACF,CAAC;AAKI,IAAM,gBAAgB,CAAC,OAA2B,YACvD,sBAAO,IAAI,aAAa;AACtB,MAAI,QAAQ,SAAS,kBAAkB;AACrC,UAAM,cAAc,CAAC,QAAsC;AACzD,UAAI,IAAI,SAAS,OAAQ,QAAO;AAChC,UAAI,IAAI,SAAS,WAAW,IAAI,SAAS,OAAQ,QAAO,IAAI;AAC5D,YAAM,OAAO,GAAG,IAAI,IAAI;AACxB,aAAO,IAAI,QAAQ,GAAG,IAAI,IAAI,IAAI,KAAK,KAAK;AAAA,IAC9C;AAEA,UAAM,YAAY,YAAY,QAAQ,MAAM;AAC5C,QAAI,CAAC,WAAW;AACd;AAAA,IACF;AAEA,WAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAC5C;AAAA,EACF;AAEA,MAAI,QAAQ,SAAS,oBAAoB;AACvC;AAAA,EACF;AAEA,QAAM,eAAe,OAAO,sBAAO,cAAwB,SAAS;AACpE,MAAI,sBAAO,OAAO,YAAY,GAAG;AAC/B;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AACF,UAAM,YAAY,kBAAkB,KAAY;AAChD,eAAW,UAAU;AACrB,iBAAa,UAAU;AAAA,EACzB,QAAQ;AACN,eAAW;AACX,iBAAa;AAAA,EACf;AAEA,SAAO,aAAa,MAAM,OAAO;AAAA,IAC/B,MAAM;AAAA,IACN,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,QAAQ;AAAA,EAChB,CAAC;AACH,CAAC;AAKI,IAAM,UAAU,CAAC,OAA2B,YACjD,sBAAO,IAAI,aAAa;AACtB,QAAM,QAAQ,MACZ,MAAM,MAAM,OAAO,CAAC,UAAU;AAC5B,UAAM,UAAU,CAAC,MAAuB,SAAuB;AAC7D,UAAI,CAAC,KAAM;AACX,MAAM,oBAAoB,OAAO,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,IACpD;AAEA,QAAI,QAAQ,SAAS,oBAAoB;AACvC,YAAM,SAAS,QAAQ;AACvB,UAAI,OAAO,SAAS,QAAS;AAC7B,cAAQ,UAAU,OAAO,IAAI;AAC7B,cAAQ,MAAM,OAAO,IAAI;AACzB;AAAA,IACF;AAEA,QAAI,QAAQ,SAAS,oBAAoB;AACvC,YAAM,SAAS,QAAQ;AACvB,UAAI,OAAO,SAAS,OAAQ;AAC5B,YAAM,OAAO,GAAG,OAAO,IAAI,IAAI,OAAO,KAAK;AAC3C,YAAM,OAAO,OAAO,QAAQ,GAAG,IAAI,IAAI,OAAO,KAAK,KAAK;AACxD,cAAQ,UAAU,IAAI;AACtB,cAAQ,MAAM,IAAI;AAClB;AAAA,IACF;AAEA,QAAI,QAAQ,SAAS,gBAAgB;AAGnC;AAAA,IACF;AAAA,EACF,CAAC;AAEH,QAAM,QAAQ,OAAO,wBAAS,IAAe,sBAAsB;AACnE,MAAI,OAAO;AACT,WAAO,OAAO,MAAM;AAAA,EACtB;AAEA,MAAI;AAOJ,MAAI;AACF,UAAM,YAAY,kBAAkB,KAAY;AAChD,iBAAa,CAAC,QAAQ,SAAS,UAAU,IAAI,wBAAwB,QAAe,IAAI;AAAA,EAC1F,QAAQ;AACN,iBAAa;AAAA,EACf;AAEA,MAAI,CAAC,YAAY;AACf,WAAO,OAAO,MAAM;AAAA,EACtB;AAEA,SAAO,OAAO,WAAW,EAAE,MAAM,SAAS,MAAM,WAAW,SAAS,QAAQ,GAAG,KAAK;AACtF,CAAC;AAOH,IAAM,gBAAgB,CAAC,SACrB,SAAS,YA
AY,SAAS,QAAQ,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,KAAK;AAE3F,IAAM,gBAAgB,CAAC,SAAyB;AAC9C,MAAI,CAAC,KAAM,QAAO;AAClB,QAAM,WAAW,KAAK,MAAM,GAAG,EAAE,OAAO,OAAO;AAC/C,SAAO,SACJ,IAAI,CAAC,QAAS,WAAW,KAAK,GAAG,IAAI,OAAO,GAAI,EAChD,KAAK,GAAG,EACR,QAAQ,WAAW,IAAI;AAC5B;AAEA,IAAM,wBAAwB,CAAC,KAAa,YAA6B;AACvE,MAAI,CAAC,OAAO,CAAC,QAAS,QAAO;AAC7B,MAAI,QAAQ,QAAS,QAAO;AAC5B,MAAI,QAAQ,WAAW,GAAG,GAAG,GAAG,EAAG,QAAO;AAC1C,MAAI,QAAQ,WAAW,GAAG,GAAG,IAAI,EAAG,QAAO;AAE3C,MAAI,IAAI,WAAW,GAAG,OAAO,IAAI,EAAG,QAAO;AAC3C,SAAO;AACT;AAEO,IAAM,mBAAmB,CAAC,OAA2BC,YAAkC;AAC5F,QAAM,UAAU,uBAAuBA,OAAa;AAEpD,QAAM,UAAU,SAAS,SAAS,OAAO,CAAC,MAAM,KAAK,EAAE,SAAS,QAAQ,KAAK,CAAC;AAE9E,QAAM,gBAAgB,QAAQ;AAAA,IAAO,CAAC,MACpC,MAAM,QAAQ,GAAG,MAAM,QAAQ,IAAI,EAAE,KAAK,SAAS,SAAS,SAAS,IAAI;AAAA,EAC3E;AAEA,QAAM,oBAAoB,QAAQ;AAAA,IAAO,CAAC,MACxC,MAAM,QAAQ,GAAG,MAAM,QAAQ,IAAI,EAAE,KAAK,SAAS,SAAS,aAAa,IAAI;AAAA,EAC/E;AAEA,QAAM,QAAQ,sBAAO,KAAK,MAAM;AAC9B,QAAI,cAAc,WAAW,EAAG;AAChC,UAAM,UAAU;AAAA,MACd,sBAAO,QAAQ,eAAe,CAAC,UAAe,MAAM,OAAO,OAAO,QAAQ,MAAM,SAAS,GAAG;AAAA,QAC1F,aAAa;AAAA,MACf,CAAC,EAAE,KAAK,sBAAO,MAAM;AAAA,IACvB;AAAA,EACF,CAAC;AAED,QAAM,qBAAqB,CAAC,gBAC1B,sBAAO,IAAI,aAAa;AACtB,QAAI,CAAC,eAAe,cAAc,WAAW,EAAG;AAChD,QAAI,kBAAkB,WAAW,EAAG;AAEpC,UAAM,iBAAiB,cAAc,WAAW;AAEhD,WAAO,sBAAO;AAAA,MACZ;AAAA,MACA,CAAC,UAAe;AACd,cAAM,OAAQ,OAAO,MAAM,QAAQ,CAAC;AACpC,cAAM,WAAW,KAAK,KAAK,CAAC,QAAQ,sBAAsB,KAAK,cAAc,CAAC;AAC9E,YAAI,CAAC,SAAU,QAAO,sBAAO;AAC7B,eAAO,MAAM,OAAO,OAAO,QAAQ,MAAM,SAAS;AAAA,MACpD;AAAA,MACA,EAAE,aAAa,YAAY;AAAA,IAC7B;AAAA,EACF,CAAC,EAAE,KAAK,sBAAO,MAAM;AAEvB,SAAO,EAAE,OAAO,mBAAmB;AACrC;AASO,IAAM,UAAU,CAAC,WAAgE,sBAAO;;;ADjVxF,IAAMC,OAAeA;AAErB,IAAMC,kBAAiB,CAAC,OAA2B,YAC/C,eAAe,OAAc,OAAO;AAExC,IAAMC,iBAAgB,CAAC,OAA2B,YAC9C,cAAc,OAAc,OAAO;AAEvC,IAAMC,WAAU,CAAC,OAA2B,YACxC,QAAQ,OAAc,OAAO;AAEjC,IAAMC,oBAAmB,CAAC,OAA2BC,YACjD,iBAAiB,OAAcA,OAAM;AAEzC,IAAMC,WAAU,CAAC,UAAwE,QAAQ,KAAY;","names":["Ref","cleanup","install","makeSourceWiring","scopedExecute","scopedValidate","import_effect","import_effect","import_effect","import_effe
ct","import_effect","import_effect","module","Ref","module","Ref","scopedValidate","scopedExecute","cleanup","makeSourceWiring","module","install"]}
|
|
1
|
+
{"version":3,"sources":["../src/TraitLifecycle.ts","../src/internal/trait-lifecycle/index.ts","../src/internal/runtime/core/TaskRunner.ts","../src/internal/runtime/core/DebugSink.record.ts","../src/internal/observability/jsonValue.ts","../src/internal/runtime/core/errorSummary.ts","../src/internal/runtime/core/EffectOpCore.ts","../src/internal/runtime/core/env.ts","../src/internal/runtime/core/HostScheduler.ts","../src/internal/runtime/core/RuntimeStore.ts","../src/internal/runtime/core/TickScheduler.ts","../src/internal/runtime/core/DevtoolsHub.ts","../src/internal/runtime/core/DeclarativeLinkRuntime.ts","../src/internal/runtime/core/ReplayLog.ts","../src/internal/state-trait/rowid.ts","../src/internal/runtime/core/runtimeInternalsAccessor.ts"],"sourcesContent":["// TraitLifecycle: shared lower-level interface for Form/Query (@logixjs/core/TraitLifecycle, Phase 2 placeholder).\n//\n// - Exposes serializable FieldRef and unified request protocols.\n// - Concrete implementation lives in internal/trait-lifecycle/*; this module provides public API + type exports.\n\nimport type { BoundApi } from './Bound.js'\nimport { Effect } from 'effect'\nimport * as Internal from './internal/trait-lifecycle/index.js'\n\nexport type FieldRef = Internal.FieldRef\nexport type ValidateMode = Internal.ValidateRequest['mode']\nexport type ValidateRequest = Internal.ValidateRequest\nexport type ExecuteRequest = Internal.ExecuteRequest\nexport type CleanupRequest = Internal.CleanupRequest\nexport type SourceWiring = ReturnType<typeof Internal.makeSourceWiring>\n\nexport const Ref = Internal.Ref\n\nexport const scopedValidate = (bound: BoundApi<any, any>, request: ValidateRequest): Effect.Effect<void, never, any> =>\n Internal.scopedValidate(bound as any, request)\n\nexport const scopedExecute = (bound: BoundApi<any, any>, request: ExecuteRequest): Effect.Effect<void, never, any> =>\n Internal.scopedExecute(bound as any, request)\n\nexport const cleanup = (bound: BoundApi<any, any>, 
request: CleanupRequest): Effect.Effect<void, never, any> =>\n Internal.cleanup(bound as any, request)\n\nexport const makeSourceWiring = (bound: BoundApi<any, any>, module: unknown): SourceWiring =>\n Internal.makeSourceWiring(bound as any, module)\n\nexport const install = (bound: BoundApi<any, any>): Effect.Effect<void, never, any> => Internal.install(bound as any)\n","import { Effect, FiberRef, Option } from 'effect'\nimport type { BoundApi } from '../runtime/core/module.js'\nimport * as TaskRunner from '../runtime/core/TaskRunner.js'\nimport * as ReplayLog from '../runtime/core/ReplayLog.js'\nimport type { CleanupRequest, ExecuteRequest, FieldRef, ValidateRequest } from './model.js'\nimport type * as StateTraitValidate from '../state-trait/validate.js'\nimport * as RowId from '../state-trait/rowid.js'\nimport { getBoundInternals, getModuleTraitsProgram } from '../runtime/core/runtimeInternalsAccessor.js'\n\nexport type { CleanupRequest, ExecuteRequest, FieldRef, ValidateRequest }\n\n/**\n * Ref: builders for FieldRef (serializable and comparable).\n *\n * Notes:\n * - Phase 2 provides only minimal constructors.\n * - Array optimizations (RowID/trackBy) and richer ref semantics will land in later phases.\n */\nexport const Ref = {\n field: (path: string): FieldRef => ({ kind: 'field', path }),\n list: (path: string, listIndexPath?: ReadonlyArray<number>): FieldRef => ({\n kind: 'list',\n path,\n listIndexPath,\n }),\n item: (\n path: string,\n index: number,\n options?: { readonly listIndexPath?: ReadonlyArray<number>; readonly field?: string },\n ): FieldRef => ({\n kind: 'item',\n path,\n index,\n listIndexPath: options?.listIndexPath,\n field: options?.field,\n }),\n /**\n * fromValuePath:\n * - Parse a valuePath (e.g. 
\"items.0.warehouseId\") into a FieldRef.\n * - Supports a single index (required) and multi-level indices via listIndexPath (optional but recommended).\n *\n * Conventions:\n * - If numeric segments are present, return an item ref (the last index becomes item.index; preceding indices go into listIndexPath).\n * - If no numeric segments are present:\n * - \"items[]\" (pattern list root) returns a list ref.\n * - Otherwise return a field ref (including pattern fields like \"items[].x\").\n * - \"$root\" / empty string returns the root ref.\n */\n fromValuePath: (valuePath: string): FieldRef => {\n const raw = typeof valuePath === 'string' ? valuePath.trim() : ''\n if (!raw || raw === '$root') return Ref.root()\n\n const segments = raw.split('.').filter(Boolean)\n if (segments.length === 0) return Ref.root()\n\n const isIndex = (seg: string): boolean => /^[0-9]+$/.test(seg)\n const stripPattern = (seg: string): string => (seg.endsWith('[]') ? seg.slice(0, -2) : seg)\n\n const indexPositions: Array<{ readonly pos: number; readonly index: number }> = []\n for (let i = 0; i < segments.length; i++) {\n const seg = segments[i]!\n if (!isIndex(seg)) continue\n const index = Number(seg)\n if (!Number.isFinite(index) || index < 0) return Ref.field(raw)\n indexPositions.push({ pos: i, index })\n }\n\n // valuePath with indices => item ref\n if (indexPositions.length > 0) {\n const last = indexPositions[indexPositions.length - 1]!\n const listIndexPath = indexPositions.slice(0, -1).map((p) => p.index)\n\n const listPath = segments\n .slice(0, last.pos)\n .filter((seg) => !isIndex(seg))\n .map(stripPattern)\n .join('.')\n\n if (!listPath) return Ref.field(raw)\n\n const fieldRest = segments\n .slice(last.pos + 1)\n .map(stripPattern)\n .join('.')\n\n return Ref.item(listPath, last.index, {\n listIndexPath: listIndexPath.length > 0 ? listIndexPath : undefined,\n field: fieldRest ? 
fieldRest : undefined,\n })\n }\n\n // pattern list root => list ref\n if (segments.some((s) => s.includes('[]'))) {\n const last = segments[segments.length - 1]!\n if (last.endsWith('[]')) {\n return Ref.list(segments.map(stripPattern).join('.'))\n }\n }\n\n return Ref.field(raw)\n },\n root: (): FieldRef => ({ kind: 'root' }),\n} as const\n\n/**\n * scopedValidate (placeholder): in later phases this will compute the minimal set via ReverseClosure and write back into the error tree.\n */\nexport const scopedValidate = (bound: BoundApi<any, any>, request: ValidateRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n let internals: ReturnType<typeof getBoundInternals> | undefined\n try {\n internals = getBoundInternals(bound as any)\n } catch {\n return\n }\n\n const enqueue = internals.traits.enqueueStateTraitValidateRequest as unknown as\n | ((r: StateTraitValidate.ScopedValidateRequest) => void)\n | undefined\n\n if (!enqueue) return\n\n const toTarget = (ref: FieldRef): StateTraitValidate.ValidateTarget => {\n if (ref.kind === 'root') return { kind: 'root' }\n if (ref.kind === 'field') return { kind: 'field', path: ref.path }\n if (ref.kind === 'list') {\n return {\n kind: 'list',\n path: ref.path,\n ...(ref.listIndexPath ? { listIndexPath: ref.listIndexPath } : {}),\n }\n }\n return {\n kind: 'item',\n path: ref.path,\n index: ref.index,\n ...(ref.listIndexPath ? 
{ listIndexPath: ref.listIndexPath } : {}),\n field: ref.field,\n }\n }\n\n const internal: StateTraitValidate.ScopedValidateRequest = {\n mode: request.mode as any,\n target: toTarget(request.target),\n }\n\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n enqueue(internal)\n return\n }\n\n yield* internals.txn.runWithStateTransaction({ kind: 'trait', name: 'scopedValidate', details: request }, () =>\n Effect.sync(() => {\n enqueue(internal)\n }),\n )\n })\n\n/**\n * scopedExecute (placeholder): a unified execution entrypoint for query/resource actions (refresh/invalidate, etc.).\n */\nexport const scopedExecute = (bound: BoundApi<any, any>, request: ExecuteRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (request.kind === 'source:refresh') {\n const toFieldPath = (ref: FieldRef): string | undefined => {\n if (ref.kind === 'root') return undefined\n if (ref.kind === 'field' || ref.kind === 'list') return ref.path\n const base = `${ref.path}[]`\n return ref.field ? 
`${base}.${ref.field}` : base\n }\n\n const fieldPath = toFieldPath(request.target)\n if (!fieldPath) {\n return\n }\n\n yield* bound.traits.source.refresh(fieldPath) as Effect.Effect<void, never, any>\n return\n }\n\n if (request.kind !== 'query:invalidate') {\n return\n }\n\n const replayLogOpt = yield* Effect.serviceOption(ReplayLog.ReplayLog)\n if (Option.isNone(replayLogOpt)) {\n return\n }\n\n let moduleId: string | undefined\n let instanceId: string | undefined\n try {\n const internals = getBoundInternals(bound as any)\n moduleId = internals.moduleId\n instanceId = internals.instanceId\n } catch {\n moduleId = undefined\n instanceId = undefined\n }\n\n yield* replayLogOpt.value.record({\n _tag: 'InvalidateRequest',\n timestamp: Date.now(),\n moduleId,\n instanceId,\n kind: 'query',\n target: 'query',\n meta: request.request,\n })\n })\n\n/**\n * cleanup (placeholder): deterministic cleanup under structural changes (errors/ui/resources).\n */\nexport const cleanup = (bound: BoundApi<any, any>, request: CleanupRequest): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n const apply = () =>\n bound.state.mutate((draft) => {\n const clearAt = (root: 'errors' | 'ui', path: string): void => {\n if (!path) return\n RowId.unsetAtPathMutating(draft, `${root}.${path}`)\n }\n\n if (request.kind === 'field:unregister') {\n const target = request.target\n if (target.kind !== 'field') return\n clearAt('errors', target.path)\n clearAt('ui', target.path)\n return\n }\n\n if (request.kind === 'list:item:remove') {\n const target = request.target\n if (target.kind !== 'item') return\n const base = `${target.path}.${target.index}`\n const path = target.field ? 
`${base}.${target.field}` : base\n clearAt('errors', path)\n clearAt('ui', path)\n return\n }\n\n if (request.kind === 'list:reorder') {\n // Reorder does not change the external index semantics by itself.\n // Alignment of errors/ui should be handled by domain reducers or higher-level logic; keep it a no-op here.\n return\n }\n })\n\n const inTxn = yield* FiberRef.get(TaskRunner.inSyncTransactionFiber)\n if (inTxn) {\n return yield* apply()\n }\n\n let runWithTxn:\n | ((\n origin: { readonly kind: string; readonly name?: string; readonly details?: unknown },\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>)\n | undefined\n\n try {\n const internals = getBoundInternals(bound as any)\n runWithTxn = (origin, body) => internals.txn.runWithStateTransaction(origin as any, body)\n } catch {\n runWithTxn = undefined\n }\n\n if (!runWithTxn) {\n return yield* apply()\n }\n\n return yield* runWithTxn({ kind: 'trait', name: 'cleanup', details: request }, apply)\n })\n\ntype SourceWiring = {\n readonly setup: Effect.Effect<void, never, any>\n readonly refreshOnKeyChange: (changedPath: string) => Effect.Effect<void, never, any>\n}\n\nconst isAuxRootPath = (path: string): boolean =>\n path === 'errors' || path === 'ui' || path.startsWith('errors.') || path.startsWith('ui.')\n\nconst toPatternPath = (path: string): string => {\n if (!path) return path\n const segments = path.split('.').filter(Boolean)\n return segments\n .map((seg) => (/^[0-9]+$/.test(seg) ? '[]' : seg))\n .join('.')\n .replace(/\\.\\[\\]/g, '[]')\n}\n\nconst isDepAffectedByChange = (dep: string, changed: string): boolean => {\n if (!dep || !changed) return false\n if (dep === changed) return true\n if (changed.startsWith(`${dep}.`)) return true\n if (changed.startsWith(`${dep}[]`)) return true\n // list root structural change (e.g. changed=\"items\") should affect any list-item deps (e.g. 
dep=\"items[].x\").\n if (dep.startsWith(`${changed}[]`)) return true\n return false\n}\n\nexport const makeSourceWiring = (bound: BoundApi<any, any>, module: unknown): SourceWiring => {\n const program = getModuleTraitsProgram(module as any) as { readonly entries?: ReadonlyArray<any> } | undefined\n\n const sources = program?.entries?.filter((e) => e && e.kind === 'source') ?? []\n\n const sourceOnMount = sources.filter((e: any) =>\n Array.isArray(e?.meta?.triggers) ? e.meta.triggers.includes('onMount') : false,\n )\n\n const sourceOnKeyChange = sources.filter((e: any) =>\n Array.isArray(e?.meta?.triggers) ? e.meta.triggers.includes('onKeyChange') : false,\n )\n\n const setup = Effect.sync(() => {\n if (sourceOnMount.length === 0) return\n bound.lifecycle.onStart(\n Effect.forEach(sourceOnMount, (entry: any) => bound.traits.source.refresh(entry.fieldPath), {\n concurrency: 'unbounded',\n }).pipe(Effect.asVoid),\n )\n })\n\n const refreshOnKeyChange = (changedPath: string): Effect.Effect<void, never, any> =>\n Effect.gen(function* () {\n if (!changedPath || isAuxRootPath(changedPath)) return\n if (sourceOnKeyChange.length === 0) return\n\n const changedPattern = toPatternPath(changedPath)\n\n yield* Effect.forEach(\n sourceOnKeyChange,\n (entry: any) => {\n const deps = (entry?.meta?.deps ?? 
[]) as ReadonlyArray<string>\n const affected = deps.some((dep) => isDepAffectedByChange(dep, changedPattern))\n if (!affected) return Effect.void\n return bound.traits.source.refresh(entry.fieldPath)\n },\n { concurrency: 'unbounded' },\n )\n }).pipe(Effect.asVoid)\n\n return { setup, refreshOnKeyChange }\n}\n\n/**\n * install (placeholder): the default wiring entrypoint for TraitLifecycle.\n *\n * Notes:\n * - Phase 2 only provides an entrypoint that feature packages can depend on.\n * - Concrete wiring for \"domain event → request → in-transaction execution\" will be composed by Form/Query default logics in later phases.\n */\nexport const install = (_bound: BoundApi<any, any>): Effect.Effect<void, never, any> => Effect.void\n","import { Cause, Effect, Fiber, FiberRef, Ref, Stream } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport { isDevEnv } from './env.js'\nimport type * as Logic from './LogicMiddleware.js'\nimport type { AnyModuleShape, LogicEffect } from './module.js'\nimport type { RuntimeInternalsResolvedConcurrencyPolicy } from './RuntimeInternals.js'\nimport type { StateTxnOrigin } from './StateTransaction.js'\n\n/**\n * Prevents calling run*Task inside a \"synchronous transaction execution fiber\" (it would deadlock the txnQueue).\n *\n * - ModuleRuntime locally marks it as true while executing each transaction (dispatch/source-refresh/devtools/...).\n * - run*Task checks the flag on start: when true, it emits diagnostics only in dev/test and then no-ops.\n */\nexport const inSyncTransactionFiber = FiberRef.unsafeMake(false)\n\n/**\n * Force source.refresh:\n * - Default: when snapshot keyHash is unchanged and a non-idle snapshot already exists, refresh SHOULD be a no-op\n * (avoid redundant IO/writeback).\n * - Exception: explicit refresh (manual refresh) / invalidation-driven refresh needs to \"re-fetch even with the same keyHash\".\n *\n * Note: use a FiberRef to locally pass \"whether this refresh is forced\", avoiding expanding the 
source refresh handler signature.\n */\nexport const forceSourceRefresh = FiberRef.unsafeMake(false)\n\n/**\n * Synchronous transaction window (process-level) marker:\n * - Used as a hard guard in \"non-Effect API\" entry points (e.g. Promise/async functions).\n * - FiberRef cannot reliably read the \"current fiber\" in such entry points, so we need a synchronous callstack-level marker.\n *\n * Note: if a transaction body incorrectly crosses async boundaries, this marker will be held longer; that is a severe violation.\n */\nlet inSyncTransactionGlobalDepth = 0\n\nexport const enterSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth += 1\n}\n\nexport const exitSyncTransaction = (): void => {\n inSyncTransactionGlobalDepth = Math.max(0, inSyncTransactionGlobalDepth - 1)\n}\n\nexport const isInSyncTransaction = (): boolean => inSyncTransactionGlobalDepth > 0\n\nexport type TaskRunnerMode =\n | 'task' // sequential\n | 'parallel'\n | 'latest'\n | 'exhaust'\n\nexport type TaskStatus = 'idle' | 'pending' | 'running' | 'success' | 'failure' | 'interrupted'\n\nexport interface TaskExecution {\n readonly taskId: number\n readonly status: TaskStatus\n readonly acceptedAt: number\n readonly startedAt?: number\n readonly endedAt?: number\n}\n\nexport interface TaskRunnerOrigins {\n readonly pending?: StateTxnOrigin\n readonly success?: StateTxnOrigin\n readonly failure?: StateTxnOrigin\n}\n\ntype TaskHandler<Payload, Sh extends AnyModuleShape, R> =\n | LogicEffect<Sh, R, void, never>\n | ((payload: Payload) => LogicEffect<Sh, R, void, never>)\n\ntype TaskEffect<Payload, Sh extends AnyModuleShape, R, A, E> =\n | LogicEffect<Sh, R, A, E>\n | ((payload: Payload) => LogicEffect<Sh, R, A, E>)\n\nexport interface TaskRunnerConfig<Payload, Sh extends AnyModuleShape, R, A = void, E = never> {\n /**\n * Optional: trigger source name (e.g. 
actionTag / fieldPath), used as the default pending origin.name.\n * - BoundApiRuntime may fill this in for onAction(\"xxx\") / traits.source.refresh(\"field\"), etc.\n * - Other callers are not required to provide it.\n */\n readonly triggerName?: string\n\n /**\n * pending: synchronous state writes (loading=true / clearing errors, etc.), always a separate transaction entry.\n * - Only executed for tasks that are accepted and actually started (ignored triggers in runExhaustTask do not run pending).\n */\n readonly pending?: TaskHandler<Payload, Sh, R>\n\n /**\n * effect: real IO / async work (must run outside the transaction window).\n */\n readonly effect: TaskEffect<Payload, Sh, R, A, E>\n\n /**\n * success: success writeback (separate transaction entry).\n */\n readonly success?: (result: A, payload: Payload) => LogicEffect<Sh, R, void, never>\n\n /**\n * failure: failure writeback (separate transaction entry).\n *\n * Note: takes a Cause to preserve defect/interrupt semantics; interrupts do not trigger failure writeback by default.\n */\n readonly failure?: (cause: Cause.Cause<E>, payload: Payload) => LogicEffect<Sh, R, void, never>\n\n /**\n * origin: optional override for the three transaction origins.\n * - Default: pending.kind=\"task:pending\"; success/failure.kind=\"service-callback\".\n */\n readonly origin?: TaskRunnerOrigins\n\n /**\n * priority: reserved for future debugging/sorting; does not change transaction boundaries or concurrency semantics.\n */\n readonly priority?: number\n}\n\nexport interface TaskRunnerRuntime {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runWithStateTransaction: (\n origin: StateTxnOrigin,\n body: () => Effect.Effect<void, never, any>,\n ) => Effect.Effect<void, never, any>\n readonly resolveConcurrencyPolicy?: () => Effect.Effect<RuntimeInternalsResolvedConcurrencyPolicy, never, any>\n}\n\nconst resolve = <Payload, Sh extends AnyModuleShape, R, A, E>(\n eff: TaskEffect<Payload, Sh, R, A, E> | 
TaskHandler<Payload, Sh, R>,\n payload: Payload,\n): any => (typeof eff === 'function' ? (eff as any)(payload) : eff)\n\nconst defaultOrigins = (triggerName: string | undefined): Required<TaskRunnerOrigins> => ({\n pending: {\n kind: 'task:pending',\n name: triggerName,\n },\n success: {\n kind: 'service-callback',\n name: 'task:success',\n },\n failure: {\n kind: 'service-callback',\n name: 'task:failure',\n },\n})\n\nexport const shouldNoopInSyncTransactionFiber = (options: {\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n}): Effect.Effect<boolean> =>\n Effect.gen(function* () {\n const inTxn = yield* FiberRef.get(inSyncTransactionFiber)\n if (!inTxn) {\n return false\n }\n // Always no-op regardless of env (otherwise we may deadlock); diagnostics are emitted only in dev/test.\n if (isDevEnv()) {\n yield* Debug.record({\n type: 'diagnostic',\n moduleId: options.moduleId,\n instanceId: options.instanceId,\n code: options.code,\n severity: options.severity,\n message: options.message,\n hint: options.hint,\n actionTag: options.actionTag,\n kind: options.kind,\n })\n }\n return true\n })\n\nconst resolveConcurrencyLimit = (runtime: TaskRunnerRuntime): Effect.Effect<number | 'unbounded', never, any> =>\n runtime.resolveConcurrencyPolicy\n ? 
runtime.resolveConcurrencyPolicy().pipe(Effect.map((p) => p.concurrencyLimit))\n : Effect.succeed(16)\n\nconst runTaskLifecycle = <Payload, Sh extends AnyModuleShape, R, A, E>(\n payload: Payload,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n getCanWriteBack?: Effect.Effect<boolean>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> =>\n Effect.gen(function* () {\n const noop = yield* shouldNoopInSyncTransactionFiber({\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'logic::invalid_usage',\n severity: 'error',\n message: 'run*Task is not allowed inside a synchronous StateTransaction body (it may deadlock the txnQueue).',\n hint:\n 'Call run*Task from the run section of a watcher (e.g. $.onAction/$.onState/$.on); ' +\n 'do not call it directly inside a reducer / trait.run / synchronous transaction body. For long-lived flows, use a multi-entry pattern (pending → IO → writeback).',\n kind: 'run_task_in_transaction',\n })\n if (noop) {\n return\n }\n\n const defaults = defaultOrigins(config.triggerName)\n const origins: Required<TaskRunnerOrigins> = {\n pending: config.origin?.pending ?? defaults.pending,\n success: config.origin?.success ?? defaults.success,\n failure: config.origin?.failure ?? 
defaults.failure,\n }\n\n // 1) pending: separate transaction entry; once started it should not be interrupted by runLatest.\n const pending = config.pending\n if (pending) {\n yield* Effect.uninterruptible(\n runtime.runWithStateTransaction(origins.pending, () => Effect.asVoid(resolve(pending, payload))),\n )\n }\n\n // 2) IO: runs outside the transaction window.\n const io = resolve(config.effect, payload) as Effect.Effect<A, E, Logic.Env<Sh, R>>\n const exit = yield* Effect.exit(io)\n\n // 3) writeback: use the guard to confirm it's still the current task (runLatestTask).\n if (getCanWriteBack) {\n const ok = yield* getCanWriteBack\n if (!ok) {\n return\n }\n }\n\n if (exit._tag === 'Success') {\n const success = config.success\n if (success) {\n yield* runtime.runWithStateTransaction(origins.success, () => Effect.asVoid(success(exit.value, payload)))\n }\n return\n }\n\n // Failure: interruptions do not trigger failure writeback (e.g. runLatestTask cancellation, Scope ending).\n const cause = exit.cause as Cause.Cause<E>\n if (Cause.isInterrupted(cause)) {\n return\n }\n\n const failure = config.failure\n if (failure) {\n yield* runtime.runWithStateTransaction(origins.failure, () => Effect.asVoid(failure(cause, payload)))\n }\n }).pipe(\n // Watchers must not crash as a whole due to a single task failure: swallow errors, but keep them diagnosable.\n Effect.catchAllCause((cause) =>\n Debug.record({\n type: 'diagnostic',\n moduleId: runtime.moduleId,\n instanceId: runtime.instanceId,\n code: 'task_runner::unhandled_failure',\n severity: 'error',\n message: 'TaskRunner encountered an unhandled failure (pending/IO/writeback).',\n hint: 'Add a failure writeback for this task or handle errors explicitly upstream; avoid fire-and-forget swallowing errors.',\n actionTag: config.triggerName,\n kind: 'task_runner_unhandled_failure',\n trigger: {\n kind: 'task',\n name: config.triggerName,\n },\n }).pipe(Effect.zipRight(Effect.logError('TaskRunner error', cause))),\n ),\n 
) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n\n/**\n * makeTaskRunner:\n * - Reuses FlowRuntime concurrency semantics (sequential/parallel/latest/exhaust).\n * - Splits a single trigger into: pending (separate txn) → IO → success/failure (separate txn).\n */\nexport const makeTaskRunner = <Payload, Sh extends AnyModuleShape, R, A = void, E = never>(\n stream: Stream.Stream<Payload>,\n mode: TaskRunnerMode,\n runtime: TaskRunnerRuntime,\n config: TaskRunnerConfig<Payload, Sh, R, A, E>,\n): Effect.Effect<void, never, Logic.Env<Sh, R>> => {\n if (mode === 'latest') {\n return Effect.gen(function* () {\n const taskIdRef = yield* Ref.make(0)\n const currentFiberRef = yield* Ref.make<Fiber.RuntimeFiber<void, never> | undefined>(undefined)\n\n const start = (payload: Payload) =>\n Effect.gen(function* () {\n const taskId = yield* Ref.updateAndGet(taskIdRef, (n) => n + 1)\n\n const prev = yield* Ref.get(currentFiberRef)\n if (prev) {\n // Do not wait for the old fiber to fully end (avoid blocking new triggers); writeback is guarded by taskId.\n yield* Fiber.interruptFork(prev)\n }\n\n const canWriteBack = Ref.get(taskIdRef).pipe(Effect.map((current) => current === taskId))\n\n const fiber = yield* Effect.fork(\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config, canWriteBack),\n )\n\n yield* Ref.set(currentFiberRef, fiber)\n })\n\n return yield* Stream.runForEach(stream, start)\n })\n }\n\n if (mode === 'exhaust') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n const busyRef = yield* Ref.make(false)\n\n const mapper = (payload: Payload) =>\n Effect.gen(function* () {\n const acquired = yield* Ref.modify(busyRef, (busy) =>\n busy ? 
([false, busy] as const) : ([true, true] as const),\n )\n if (!acquired) {\n // Ignore trigger: no pending transaction is produced.\n return\n }\n try {\n yield* runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config)\n } finally {\n yield* Ref.set(busyRef, false)\n }\n })\n\n return yield* Stream.runDrain(stream.pipe(Stream.mapEffect(mapper, { concurrency })))\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n if (mode === 'parallel') {\n return Effect.gen(function* () {\n const concurrency = yield* resolveConcurrencyLimit(runtime)\n\n return yield* Stream.runDrain(\n stream.pipe(\n Stream.mapEffect((payload) => runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config), {\n concurrency,\n }),\n ),\n )\n }) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n }\n\n // mode === \"task\"(sequential)\n return Stream.runForEach(stream, (payload) =>\n runTaskLifecycle<Payload, Sh, R, A, E>(payload, runtime, config),\n ) as Effect.Effect<void, never, Logic.Env<Sh, R>>\n}\n","import { Cause, Effect, FiberRef, Layer, Logger } from 'effect'\nimport {\n projectJsonValue,\n type DowngradeReason as JsonDowngradeReason,\n type JsonValue,\n type JsonValueProjectionStats,\n} from '../../observability/jsonValue.js'\nimport type * as ReplayLog from './ReplayLog.js'\nimport {\n toSerializableErrorSummary,\n type DowngradeReason as ErrorDowngradeReason,\n type SerializableErrorSummary,\n} from './errorSummary.js'\nimport * as EffectOpCore from './EffectOpCore.js'\nimport type * as ProcessProtocol from './process/protocol.js'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\n\nexport interface TriggerRef {\n readonly kind: string\n readonly name?: string\n readonly details?: unknown\n}\n\ntype TraceEventType = `trace:${string}`\ntype GenericTraceEventType = Exclude<\n TraceEventType,\n 'trace:trait:converge' | 'trace:trait:check' | 'trace:trait:validate'\n>\n\n/**\n * ReplayEventRef:\n * - Replay event structure referenced from 
Debug events.\n * - Based on ReplayLog.Event, enriched with txn/trigger association fields for Devtools aggregation and explanation.\n */\nexport type ReplayEventRef = ReplayLog.ReplayLogEvent & {\n readonly txnId?: string\n readonly trigger?: TriggerRef\n}\n\nexport type Event =\n | {\n readonly type: 'module:init'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'module:destroy'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:phase'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly phase: 'init' | 'run' | 'destroy' | 'platform'\n readonly name: string\n readonly payload?: unknown\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'action:dispatch'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly action: unknown\n readonly actionTag?: string\n readonly unknownAction?: boolean\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'state:update'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly state: unknown\n readonly txnSeq?: number\n readonly txnId?: string\n /**\n * Optional: Static IR digest aligned with FieldPathId/StepId (for consumer-side reverse-mapping & alignment).\n * - When missing or mismatched, consumers must not attempt to reverse-map rootIds -> rootPaths (avoid wrong UI).\n * - Allowed to be omitted on near-zero-cost diagnostics=off paths.\n */\n readonly staticIrDigest?: string\n /**\n * Optional: the affected scope aggregated by this commit (field-level dirty-set).\n * - Populated by Runtime at commit time;\n * - Must stay slim and serializable;\n * - Devtools can use it to explain \"why converge/validate ran / why it degraded to full\".\n */\n 
readonly dirtySet?: unknown\n /**\n * Optional: patch count aggregated by this commit (from StateTransaction).\n * - Populated by Runtime only on transaction paths.\n * - Devtools can use it as a lightweight transaction summary metric.\n */\n readonly patchCount?: number\n /**\n * Optional: whether patch records were truncated (bounded) under full instrumentation.\n */\n readonly patchesTruncated?: boolean\n /**\n * Optional: truncation reason code (stable enum).\n */\n readonly patchesTruncatedReason?: 'max_patches'\n /**\n * Optional: commit mode (normal/batched/low-priority, etc).\n * - Populated by Runtime;\n * - Default is chosen by the caller (typically \"normal\").\n */\n readonly commitMode?: string\n /**\n * Optional: external visibility priority (normal/low).\n * - Populated by Runtime.\n * - Mainly used by React external subscription scheduling (avoid unnecessary renders).\n */\n readonly priority?: string\n /**\n * Optional: transaction origin kind (origin.kind) that triggered this state commit:\n * - e.g. \"action\" / \"source-refresh\" / \"service-callback\" / \"devtools\".\n * - Populated by Runtime only on StateTransaction-based paths.\n * - Devtools can distinguish app transactions vs devtools time-travel operations.\n */\n readonly originKind?: string\n /**\n * Optional: transaction origin name (origin.name) that triggered this state commit:\n * - e.g. 
action dispatch / fieldPath / task:success/task:failure, etc.\n * - Populated by Runtime only on StateTransaction-based paths.\n */\n readonly originName?: string\n /**\n * Reserved: Trait converge summary (for Devtools window-level stats / TopN costs / degrade reasons, etc.).\n * - Phase 2: field slot only; structure is not fixed.\n * - Later phases will align with the Trait/Replay event model into an explainable structure.\n */\n readonly traitSummary?: unknown\n /**\n * Reserved: replay event associated with this transaction (re-emit source of truth from ReplayLog).\n * - Phase 2: field slot only.\n * - Later phases will align with ReplayLog.Event structure.\n */\n readonly replayEvent?: ReplayEventRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly identity: ProcessProtocol.ProcessInstanceIdentity\n readonly severity: 'info' | 'warning' | 'error'\n readonly eventSeq: number\n readonly timestampMs: number\n readonly trigger?: ProcessProtocol.ProcessTrigger\n readonly dispatch?: {\n readonly moduleId: string\n readonly instanceId: string\n readonly actionId: string\n }\n readonly error?: ProcessProtocol.SerializableErrorSummary\n readonly txnSeq?: number\n readonly txnId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'lifecycle:error'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly cause: unknown\n readonly phase?: 'init' | 'run' | 'destroy' | 'platform'\n readonly hook?: 'initRequired' | 'start' | 'destroy' | 'suspend' | 'resume' | 'reset' | 'unknown'\n readonly taskId?: string\n readonly opSeq?: number\n readonly origin?: string\n readonly txnSeq?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 
'diagnostic'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly code: string\n readonly severity: 'error' | 'warning' | 'info'\n readonly message: string\n readonly hint?: string\n readonly actionTag?: string\n readonly kind?: string\n readonly txnSeq?: number\n readonly txnId?: string\n readonly trigger?: TriggerRef\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'warn:priority-inversion'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly tickSeq: number\n readonly reason: 'deferredBacklog' | 'subscribedNonUrgent'\n readonly selectorId?: string\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n | {\n readonly type: 'warn:microtask-starvation'\n readonly moduleId?: string\n readonly instanceId?: string\n readonly tickSeq: number\n readonly microtaskChainDepth?: number\n readonly runtimeLabel?: string\n readonly timestamp?: number\n }\n /**\n * trace:* events:\n * - Extension hook for runtime tracing / Playground / Alignment Lab.\n * - Only the type prefix and moduleId are standardized; payload shape is defined by higher layers (e.g. 
spanId/attributes in data).
 */
  | {
      readonly type: 'trace:trait:converge'
      readonly moduleId?: string
      readonly instanceId?: string
      readonly data: JsonValue
      readonly txnSeq?: number
      readonly txnId?: string
      readonly runtimeLabel?: string
      readonly timestamp?: number
    }
  // Trait "check" evidence: same envelope shape as trace:trait:converge (data is a JsonValue payload).
  | {
      readonly type: 'trace:trait:check'
      readonly moduleId?: string
      readonly instanceId?: string
      readonly data: JsonValue
      readonly txnSeq?: number
      readonly txnId?: string
      readonly runtimeLabel?: string
      readonly timestamp?: number
    }
  // Trait "validate" evidence: same envelope shape as trace:trait:converge.
  | {
      readonly type: 'trace:trait:validate'
      readonly moduleId?: string
      readonly instanceId?: string
      readonly data: JsonValue
      readonly txnSeq?: number
      readonly txnId?: string
      readonly runtimeLabel?: string
      readonly timestamp?: number
    }
  // Catch-all for other trace:* event types; data is intentionally left as `unknown` here.
  | {
      readonly type: GenericTraceEventType
      readonly moduleId?: string
      readonly instanceId?: string
      readonly data?: unknown
      readonly txnSeq?: number
      readonly txnId?: string
      readonly runtimeLabel?: string
      readonly timestamp?: number
    }

/**
 * A Debug sink: receives every Debug.Event emitted through `record`.
 */
export interface Sink {
  readonly record: (event: Event) => Effect.Effect<void>
}
// FiberRef holding the currently installed Debug sinks (empty by default).
export const currentDebugSinks = FiberRef.unsafeMake<ReadonlyArray<Sink>>([])
// Optional label used to group Debug events by runtime; injected into events during enrichment.
export const currentRuntimeLabel = FiberRef.unsafeMake<string | undefined>(undefined)
// Current transaction id / op sequence, propagated along the fiber for event enrichment.
export const currentTxnId = FiberRef.unsafeMake<string | undefined>(undefined)
export const currentOpSeq = FiberRef.unsafeMake<number | undefined>(undefined)
export type DiagnosticsLevel = 'off' | 'light' | 'sampled' | 'full'
// Diagnostics level gate; defaults to 'off' so the hot path stays near zero-cost.
export const currentDiagnosticsLevel = FiberRef.unsafeMake<DiagnosticsLevel>('off')

/**
 * Layer that scopes the diagnostics level to the current fiber tree.
 */
export const diagnosticsLevel = (level: DiagnosticsLevel): Layer.Layer<any, never, never> =>
  Layer.fiberRefLocallyScopedWith(currentDiagnosticsLevel as any, () => level) as Layer.Layer<any, never, never>

export interface TraitConvergeDiagnosticsSamplingConfig {
  /**
   * Sample once every N txns (deterministic, based on stable txnSeq).
   * - 1: sample every txn (timing granularity similar to full, while keeping payload slim)
   */
  readonly sampleEveryN: number
  /**
   * Max number of TopK hotspots to output (recommended ≤ 3).
   */
  readonly topK: number
}

// Default sampling config for trait-converge diagnostics (1-in-32, top 3 hotspots).
export const currentTraitConvergeDiagnosticsSampling = FiberRef.unsafeMake<TraitConvergeDiagnosticsSamplingConfig>({
  sampleEveryN: 32,
  topK: 3,
})

/**
 * Layer that scopes the trait-converge diagnostics sampling config to the current fiber tree.
 */
export const traitConvergeDiagnosticsSampling = (
  config: TraitConvergeDiagnosticsSamplingConfig,
): Layer.Layer<any, never, never> =>
  Layer.fiberRefLocallyScopedWith(currentTraitConvergeDiagnosticsSampling as any, () => config) as Layer.Layer<
    any,
    never,
    never
  >

/**
 * Layer that appends sinks to the currently installed set (does not replace existing sinks).
 */
export const appendSinks = (sinks: ReadonlyArray<Sink>): Layer.Layer<any, never, never> =>
  Layer.fiberRefLocallyScopedWith(currentDebugSinks, (current) => [...current, ...sinks]) as Layer.Layer<
    any,
    never,
    never
  >

// Open-ended kind tag for RuntimeDebugEventRef; `(string & {})` keeps literal auto-completion
// while still allowing arbitrary string kinds.
export type RuntimeDebugEventKind =
  | 'action'
  | 'state'
  | 'service'
  | 'process'
  | 'trait-computed'
  | 'trait-link'
  | 'trait-source'
  | 'lifecycle'
  | 'react-render'
  | 'devtools'
  | 'diagnostic'
  | (string & {})

/**
 * Structured, serializable view of a Debug.Event for Devtools / Runtime consumers.
 * Produced by `toRuntimeDebugEventRef`.
 */
export interface RuntimeDebugEventRef {
  readonly eventId: string
  readonly eventSeq: number
  readonly moduleId: string
  readonly instanceId: string
  readonly runtimeLabel?: string
  readonly txnSeq: number
  readonly txnId?: string
  /**
   * linkId:
   * - Current operation chain id (shared by boundary ops in the same chain).
   * - Created by Runtime at the boundary root and propagated via FiberRef across nested/cross-module chains.
   */
  readonly linkId?: string
  readonly timestamp: number
  readonly kind: RuntimeDebugEventKind
  readonly label: string
  readonly meta?: JsonValue
  readonly errorSummary?: SerializableErrorSummary
  readonly downgrade?: {
    readonly reason?: 'non_serializable' | 'oversized' | 'unknown'
  }
}

// Reasons a txn was routed / yielded / coalesced by the lane scheduler.
export type TxnLaneEvidenceReason =
  | 'disabled'
  | 'forced_off'
  | 'forced_sync'
  | 'queued_non_urgent'
  | 'preempted_by_urgent'
  | 'budget_yield'
  | 
'coalesced'
  | 'canceled'
  | 'max_lag_forced'
  | 'starvation_protection'

// Why a non-urgent lane slice stopped early ('none' means it ran to completion).
export type TxnLaneNonUrgentYieldReason = 'none' | 'input_pending' | 'budget_exceeded' | 'forced_frame_yield'

/**
 * Slim, exportable evidence describing how one txn was scheduled by the lanes:
 * anchor (which txn), chosen lane, effective policy, backlog counters, optional
 * budget/starvation details, and the list of scheduling reasons.
 */
export type TxnLaneEvidence = {
  readonly anchor: {
    readonly moduleId: string
    readonly instanceId: string
    readonly txnSeq: number
    readonly opSeq?: number
  }
  readonly lane: 'urgent' | 'nonUrgent'
  readonly kind: string
  readonly policy: {
    readonly enabled: boolean
    readonly overrideMode?: 'forced_off' | 'forced_sync'
    readonly configScope: 'provider' | 'runtime_module' | 'runtime_default' | 'builtin'
    readonly budgetMs: number
    readonly debounceMs: number
    readonly maxLagMs: number
    readonly allowCoalesce: boolean
    readonly yieldStrategy?: 'baseline' | 'inputPending'
    readonly queueMode?: 'fifo' | 'lanes'
  }
  readonly backlog: {
    readonly pendingCount: number
    readonly ageMs?: number
    readonly coalescedCount?: number
    readonly canceledCount?: number
  }
  readonly budget?: {
    readonly budgetMs?: number
    readonly sliceDurationMs?: number
    readonly yieldCount?: number
    readonly yielded?: boolean
    readonly yieldReason?: TxnLaneNonUrgentYieldReason
  }
  readonly starvation?: {
    readonly triggered?: boolean
    readonly reason?: string
  }
  readonly reasons: ReadonlyArray<TxnLaneEvidenceReason>
}

// Process-wide monotonically increasing event sequence (resettable for tests/replay).
let nextGlobalEventSeq = 0

/**
 * Resets the global Debug event sequence counter back to zero.
 */
export const clearRuntimeDebugEventSeq = (): void => {
  nextGlobalEventSeq = 0
}

// Returns the next event sequence number (1-based).
const nextEventSeq = (): number => {
  nextGlobalEventSeq += 1
  return nextGlobalEventSeq
}

// Stable event id of the form `<instanceId>::e<eventSeq>`.
const makeEventId = (instanceId: string, eventSeq: number): string => `${instanceId}::e${eventSeq}`

type DowngradeReason = JsonDowngradeReason | ErrorDowngradeReason

/**
 * Merges two downgrade reasons, keeping the most severe:
 * non_serializable > oversized > unknown. An undefined side yields the other.
 */
const mergeDowngrade = (
  current: DowngradeReason | undefined,
  next: DowngradeReason | undefined,
): DowngradeReason | undefined => {
  if (!current) return next
  if (!next) return current
  if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'
  if (current === 'oversized' || next === 'oversized') return 'oversized'
  return 'unknown'
}

// In browsers, to reduce duplicated noise caused by React StrictMode, etc.,
// de-duplicate lifecycle:error and diagnostic events: print the same moduleId+payload only once.
// NOTE(review): these sets grow for the page lifetime (never evicted) — presumably acceptable
// for dev consoles; confirm if long-lived sessions matter.
const browserLifecycleSeen = new Set<string>()
const browserDiagnosticSeen = new Set<string>()

// Align trace:react-render events with the most recent state:update txn (UI-only association).
const lastTxnByInstance = new Map<string, { readonly txnId: string; readonly txnSeq: number }>()

// trace:react-render / trace:react-selector may enter the sink before state:update (reordering due to concurrency/scheduling).
// To provide usable txn anchors in Devtools/UI, we allow a one-time backfill for refs missing txn fields.
const pendingTxnAlignmentByInstance = new Map<string, Array<RuntimeDebugEventRef>>()

// Queues a ref that is still missing txn fields; bounded to the 64 most recent refs per instance.
const enqueuePendingTxnAlignment = (instanceId: string, ref: RuntimeDebugEventRef): void => {
  const list = pendingTxnAlignmentByInstance.get(instanceId)
  if (!list) {
    pendingTxnAlignmentByInstance.set(instanceId, [ref])
    return
  }
  list.push(ref)
  if (list.length > 64) {
    list.shift()
  }
}

// Backfills txnId/txnSeq (mutating the queued refs in place) once the matching txn
// becomes known via state:update, then clears the per-instance queue.
const backfillPendingTxnAlignment = (
  instanceId: string,
  txn: { readonly txnId: string; readonly txnSeq: number },
): void => {
  const pending = pendingTxnAlignmentByInstance.get(instanceId)
  if (!pending || pending.length === 0) {
    pendingTxnAlignmentByInstance.delete(instanceId)
    return
  }

  for (const ref of pending) {
    const anyRef: any = ref as any
    if (anyRef.txnId == null) {
      anyRef.txnId = txn.txnId
    }
    if (typeof anyRef.txnSeq !== 'number' || anyRef.txnSeq <= 0) {
      anyRef.txnSeq = txn.txnSeq
    }
  }

  pendingTxnAlignmentByInstance.delete(instanceId)
}

// Logs a lifecycle:error event via Effect.logError, with a pretty-printed cause and structured annotations.
const lifecycleErrorLog = (event: Extract<Event, { readonly type: 'lifecycle:error' }>) => {
  const moduleId = event.moduleId ??
'unknown'
  const causePretty = (() => {
    try {
      return Cause.pretty(event.cause as Cause.Cause<unknown>, {
        renderErrorCause: true,
      })
    } catch {
      // Cause.pretty may throw on exotic causes; fall back to JSON, then to String.
      try {
        return JSON.stringify(event.cause, null, 2)
      } catch {
        return String(event.cause)
      }
    }
  })()

  const message = `[Logix][module=${moduleId}] lifecycle:error\n${causePretty}`

  return Effect.logError(message).pipe(
    Effect.annotateLogs({
      'logix.moduleId': moduleId,
      'logix.event': 'lifecycle:error',
      'logix.cause': causePretty,
    }),
  )
}

// Logs a diagnostic event at a level matching its severity (warning/info/error),
// with code/message/hint/actionTag attached as structured log annotations.
const diagnosticLog = (event: Extract<Event, { readonly type: 'diagnostic' }>) => {
  const moduleId = event.moduleId ?? 'unknown'
  const header = `[Logix][module=${moduleId}] diagnostic(${event.severity})`
  const detail = `code=${event.code} message=${event.message}${
    event.actionTag ? ` action=${event.actionTag}` : ''
  }${event.hint ? `\nhint: ${event.hint}` : ''}`
  const msg = `${header}\n${detail}`

  const base =
    event.severity === 'warning'
      ? Effect.logWarning(msg)
      : event.severity === 'info'
        ? Effect.logInfo(msg)
        : Effect.logError(msg)

  const annotations: Record<string, unknown> = {
    'logix.moduleId': moduleId,
    'logix.event': `diagnostic(${event.severity})`,
    'logix.diagnostic.code': event.code,
    'logix.diagnostic.message': event.message,
  }
  // hint / actionTag are annotated only when present.
  if (event.hint) {
    annotations['logix.diagnostic.hint'] = event.hint
  }
  if (event.actionTag) {
    annotations['logix.diagnostic.actionTag'] = event.actionTag
  }

  return base.pipe(Effect.annotateLogs(annotations))
}

/**
 * Default Layer composition based on FiberRef.currentDebugSinks:
 * - Uses Layer.locallyScoped to inject Debug sinks via FiberRef state.
 * - Avoids misusing FiberRef as a Context.Tag.
 */
export const noopLayer = Layer.locallyScoped(currentDebugSinks, [])

/**
 * errorOnlyLayer:
 * - Default DebugSink implementation that only cares about lifecycle:error events.
 * - Suitable as a "minimum observability" layer so fatal errors don't silently disappear.
 * - Other events (module:init/destroy, action:dispatch, state:update) are not recorded by default.
 */
const errorOnlySink: Sink = {
  record: (event: Event) =>
    event.type === 'lifecycle:error'
      ? lifecycleErrorLog(event)
      : event.type === 'diagnostic' && event.severity !== 'info'
        ? diagnosticLog(event)
        : Effect.void,
}

export const errorOnlyLayer = Layer.locallyScoped(currentDebugSinks, [errorOnlySink])

// True when the installed sinks are exactly the built-in errorOnlySink; enables record()'s fast path.
export const isErrorOnlyOnlySinks = (sinks: ReadonlyArray<Sink>): boolean => sinks.length === 1 && sinks[0] === errorOnlySink

/**
 * consoleLayer:
 * - Full debug layer that logs all Debug events via Effect logs (logfmt / structured).
 * - Suitable as an observability layer for general environments (Node / tests).
 */
const consoleSink: Sink = {
  record: (event: Event) =>
    event.type === 'lifecycle:error'
      ? lifecycleErrorLog(event)
      : event.type === 'diagnostic'
        ?
diagnosticLog(event)
        : Effect.logDebug({ debugEvent: event }),
}

export const consoleLayer = Layer.locallyScoped(currentDebugSinks, [consoleSink])

// True only in real browser environments (both window and document present).
const isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined'

// Shared browser console rendering logic used by the default DebugSink and browserConsoleLayer.
// Renders trace:* as collapsed groups, de-dupes lifecycle:error/diagnostic, and drops everything else.
const renderBrowserConsoleEvent = (event: Event): Effect.Effect<void> => {
  // trace:* events: shown as separate groups in browsers for Playground / DevTools observation.
  if (typeof (event as any).type === 'string' && (event as any).type.startsWith('trace:')) {
    const moduleId = (event as any).moduleId ?? 'unknown'
    const type = (event as any).type

    return Effect.sync(() => {
      // eslint-disable-next-line no-console
      console.groupCollapsed(
        '%c[Logix]%c trace %c' + moduleId + '%c ' + String(type),
        'color:#6b7280;font-weight:bold', // tag
        'color:#3b82f6', // label
        'color:#9ca3af', // module id
        'color:#6b7280', // type
      )
      // eslint-disable-next-line no-console
      console.log(event)
      // eslint-disable-next-line no-console
      console.groupEnd()
    })
  }

  if (event.type === 'lifecycle:error') {
    const moduleId = event.moduleId ?? 'unknown'
    const causePretty = (() => {
      try {
        return Cause.pretty(event.cause as Cause.Cause<unknown>, { renderErrorCause: true })
      } catch {
        // Same fallback chain as lifecycleErrorLog: JSON first, then String.
        try {
          return JSON.stringify(event.cause, null, 2)
        } catch {
          return String(event.cause)
        }
      }
    })()

    // De-dupe by module + rendered cause (StrictMode double-invocation noise).
    const key = `${moduleId}|${causePretty}`
    if (browserLifecycleSeen.has(key)) {
      return Effect.void
    }
    browserLifecycleSeen.add(key)

    return Effect.sync(() => {
      // eslint-disable-next-line no-console
      console.groupCollapsed(
        '%c[Logix]%c lifecycle:error %c' + moduleId,
        'color:#ef4444;font-weight:bold', // tag
        'color:#ef4444', // label
        'color:#9ca3af', // module id
      )
      // eslint-disable-next-line no-console
      console.error(causePretty)
      // eslint-disable-next-line no-console
      console.groupEnd()
    })
  }

  if (event.type === 'diagnostic') {
    const moduleId = event.moduleId ?? 'unknown'
    const detail = `code=${event.code} message=${event.message}${
      event.actionTag ? ` action=${event.actionTag}` : ''
    }${event.hint ? `\nhint: ${event.hint}` : ''}`

    const color =
      event.severity === 'warning' ? 'color:#d97706' : event.severity === 'info' ? 'color:#3b82f6' : 'color:#ef4444'

    const label =
      event.severity === 'warning'
        ? 'diagnostic(warning)'
        : event.severity === 'info'
          ? 'diagnostic(info)'
          : 'diagnostic(error)'

    // De-dupe by module + code + message (StrictMode double-invocation noise).
    const key = `${moduleId}|${event.code}|${event.message}`
    if (browserDiagnosticSeen.has(key)) {
      return Effect.void
    }
    browserDiagnosticSeen.add(key)

    return Effect.sync(() => {
      // eslint-disable-next-line no-console
      console.groupCollapsed(
        '%c[Logix]%c ' + label + '%c module=' + moduleId,
        'color:#6b7280;font-weight:bold',
        color,
        'color:#9ca3af',
      )
      // severity maps directly to the matching console method.
      if (event.severity === 'warning') {
        // eslint-disable-next-line no-console
        console.warn(detail)
      } else if (event.severity === 'info') {
        // eslint-disable-next-line no-console
        console.info(detail)
      } else {
        // eslint-disable-next-line no-console
        console.error(detail)
      }
      // eslint-disable-next-line no-console
      console.groupEnd()
    })
  }

  // Other events are not printed to the browser console by default to avoid being too noisy during development.
  // For internal debug events, use a custom Debug sink or use consoleLayer in Node.
  return Effect.void
}

/**
 * Browser console debug layer:
 * - In browsers, uses console.groupCollapsed + colored labels to simulate pretty logger grouping.
 * - In non-browser environments, falls back to consoleLayer's Effect logging implementation.
 */
const browserConsoleSink: Sink = {
  record: (event: Event) => {
    if (!isBrowser) {
      // Non-browser: fall back to consoleLayer behavior (Effect.log*).
      return event.type === 'lifecycle:error'
        ? lifecycleErrorLog(event)
        : event.type === 'diagnostic'
          ?
diagnosticLog(event)
          : Effect.logDebug({ debugEvent: event })
    }

    return renderBrowserConsoleEvent(event)
  },
}

export const browserConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserConsoleSink])

/**
 * Browser diagnostic-only debug layer:
 * - In browsers, prints only lifecycle:error + diagnostic(warning/error) via console.groupCollapsed.
 * - Drops trace:* and other high-frequency events from the browser console (use DevtoolsHub instead).
 * - In non-browser environments, behaves like errorOnlySink (Effect.log*).
 */
const browserDiagnosticConsoleSink: Sink = {
  record: (event: Event) => {
    if (!isBrowser) {
      return event.type === 'lifecycle:error'
        ? lifecycleErrorLog(event)
        : event.type === 'diagnostic' && event.severity !== 'info'
          ? diagnosticLog(event)
          : Effect.void
    }

    return event.type === 'lifecycle:error' || (event.type === 'diagnostic' && event.severity !== 'info')
      ? renderBrowserConsoleEvent(event)
      : Effect.void
  },
}

export const browserDiagnosticConsoleLayer = Layer.locallyScoped(currentDebugSinks, [browserDiagnosticConsoleSink])

/**
 * Browser-friendly Logger layer: replaces the default logger with Effect's pretty logger (browser mode).
 * - Avoids hand-written console styles; reuses Effect's colored/grouped formatting.
 * - Safely degrades to the default logger in server environments.
 */
export const browserPrettyLoggerLayer = Logger.replace(
  Logger.defaultLogger,
  Logger.prettyLogger({ mode: 'browser', colors: true }),
)

/**
 * defaultLayer:
 * - Public default layer; currently equivalent to errorOnlyLayer.
 * - Records lifecycle:error only, avoiding a large volume of action/state logs by default.
 */
export const defaultLayer = errorOnlyLayer

/**
 * Records a Debug.Event into the currently installed sinks.
 * - Fast paths for the errorOnly default and the no-sink case skip all enrichment work.
 * - Otherwise enriches the event in place (timestamp / runtimeLabel / txnId / linkId),
 *   gated by the current diagnostics level, then fans out to every sink.
 */
export const record = (event: Event) =>
  Effect.gen(function* () {
    const sinks = yield* FiberRef.get(currentDebugSinks)

    // Fast path: production default installs errorOnlyLayer (sinks=1).
    // Avoid paying diagnostics FiberRef + enrichment costs for high-frequency events that are always dropped by errorOnly.
    if (isErrorOnlyOnlySinks(sinks)) {
      if (event.type === 'lifecycle:error') {
        yield* lifecycleErrorLog(event)
        return
      }
      if (event.type === 'diagnostic') {
        if (event.severity !== 'info') {
          yield* diagnosticLog(event)
        } else {
          yield* Effect.void
        }
        return
      }
      yield* Effect.void
      return
    }

    // Fast path: when no sinks are installed, only a small subset of events are ever surfaced.
    // Avoid paying per-event FiberRef + enrichment costs for high-frequency events like state:update.
    if (sinks.length === 0) {
      if (isBrowser) {
        if (event.type === 'lifecycle:error' || event.type === 'diagnostic') {
          yield* renderBrowserConsoleEvent(event)
          return
        }
        yield* Effect.void
        return
      }

      if (event.type === 'lifecycle:error') {
        yield* lifecycleErrorLog(event)
        return
      }
      if (event.type === 'diagnostic') {
        yield* diagnosticLog(event)
        return
      }
      yield* Effect.void
      return
    }

    // NOTE: enrichment below mutates the incoming event object through casts (no copy on the hot path).
    const enriched = event as Event

    const diagnosticsLevel = yield* FiberRef.get(currentDiagnosticsLevel)

    // Enrich Debug.Event with basic fields (enabled only when diagnosticsLevel!=off):
    // - timestamp: for Devtools/Timeline/Overview time aggregation; avoids UI-side "first observed time" distortion.
    // - runtimeLabel: from FiberRef for grouping by runtime (injected only when not already provided by the event).
    let now: number | undefined
    // Lazily captures Date.now() at most once per record() call.
    const getNow = (): number => {
      if (now === undefined) now = Date.now()
      return now
    }

    // diagnostics=off: keep near-zero cost; do not add timestamp for high-frequency events (avoid extra Date.now()).
    // Low-frequency events (lifecycle:error/diagnostic) may still get timestamp for easier debugging.
    if (
      enriched.timestamp === undefined &&
      (diagnosticsLevel !== 'off' || enriched.type === 'lifecycle:error' || enriched.type === 'diagnostic')
    ) {
      ;(enriched as any).timestamp = getNow()
    }
    if (diagnosticsLevel !== 'off' && enriched.runtimeLabel === undefined) {
      const runtimeLabel = yield* FiberRef.get(currentRuntimeLabel)
      if (runtimeLabel) {
        ;(enriched as any).runtimeLabel = runtimeLabel
      }
    }

    if (enriched.type === 'diagnostic' && (enriched as any).txnId === undefined) {
      const txnId = yield* FiberRef.get(currentTxnId)
      if (txnId) {
        ;(enriched as any).txnId = txnId
      }
    }
    // linkId is meaningful only for EffectOp events: avoid extra FiberRef reads on high-frequency events (state:update, etc.).
    if (
      diagnosticsLevel !== 'off' &&
      (enriched as any).type === 'trace:effectop' &&
      (enriched as any).linkId === undefined
    ) {
      const linkId = yield* FiberRef.get(EffectOpCore.currentLinkId)
      if (linkId) {
        ;(enriched as any).linkId = linkId
      }
    }

    // Single-sink fast path avoids the Effect.forEach overhead.
    if (sinks.length === 1) {
      yield* sinks[0]!.record(enriched)
      return
    }

    yield* Effect.forEach(sinks, (sink) => sink.record(enriched), { discard: true })
  })

/**
 * Normalizes internal Debug.Event into RuntimeDebugEventRef:
 * - Allows Devtools / Runtime to consume Debug events uniformly.
 * - Does not change DebugSink behavior; provides a structured view only.
 */
export const toRuntimeDebugEventRef = (
  event: Event,
  options?: {
    readonly diagnosticsLevel?: DiagnosticsLevel
    readonly eventSeq?: number
    readonly resolveConvergeStaticIr?: (staticIrDigest: string) => ConvergeStaticIrExport | undefined
    readonly onMetaProjection?: (projection: {
      readonly stats: JsonValueProjectionStats
      readonly downgrade?: JsonDowngradeReason
    }) => void
  },
): RuntimeDebugEventRef | undefined => {
  const diagnosticsLevel = options?.diagnosticsLevel ?? 'full'
  if (diagnosticsLevel === 'off') {
    return undefined
  }

  // light/sampled tiers keep slim meta payloads (heavy fields trimmed below).
  const isLightLike = diagnosticsLevel === 'light' || diagnosticsLevel === 'sampled'

  const timestamp =
    typeof event.timestamp === 'number' && Number.isFinite(event.timestamp) ?
event.timestamp : Date.now()\n\n const moduleIdRaw = (event as any).moduleId\n const moduleId = typeof moduleIdRaw === 'string' && moduleIdRaw.length > 0 ? moduleIdRaw : 'unknown'\n\n const instanceIdRaw = (event as any).instanceId\n const instanceId = typeof instanceIdRaw === 'string' && instanceIdRaw.length > 0 ? instanceIdRaw : 'unknown'\n\n const runtimeLabelRaw = (event as any).runtimeLabel\n const runtimeLabel = typeof runtimeLabelRaw === 'string' && runtimeLabelRaw.length > 0 ? runtimeLabelRaw : undefined\n\n const txnSeqRaw = (event as any).txnSeq\n const txnSeq =\n typeof txnSeqRaw === 'number' && Number.isFinite(txnSeqRaw) && txnSeqRaw >= 0 ? Math.floor(txnSeqRaw) : 0\n\n const txnIdRaw = (event as any).txnId\n const txnId =\n typeof txnIdRaw === 'string' && txnIdRaw.length > 0\n ? txnIdRaw\n : txnSeq > 0\n ? `${instanceId}::t${txnSeq}`\n : undefined\n\n const linkId = (() => {\n const linkIdRaw = (event as any).linkId\n if (typeof linkIdRaw === 'string' && linkIdRaw.length > 0) return linkIdRaw\n\n // trace:*: allow fallback extraction from data.meta.linkId (avoid UI diving into deep meta).\n if (typeof (event as any).type !== 'string' || !(event as any).type.startsWith('trace:')) {\n return undefined\n }\n\n const data: any = (event as any).data\n const meta: any = data?.meta\n const linkIdFromMeta = meta?.linkId\n if (typeof linkIdFromMeta === 'string' && linkIdFromMeta.length > 0) return linkIdFromMeta\n\n return undefined\n })()\n\n const eventSeqRaw = options?.eventSeq\n const eventSeq =\n typeof eventSeqRaw === 'number' && Number.isFinite(eventSeqRaw) && eventSeqRaw > 0\n ? 
Math.floor(eventSeqRaw)\n : nextEventSeq()\n const eventId = makeEventId(instanceId, eventSeq)\n\n const base = {\n eventId,\n eventSeq,\n moduleId,\n instanceId,\n runtimeLabel,\n txnSeq,\n txnId,\n linkId,\n timestamp,\n } as const\n\n let downgrade: DowngradeReason | undefined\n\n const withDowngrade = (ref: Omit<RuntimeDebugEventRef, 'downgrade'>): RuntimeDebugEventRef => {\n if (!downgrade) return ref\n return { ...ref, downgrade: { reason: downgrade } }\n }\n\n switch (event.type) {\n case 'module:init':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:init',\n })\n case 'module:destroy':\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'module:destroy',\n })\n case 'lifecycle:phase': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:phase' }>\n const metaInput = isLightLike\n ? { type: 'lifecycle:phase', phase: e.phase, name: e.name }\n : { type: 'lifecycle:phase', phase: e.phase, name: e.name, payload: e.payload }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: e.name,\n meta: metaProjection.value,\n })\n }\n case 'action:dispatch': {\n const action: any = (event as any).action\n const actionTagRaw = (event as any).actionTag\n const tag = typeof actionTagRaw === 'string' && actionTagRaw.length > 0 ? actionTagRaw : (action?._tag ?? action?.type)\n const label = String(tag ?? 'action:dispatch')\n const labelNormalized = label.length > 0 ? label : 'unknown'\n const unknownAction = (event as any).unknownAction === true ? true : undefined\n const metaInput = isLightLike\n ? { actionTag: labelNormalized, ...(unknownAction ? { unknownAction: true } : {}) }\n : { action, ...(unknownAction ? 
{ unknownAction: true } : {}) }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (unknownAction) {\n downgrade = mergeDowngrade(downgrade, 'unknown')\n }\n return withDowngrade({\n ...base,\n kind: 'action',\n label: labelNormalized,\n meta: metaProjection.value,\n })\n }\n case 'state:update': {\n const e = event as Extract<Event, { readonly type: 'state:update' }>\n\n const resolveDirtySetRootPaths = (): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n\n const digest = e.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return undefined\n\n const rootIds = dirtySet.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const rawId of rootIds) {\n if (typeof rawId !== 'number' || !Number.isFinite(rawId)) continue\n const id = Math.floor(rawId)\n if (id < 0) continue\n const path = (fieldPaths as any)[id] as unknown\n if (!Array.isArray(path) || path.length === 0) continue\n if (!path.every((seg) => typeof seg === 'string' && seg.length > 0)) continue\n out.push(path as any)\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const dirtySetWithRootPaths = (() => {\n const rootPaths = resolveDirtySetRootPaths()\n if (!rootPaths) return e.dirtySet\n const dirtySet = e.dirtySet as any\n if (!dirtySet || typeof dirtySet !== 'object' || Array.isArray(dirtySet)) return e.dirtySet\n return { ...dirtySet, rootPaths }\n })()\n\n const metaInput = isLightLike\n ? {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n }\n : {\n state: e.state,\n dirtySet: dirtySetWithRootPaths,\n patchCount: e.patchCount,\n patchesTruncated: e.patchesTruncated,\n patchesTruncatedReason: e.patchesTruncatedReason,\n staticIrDigest: e.staticIrDigest,\n commitMode: e.commitMode,\n priority: e.priority,\n originKind: e.originKind,\n originName: e.originName,\n traitSummary: e.traitSummary,\n replayEvent: e.replayEvent,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n if (txnId) {\n lastTxnByInstance.set(instanceId, { txnId, txnSeq })\n backfillPendingTxnAlignment(instanceId, { txnId, txnSeq })\n }\n return withDowngrade({\n ...base,\n kind: 'state',\n label: 'state:update',\n meta: metaProjection.value,\n })\n }\n case 'process:start':\n case 'process:stop':\n case 'process:restart':\n case 'process:trigger':\n case 'process:dispatch':\n case 'process:error': {\n const e = event as Extract<\n Event,\n {\n readonly type:\n | 'process:start'\n | 'process:stop'\n | 'process:restart'\n | 'process:trigger'\n | 'process:dispatch'\n | 'process:error'\n }\n >\n\n const ts2 = typeof e.timestampMs === 'number' && Number.isFinite(e.timestampMs) ? 
e.timestampMs : timestamp\n\n const metaInput = {\n identity: e.identity,\n severity: e.severity,\n eventSeq: e.eventSeq,\n timestampMs: e.timestampMs,\n trigger: e.trigger,\n dispatch: e.dispatch,\n error: e.error,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const errorSummary =\n e.type === 'process:error' || e.type === 'process:restart'\n ? (e.error as any as SerializableErrorSummary | undefined)\n : undefined\n\n return withDowngrade({\n ...base,\n timestamp: ts2,\n kind: 'process',\n label: e.type,\n meta: metaProjection.value,\n errorSummary,\n })\n }\n case 'lifecycle:error': {\n const e = event as Extract<Event, { readonly type: 'lifecycle:error' }>\n const summary = toSerializableErrorSummary(e.cause)\n downgrade = mergeDowngrade(downgrade, summary.downgrade)\n const metaInput = isLightLike\n ? 
{ type: 'lifecycle:error', phase: e.phase, name: e.hook }\n : {\n type: 'lifecycle:error',\n phase: e.phase,\n name: e.hook,\n hook: e.hook,\n taskId: e.taskId,\n origin: e.origin,\n txnSeq: e.txnSeq,\n opSeq: e.opSeq,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'lifecycle',\n label: 'lifecycle:error',\n meta: metaProjection.value,\n errorSummary: summary.errorSummary,\n })\n }\n case 'diagnostic': {\n const e = event as Extract<Event, { readonly type: 'diagnostic' }>\n const metaInput = {\n code: e.code,\n severity: e.severity,\n message: e.message,\n hint: e.hint,\n actionTag: e.actionTag,\n kind: e.kind,\n trigger: e.trigger,\n }\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.code,\n meta: metaProjection.value,\n })\n }\n case 'warn:priority-inversion': {\n const e = event as Extract<Event, { readonly type: 'warn:priority-inversion' }>\n const metaInput = isLightLike\n ? 
{\n tickSeq: e.tickSeq,\n reason: e.reason,\n selectorId: e.selectorId,\n }\n : {\n tickSeq: e.tickSeq,\n reason: e.reason,\n selectorId: e.selectorId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.type,\n meta: metaProjection.value,\n })\n }\n case 'warn:microtask-starvation': {\n const e = event as Extract<Event, { readonly type: 'warn:microtask-starvation' }>\n const metaInput = isLightLike\n ? {\n tickSeq: e.tickSeq,\n microtaskChainDepth: e.microtaskChainDepth,\n }\n : {\n tickSeq: e.tickSeq,\n microtaskChainDepth: e.microtaskChainDepth,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'diagnostic',\n label: e.type,\n meta: metaProjection.value,\n })\n }\n default: {\n if (typeof event.type !== 'string' || !event.type.startsWith('trace:')) {\n return undefined\n }\n\n // trace:tick: runtime tick evidence; keep Slim payload even in light tier.\n if (event.type === 'trace:tick') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n tickSeq: data?.tickSeq,\n phase: data?.phase,\n schedule: data?.schedule,\n triggerSummary: data?.triggerSummary,\n anchors: data?.anchors,\n budget: data?.budget,\n backlog: data?.backlog,\n result: data?.result,\n }\n : {\n data,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:txn-lane: slim evidence for Txn Lanes (lane/backlog/reasons), used for Devtools summary and offline export.\n if (event.type === 'trace:txn-lane') {\n const data: any = (event as any).data\n const evidence = data?.evidence ?? data\n\n const metaProjection = projectJsonValue(evidence)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n const label =\n typeof evidence?.kind === 'string' && evidence.kind.length > 0 ? String(evidence.kind) : 'txn-lane'\n\n return withDowngrade({\n ...base,\n kind: 'txn-lane',\n label,\n meta: metaProjection.value,\n })\n }\n\n // trace:react-render / trace:react-selector: keep slim meta only (field trimming is handled by JsonValue projection).\n if (event.type === 'trace:react-render' || event.type === 'trace:react-selector') {\n const data: any = (event as any).data\n const metaProjection = projectJsonValue(\n isLightLike\n ? 
{\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n }\n : {\n componentLabel: data?.componentLabel,\n selectorKey: data?.selectorKey,\n fieldPaths: data?.fieldPaths,\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n strictModePhase: data?.strictModePhase,\n meta: data?.meta,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n const label =\n typeof data?.componentLabel === 'string' && data.componentLabel.length > 0\n ? data.componentLabel\n : event.type === 'trace:react-selector'\n ? 'react-selector'\n : 'react-render'\n const last = lastTxnByInstance.get(instanceId)\n const txnSeqFromMeta =\n typeof data?.meta?.txnSeq === 'number' && Number.isFinite(data.meta.txnSeq) && data.meta.txnSeq >= 0\n ? Math.floor(data.meta.txnSeq)\n : undefined\n const txnIdFromMeta =\n typeof data?.meta?.txnId === 'string' && data.meta.txnId.length > 0 ? data.meta.txnId : undefined\n const txnIdAligned = txnIdFromMeta ?? base.txnId ?? last?.txnId\n const txnSeqAligned = txnSeqFromMeta ?? (base.txnSeq > 0 ? base.txnSeq : (last?.txnSeq ?? base.txnSeq))\n const ref = withDowngrade({\n ...base,\n txnId: txnIdAligned,\n txnSeq: txnSeqAligned,\n kind: event.type === 'trace:react-selector' ? 
'react-selector' : 'react-render',\n label,\n meta: metaProjection.value,\n })\n\n if (instanceId !== 'unknown' && (ref.txnId == null || ref.txnSeq <= 0)) {\n enqueuePendingTxnAlignment(instanceId, ref)\n }\n\n return ref\n }\n\n // trace:selector:eval: SelectorGraph evaluation evidence within commit (used for txn→selector→render causal chain).\n if (event.type === 'trace:selector:eval') {\n const data: any = (event as any).data\n const metaInput = {\n selectorId: data?.selectorId,\n lane: data?.lane,\n producer: data?.producer,\n fallbackReason: data?.fallbackReason,\n readsDigest: data?.readsDigest,\n equalsKind: data?.equalsKind,\n changed: data?.changed,\n evalMs: data?.evalMs,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:exec-vm: Exec VM hit/miss evidence (049). In light tier we keep minimal summary fields.\n if (event.type === 'trace:exec-vm') {\n const data: any = (event as any).data\n const metaInput = {\n version: data?.version,\n stage: data?.stage,\n hit: data?.hit,\n reasonCode: data?.reasonCode ?? 
data?.reason,\n reasonDetail: data?.reasonDetail,\n execIrVersion: data?.execIrVersion,\n execIrHash: data?.execIrHash,\n serviceId: data?.serviceId,\n implId: data?.implId,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:converge: converge evidence must be exportable (JsonValue hard gate) and trims heavy fields in light tier.\n if (event.type === 'trace:trait:converge') {\n const resolveDirtyRootPaths = (args: {\n readonly staticIrDigest: unknown\n readonly rootIds: unknown\n }): ReadonlyArray<JsonValue> | undefined => {\n const resolve = options?.resolveConvergeStaticIr\n if (!resolve) return undefined\n const digest = args.staticIrDigest\n if (typeof digest !== 'string' || digest.length === 0) return undefined\n\n const rootIds = args.rootIds\n if (!Array.isArray(rootIds) || rootIds.length === 0) return undefined\n\n const ir = resolve(digest) as ConvergeStaticIrExport | undefined\n const fieldPaths = (ir as any)?.fieldPaths as unknown\n if (!Array.isArray(fieldPaths) || fieldPaths.length === 0) return undefined\n\n const out: Array<JsonValue> = []\n for (const id of rootIds) {\n if (typeof id !== 'number' || !Number.isFinite(id)) continue\n const idx = Math.floor(id)\n if (idx < 0 || idx >= fieldPaths.length) continue\n const path = fieldPaths[idx]\n if (Array.isArray(path)) {\n out.push(path as any)\n }\n }\n\n return out.length > 0 ? 
out : undefined\n }\n\n const enrichDirtyRootPaths = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n if (!dirty || typeof dirty !== 'object' || Array.isArray(dirty)) return value\n\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n if (!dirtyRootPaths) return value\n\n return {\n ...anyValue,\n dirty: {\n ...(dirty as any),\n rootPaths: dirtyRootPaths,\n },\n } as JsonValue\n }\n\n const stripHeavyLight = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtyRootPaths = resolveDirtyRootPaths({\n staticIrDigest: anyValue.staticIrDigest,\n rootIds: dirty?.rootIds,\n })\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? { reason: (dirty as any).reason } : null),\n ...(Array.isArray((dirty as any).rootIds) ? { rootIds: (dirty as any).rootIds } : null),\n ...(typeof (dirty as any).rootIdsTruncated === 'boolean'\n ? { rootIdsTruncated: (dirty as any).rootIdsTruncated }\n : null),\n ...(dirtyRootPaths ? { rootPaths: dirtyRootPaths } : null),\n }\n : undefined\n\n const { top3, dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const stripHeavySampled = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n\n const anyValue = value as any\n const dirty = anyValue.dirty\n const dirtySlim =\n dirty && typeof dirty === 'object' && !Array.isArray(dirty)\n ? {\n dirtyAll: (dirty as any).dirtyAll,\n ...(typeof (dirty as any).reason === 'string' ? 
{ reason: (dirty as any).reason } : null),\n }\n : undefined\n\n const { dirtyRoots, ...rest } = anyValue\n return (dirtySlim ? { ...rest, dirty: dirtySlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:converge' }>).data\n const metaInput =\n diagnosticsLevel === 'light'\n ? stripHeavyLight(data)\n : diagnosticsLevel === 'sampled'\n ? stripHeavySampled(data)\n : enrichDirtyRootPaths(data)\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:converge',\n label: 'trait:converge',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:check: validation diagnostics must be exportable and stay slim in light tier (keep key fields).\n if (event.type === 'trace:trait:check') {\n const stripHeavy = (value: JsonValue): JsonValue => {\n if (!value || typeof value !== 'object' || Array.isArray(value)) return value\n const anyValue = value as any\n const degraded = anyValue.degraded\n const degradedSlim =\n degraded && typeof degraded === 'object' && !Array.isArray(degraded)\n ? { kind: (degraded as any).kind }\n : undefined\n\n const { degraded: _degraded, ...rest } = anyValue\n return (degradedSlim ? { ...rest, degraded: degradedSlim } : rest) as JsonValue\n }\n\n const data = (event as Extract<Event, { readonly type: 'trace:trait:check' }>).data\n const metaInput = isLightLike ? 
stripHeavy(data) : data\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:check',\n label: 'trait:check',\n meta: metaProjection.value,\n })\n }\n\n // trace:trait:validate: validation decision summary must be exportable and slim in light tier (no heavy fields by default).\n if (event.type === 'trace:trait:validate') {\n const data = (event as Extract<Event, { readonly type: 'trace:trait:validate' }>).data\n const metaProjection = projectJsonValue(data)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'trait:validate',\n label: 'trait:validate',\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits: final traits snapshot must be exportable and slim in light tier (digest/count).\n if (event.type === 'trace:module:traits') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n digest: data?.digest,\n count: data?.count,\n }\n : {\n digest: data?.digest,\n count: data?.count,\n traits: data?.traits,\n provenanceIndex: data?.provenanceIndex,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:traits:conflict: conflict details must be exportable; avoid relying on truncated lifecycle:error messages.\n if (event.type === 'trace:module:traits:conflict') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? 
{\n conflictCount: data?.conflictCount,\n traitIds: data?.traitIds,\n }\n : {\n conflictCount: data?.conflictCount,\n conflicts: data?.conflicts,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:module:descriptor: keep key anchors even in light tier (avoid data being fully trimmed).\n if (event.type === 'trace:module:descriptor') {\n const data: any = (event as any).data\n const metaInput = isLightLike\n ? {\n id: data?.id,\n traits: data?.traits,\n source: data?.source,\n }\n : { data }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n\n // trace:effectop: keep slim op meta and prefer EffectOp.meta.moduleId when present.\n if (event.type === 'trace:effectop') {\n const data: any = (event as any).data\n const opMeta: any = data?.meta\n const opKind = (data?.kind ?? 'service') as RuntimeDebugEventKind\n const label = typeof data?.name === 'string' ? data.name : 'effectop'\n const moduleId2 = typeof opMeta?.moduleId === 'string' ? opMeta.moduleId : moduleId\n const txnId2 = typeof opMeta?.txnId === 'string' && opMeta.txnId.length > 0 ? opMeta.txnId : base.txnId\n const txnSeq2 =\n typeof opMeta?.txnSeq === 'number' && Number.isFinite(opMeta.txnSeq) && opMeta.txnSeq >= 0\n ? Math.floor(opMeta.txnSeq)\n : base.txnSeq\n\n const metaInput = isLightLike\n ? 
{\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n meta: opMeta,\n }\n : {\n id: data?.id,\n kind: data?.kind,\n name: data?.name,\n payload: data?.payload,\n meta: opMeta,\n }\n\n const metaProjection = projectJsonValue(metaInput)\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n\n return withDowngrade({\n ...base,\n moduleId: moduleId2,\n txnId: txnId2,\n txnSeq: txnSeq2,\n kind: opKind,\n label,\n meta: metaProjection.value,\n })\n }\n\n // Other trace:* events: categorize as devtools and trim meta by tier.\n const metaProjection = projectJsonValue(\n isLightLike\n ? {\n data: undefined,\n }\n : {\n data: (event as any).data,\n },\n )\n options?.onMetaProjection?.({\n stats: metaProjection.stats,\n downgrade: metaProjection.downgrade,\n })\n downgrade = mergeDowngrade(downgrade, metaProjection.downgrade)\n return withDowngrade({\n ...base,\n kind: 'devtools',\n label: event.type,\n meta: metaProjection.value,\n })\n }\n }\n}\n","export type JsonValue =\n | null\n | boolean\n | number\n | string\n | ReadonlyArray<JsonValue>\n | { readonly [key: string]: JsonValue }\n\nexport type DowngradeReason = 'non_serializable' | 'oversized' | 'unknown'\n\nexport const isJsonValue = (input: unknown): input is JsonValue => {\n const seen = new WeakSet<object>()\n\n const loop = (value: unknown, depth: number): value is JsonValue => {\n if (depth > 64) return false\n if (value === null) return true\n\n switch (typeof value) {\n case 'string':\n case 'boolean':\n return true\n case 'number':\n return Number.isFinite(value)\n case 'object': {\n if (Array.isArray(value)) {\n if (seen.has(value)) return false\n seen.add(value)\n for (const item of value) {\n if (!loop(item, depth + 1)) return false\n }\n return true\n }\n\n if (!isPlainRecord(value)) return false\n if (seen.has(value)) return false\n seen.add(value)\n\n for (const v of 
Object.values(value)) {\n if (!loop(v, depth + 1)) return false\n }\n\n return true\n }\n default:\n return false\n }\n }\n\n return loop(input, 0)\n}\n\nexport interface JsonValueProjectionStats {\n readonly dropped: number\n readonly oversized: number\n readonly nonSerializable: number\n}\n\nexport interface JsonValueProjection {\n readonly value: JsonValue\n readonly stats: JsonValueProjectionStats\n readonly downgrade?: DowngradeReason\n}\n\nexport interface JsonValueProjectOptions {\n readonly maxDepth?: number\n readonly maxObjectKeys?: number\n readonly maxArrayLength?: number\n readonly maxStringLength?: number\n readonly maxJsonBytes?: number\n readonly oversizedPreviewBytes?: number\n}\n\nconst defaultOptions: Required<JsonValueProjectOptions> = {\n maxDepth: 6,\n maxObjectKeys: 32,\n maxArrayLength: 32,\n maxStringLength: 256,\n maxJsonBytes: 4 * 1024,\n oversizedPreviewBytes: 256,\n}\n\nconst truncateString = (value: string, maxLen: number, stats: MutableStats): string => {\n if (value.length <= maxLen) return value\n stats.oversized += 1\n return value.slice(0, maxLen)\n}\n\ntype MutableStats = {\n dropped: number\n oversized: number\n nonSerializable: number\n}\n\nconst mergeDowngrade = (current: DowngradeReason | undefined, next: DowngradeReason): DowngradeReason => {\n if (!current) return next\n if (current === 'non_serializable' || next === 'non_serializable') return 'non_serializable'\n if (current === 'oversized' || next === 'oversized') return 'oversized'\n return 'unknown'\n}\n\nfunction isPlainRecord(value: unknown): value is Record<string, unknown> {\n if (typeof value !== 'object' || value === null) return false\n const proto = Object.getPrototypeOf(value)\n return proto === Object.prototype || proto === null\n}\n\nconst asNumber = (value: number, stats: MutableStats): JsonValue => {\n if (Number.isFinite(value)) return value\n stats.nonSerializable += 1\n return String(value)\n}\n\nconst toJsonValueInternal = (\n input: unknown,\n options: 
Required<JsonValueProjectOptions>,\n stats: MutableStats,\n seen: WeakSet<object>,\n depth: number,\n): JsonValue => {\n if (input === null) return null\n\n switch (typeof input) {\n case 'string':\n return truncateString(input, options.maxStringLength, stats)\n case 'number':\n return asNumber(input, stats)\n case 'boolean':\n return input\n case 'bigint':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'symbol':\n stats.nonSerializable += 1\n return truncateString(input.toString(), options.maxStringLength, stats)\n case 'function':\n stats.nonSerializable += 1\n return '[Function]'\n case 'undefined':\n stats.dropped += 1\n return null\n }\n\n // object\n if (depth >= options.maxDepth) {\n stats.oversized += 1\n return '[Truncated]'\n }\n\n if (input instanceof Date) {\n return input.toISOString()\n }\n\n if (input instanceof Error) {\n stats.nonSerializable += 1\n return {\n name: truncateString(input.name, options.maxStringLength, stats),\n message: truncateString(input.message, options.maxStringLength, stats),\n }\n }\n\n if (typeof input === 'object') {\n if (seen.has(input)) {\n stats.nonSerializable += 1\n return '[Circular]'\n }\n seen.add(input)\n }\n\n if (Array.isArray(input)) {\n const out: Array<JsonValue> = []\n const limit = Math.min(input.length, options.maxArrayLength)\n for (let i = 0; i < limit; i++) {\n out.push(toJsonValueInternal(input[i], options, stats, seen, depth + 1))\n }\n if (input.length > limit) {\n stats.oversized += 1\n out.push(`[...${input.length - limit} more]`)\n }\n return out\n }\n\n if (!isPlainRecord(input)) {\n stats.nonSerializable += 1\n return truncateString(String(input), options.maxStringLength, stats)\n }\n\n const keys = Object.keys(input).sort()\n const limit = Math.min(keys.length, options.maxObjectKeys)\n const out: Record<string, JsonValue> = {}\n\n for (let i = 0; i < limit; i++) {\n const rawKey = keys[i]!\n const rawValue = (input as any)[rawKey]\n 
const key = truncateString(rawKey, options.maxStringLength, stats)\n if (rawValue === undefined) {\n stats.dropped += 1\n continue\n }\n out[key] = toJsonValueInternal(rawValue, options, stats, seen, depth + 1)\n }\n\n if (keys.length > limit) {\n stats.oversized += 1\n out.__truncatedKeys = keys.length - limit\n }\n\n return out\n}\n\nexport const projectJsonValue = (input: unknown, options?: JsonValueProjectOptions): JsonValueProjection => {\n const resolved: Required<JsonValueProjectOptions> = { ...defaultOptions, ...(options ?? {}) }\n const stats: MutableStats = { dropped: 0, oversized: 0, nonSerializable: 0 }\n const seen = new WeakSet<object>()\n\n let downgrade: DowngradeReason | undefined\n const value = toJsonValueInternal(input, resolved, stats, seen, 0)\n\n if (stats.nonSerializable > 0) {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n }\n if (stats.oversized > 0) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n }\n\n // Hard gate: ensure JSON.stringify never throws and respect the max byte budget.\n try {\n const json = JSON.stringify(value)\n if (json.length > resolved.maxJsonBytes) {\n downgrade = mergeDowngrade(downgrade, 'oversized')\n const preview = json.slice(0, Math.min(resolved.oversizedPreviewBytes, resolved.maxJsonBytes))\n return {\n value: {\n _tag: 'oversized',\n bytes: json.length,\n preview,\n },\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized + 1,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n }\n } catch {\n downgrade = mergeDowngrade(downgrade, 'non_serializable')\n return {\n value: '[Unserializable]',\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable + 1,\n },\n downgrade,\n }\n }\n\n return {\n value,\n stats: {\n dropped: stats.dropped,\n oversized: stats.oversized,\n nonSerializable: stats.nonSerializable,\n },\n downgrade,\n }\n}\n","import { Cause } from 'effect'\n\nexport type DowngradeReason = 
'non_serializable' | 'oversized' | 'unknown'\n\nexport interface SerializableErrorSummary {\n readonly message: string\n readonly name?: string\n readonly code?: string\n readonly hint?: string\n}\n\nexport interface ErrorSummaryResult {\n readonly errorSummary: SerializableErrorSummary\n readonly downgrade?: DowngradeReason\n}\n\nconst truncate = (value: string, maxLen: number): { readonly value: string; readonly truncated: boolean } => {\n if (value.length <= maxLen) return { value, truncated: false }\n return { value: value.slice(0, maxLen), truncated: true }\n}\n\nconst safeStringify = (value: unknown): { readonly ok: true; readonly json: string } | { readonly ok: false } => {\n try {\n return { ok: true, json: JSON.stringify(value) }\n } catch {\n return { ok: false }\n }\n}\n\nconst getMessageFromUnknown = (cause: unknown): string => {\n if (typeof cause === 'string') return cause\n if (typeof cause === 'number' || typeof cause === 'boolean' || typeof cause === 'bigint') return String(cause)\n if (cause instanceof Error) return cause.message || cause.name || 'Error'\n if (cause && typeof cause === 'object' && 'message' in (cause as any) && typeof (cause as any).message === 'string') {\n return (cause as any).message as string\n }\n\n // Try Effect Cause pretty (best-effort). This may include more details than needed,\n // so callers MUST still treat it as an untrusted/oversized string and truncate.\n try {\n const pretty = Cause.pretty(cause as Cause.Cause<unknown>, { renderErrorCause: true })\n if (typeof pretty === 'string' && pretty.length > 0) return pretty\n } catch {\n // ignore\n }\n\n return 'Unknown error'\n}\n\nexport const toSerializableErrorSummary = (\n cause: unknown,\n options?: {\n readonly maxMessageLength?: number\n },\n): ErrorSummaryResult => {\n const maxMessageLength = options?.maxMessageLength ?? 
256\n\n const messageRaw = getMessageFromUnknown(cause)\n const { value: message, truncated } = truncate(messageRaw, maxMessageLength)\n\n const summary: { message: string; name?: string; code?: string; hint?: string } = {\n message,\n }\n\n if (cause instanceof Error) {\n if (cause.name && cause.name !== 'Error') summary.name = cause.name\n const anyCause = cause as any\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n else if (typeof anyCause.code === 'number' && Number.isFinite(anyCause.code)) summary.code = String(anyCause.code)\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n return {\n errorSummary: summary,\n downgrade: truncated ? 'oversized' : undefined,\n }\n }\n\n if (cause && typeof cause === 'object') {\n const anyCause = cause as any\n if (typeof anyCause.name === 'string' && anyCause.name.length > 0) summary.name = anyCause.name\n if (typeof anyCause.code === 'string' && anyCause.code.length > 0) summary.code = anyCause.code\n if (typeof anyCause.hint === 'string' && anyCause.hint.length > 0) summary.hint = anyCause.hint\n }\n\n // If the original cause isn't JSON-serializable, mark it explicitly.\n const stringifyResult = safeStringify(cause)\n if (!stringifyResult.ok) {\n return {\n errorSummary: summary,\n downgrade: 'non_serializable',\n }\n }\n\n if (truncated) {\n return {\n errorSummary: summary,\n downgrade: 'oversized',\n }\n }\n\n if (message === 'Unknown error') {\n return {\n errorSummary: summary,\n downgrade: 'unknown',\n }\n }\n\n return { errorSummary: summary }\n}\n","// EffectOp core model and middleware composition logic.\n// For higher-level Runtime / Devtools integration, see:\n// specs/000-module-traits-runtime/references/effectop-and-middleware.md\n\nimport { Context, Effect, FiberRef } from 'effect'\n\n/**\n * currentLinkId:\n * - Stores the current operation chain id (linkId) in a FiberRef.\n * - Used to correlate multiple 
boundary ops within the same chain (can be shared across modules via the same FiberRef).\n */\nexport const currentLinkId = FiberRef.unsafeMake<string | undefined>(undefined)\n\n/**\n * OperationPolicy:\n * - Local policy markers (intent only; no rule logic attached).\n *\n * Constraints (enforced by Runtime/middleware together):\n * - Only observation-only capabilities (Observer) may be disabled; global guards must not be disabled.\n */\nexport interface OperationPolicy {\n readonly disableObservers?: boolean\n}\n\n/**\n * OperationRejected:\n * - Unified failure result when a guard rejects execution.\n * - Semantics: explicit failure with no business side effects (rejection must happen before user code executes).\n */\nexport interface OperationRejected {\n readonly _tag: 'OperationRejected'\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}\n\n/**\n * OperationError:\n * - Any boundary operation executed via EffectOp may be explicitly rejected by Guard middleware.\n * - Therefore, the middleware error channel must allow OperationRejected to be added.\n */\nexport type OperationError<E> = E | OperationRejected\n\nexport const makeOperationRejected = (params: {\n readonly message: string\n readonly kind?: EffectOp['kind']\n readonly name?: string\n readonly linkId?: string\n readonly details?: unknown\n}): OperationRejected => ({\n _tag: 'OperationRejected',\n message: params.message,\n kind: params.kind,\n name: params.name,\n linkId: params.linkId,\n details: params.details,\n})\n\n/**\n * EffectOp: a unified representation of an Effect execution at an \"observable boundary\".\n *\n * - Out / Err / Env are the generic parameters of the underlying Effect.\n * - meta carries structured context needed by Devtools / Middleware.\n */\nexport interface EffectOp<Out = unknown, Err = unknown, Env = unknown> {\n readonly id: string\n readonly kind:\n | 'action'\n | 'flow'\n | 
'state'\n | 'service'\n | 'lifecycle'\n | 'trait-computed'\n | 'trait-link'\n | 'trait-source'\n | 'devtools'\n readonly name: string\n readonly payload?: unknown\n readonly meta?: {\n /**\n * linkId:\n * - Operation chain id: multiple boundary ops in the same chain must share it.\n * - Runtime ensures this field is populated on all boundary ops.\n */\n linkId?: string\n moduleId?: string\n instanceId?: string\n runtimeLabel?: string\n txnId?: string\n txnSeq?: number\n opSeq?: number\n fieldPath?: string\n deps?: ReadonlyArray<string>\n from?: string\n to?: string\n traitNodeId?: string\n stepId?: string\n resourceId?: string\n key?: unknown\n trace?: ReadonlyArray<string>\n tags?: ReadonlyArray<string>\n policy?: OperationPolicy\n // Reserved extension slot for middleware/devtools to attach extra information.\n readonly [k: string]: unknown\n }\n readonly effect: Effect.Effect<Out, Err, Env>\n}\n\n/**\n * Middleware: the general middleware model for observing / wrapping / guarding EffectOps.\n */\nexport type Middleware = <A, E, R>(op: EffectOp<A, E, R>) => Effect.Effect<A, OperationError<E>, R>\n\nexport type MiddlewareStack = ReadonlyArray<Middleware>\n\n/**\n * EffectOpMiddlewareEnv:\n * - A Service in Effect Env that carries the current Runtime's MiddlewareStack.\n * - Injected by Runtime.ts when constructing a ManagedRuntime.\n * - Runtime code (e.g. 
StateTrait.install) uses this Service to decide which MiddlewareStack to use.\n */\nexport interface EffectOpMiddlewareEnv {\n readonly stack: MiddlewareStack\n}\n\nexport class EffectOpMiddlewareTag extends Context.Tag('Logix/EffectOpMiddleware')<\n EffectOpMiddlewareTag,\n EffectOpMiddlewareEnv\n>() {}\n\n/**\n * composeMiddleware:\n * - Composes Middleware from \"outer to inner\" in declaration order:\n * - stack = [mw1, mw2] => mw1 -> mw2 -> effect -> mw2 -> mw1\n * - Matches the reduceRight example in the reference docs.\n */\nexport const composeMiddleware = (stack: MiddlewareStack): Middleware => {\n return <A, E, R>(op: EffectOp<A, E, R>): Effect.Effect<A, OperationError<E>, R> =>\n stack.reduceRight<Effect.Effect<A, OperationError<E>, R>>(\n (eff, mw) => mw({ ...op, effect: eff } as any) as any,\n op.effect as Effect.Effect<A, OperationError<E>, R>,\n )\n}\n\n/**\n * runWithMiddleware:\n * - Executes a given EffectOp with a MiddlewareStack according to the composition rules.\n * - If the stack is empty, returns op.effect directly.\n */\nexport const runWithMiddleware = <A, E, R>(op: EffectOp<A, E, R>, stack: MiddlewareStack): Effect.Effect<A, E, R> => {\n return Effect.gen(function* () {\n const existing = yield* FiberRef.get(currentLinkId)\n const metaLinkId = (op.meta as any)?.linkId\n const linkId = typeof metaLinkId === 'string' && metaLinkId.length > 0 ? metaLinkId : (existing ?? op.id)\n\n const nextOp: EffectOp<A, E, R> = {\n ...op,\n meta: {\n ...(op.meta ?? {}),\n linkId,\n },\n }\n\n const program = stack.length ? 
composeMiddleware(stack)(nextOp) : nextOp.effect\n\n // linkId is created at the boundary root and reused for nested ops (the FiberRef is the global single source of truth).\n // NOTE: middleware may explicitly reject with OperationRejected.\n return yield* Effect.locally(currentLinkId, linkId)(program as any)\n }) as Effect.Effect<A, E, R>\n}\n","import { Context, Effect, Layer } from 'effect'\nimport type { TraitConvergeRequestedMode } from '../../state-trait/model.js'\nimport type { ReadQueryStrictGateConfig } from './ReadQuery.js'\nimport { getGlobalHostScheduler, type HostScheduler } from './HostScheduler.js'\nimport { makeRuntimeStore, type RuntimeStore } from './RuntimeStore.js'\nimport { makeTickScheduler, type TickScheduler, type TickSchedulerConfig } from './TickScheduler.js'\nimport { makeDeclarativeLinkRuntime, type DeclarativeLinkRuntime } from './DeclarativeLinkRuntime.js'\n\n// Unified runtime env detection, avoiding bundlers inlining NODE_ENV at build time.\nexport const getNodeEnv = (): string | undefined => {\n try {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const env = (globalThis as any)?.process?.env\n return typeof env?.NODE_ENV === 'string' ? env.NODE_ENV : undefined\n } catch {\n return undefined\n }\n}\n\nexport const isDevEnv = (): boolean => getNodeEnv() !== 'production'\n\nexport type StateTransactionInstrumentation = 'full' | 'light'\n\n/**\n * getDefaultStateTxnInstrumentation:\n * - Currently chooses default instrumentation by NODE_ENV:\n * - dev / test: full (keep patches and snapshots for debugging).\n * - production: light (keep minimal semantics to reduce overhead).\n * - May evolve with finer-grained overrides in Runtime.make / Module.make.\n */\nexport const getDefaultStateTxnInstrumentation = (): StateTransactionInstrumentation => (isDevEnv() ? 
'full' : 'light')\n\n/**\n * Runtime-level StateTransaction config Service:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime.make can read runtime-level defaults from Env.\n *\n * Notes:\n * - instrumentation is only a runtime-level default.\n * - Explicit instrumentation in ModuleImpl / ModuleRuntimeOptions has higher priority.\n */\nexport interface StateTransactionRuntimeConfig {\n readonly instrumentation?: StateTransactionInstrumentation\n /**\n * StateTrait derived converge budget (ms):\n * - Exceeding the budget triggers a soft degrade (freeze derived fields, preserve base writes and 0/1 commit semantics).\n * - Default is 200ms (aligned with the 007 spec threshold).\n */\n readonly traitConvergeBudgetMs?: number\n /**\n * Auto-mode decision budget (ms):\n * - Only used during the decision phase when requestedMode=\"auto\".\n * - Exceeding the budget must immediately fall back to full (and record evidence).\n */\n readonly traitConvergeDecisionBudgetMs?: number\n /**\n * StateTrait converge scheduling strategy:\n * - full: full topo execution (current default; safest).\n * - dirty: minimal triggering based on dirtyPaths + deps in the txn window (requires accurate deps).\n */\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n /**\n * 043: Trait converge time-slicing (explicit opt-in). Disabled by default.\n */\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n /**\n * 060: Txn Lanes (priority scheduling for transaction follow-up work). 
Enabled by default since 062.\n */\n readonly txnLanes?: TxnLanesPatch\n /**\n * Runtime-level per-module overrides (hotfix path):\n * - Only affects converge behavior for the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /**\n * 060: Txn Lanes runtime_module overrides (hotfix / gradual tuning).\n * - Only affects the specified moduleId.\n * - Lower priority than Provider overrides.\n */\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionConfigTagImpl extends Context.Tag('@logixjs/core/StateTransactionRuntimeConfig')<\n StateTransactionConfigTagImpl,\n StateTransactionRuntimeConfig\n>() {}\n\nexport const StateTransactionConfigTag = StateTransactionConfigTagImpl\n\nexport type ReadQueryStrictGateRuntimeConfig = ReadQueryStrictGateConfig\n\nclass ReadQueryStrictGateConfigTagImpl extends Context.Tag('@logixjs/core/ReadQueryStrictGateRuntimeConfig')<\n ReadQueryStrictGateConfigTagImpl,\n ReadQueryStrictGateRuntimeConfig\n>() {}\n\nexport const ReadQueryStrictGateConfigTag = ReadQueryStrictGateConfigTagImpl\n\nexport type ReplayMode = 'live' | 'replay'\n\nexport interface ReplayModeConfig {\n readonly mode: ReplayMode\n}\n\nclass ReplayModeConfigTagImpl extends Context.Tag('@logixjs/core/ReplayModeConfig')<\n ReplayModeConfigTagImpl,\n ReplayModeConfig\n>() {}\n\nexport const ReplayModeConfigTag = ReplayModeConfigTagImpl\n\nexport const replayModeLayer = (mode: ReplayMode): Layer.Layer<ReplayModeConfigTagImpl, never, never> =>\n Layer.succeed(ReplayModeConfigTag, { mode })\n\nexport interface StateTransactionTraitConvergeOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n}\n\nexport interface 
TxnLanesPatch {\n /**\n * enabled: whether Txn Lanes is enabled (default on since 062).\n * - undefined: default enabled (when not explicitly configured)\n * - false: disabled (returns to baseline behavior)\n * - true: enabled (only affects scheduling of follow-up work outside the transaction; transactions remain synchronous)\n */\n readonly enabled?: boolean\n /**\n * overrideMode: runtime temporary override (for debugging/rollback/comparison).\n * - forced_off: forcibly disables Txn Lanes (returns to baseline behavior).\n * - forced_sync: forces fully synchronous execution (ignores non-urgent deferral and time-slicing; used for comparisons).\n *\n * Notes:\n * - Override precedence follows StateTransactionOverrides: provider > runtime_module > runtime_default > builtin.\n * - Overrides must be explainable by evidence (see 060 LaneEvidence reasons).\n */\n readonly overrideMode?: 'forced_off' | 'forced_sync'\n /** non-urgent work loop slice budget (ms). */\n readonly budgetMs?: number\n /** Non-urgent backlog coalescing window (ms). */\n readonly debounceMs?: number\n /** Max lag upper bound (ms): exceeding it triggers an explainable starvation protection (forced catch-up). */\n readonly maxLagMs?: number\n /** Whether to allow coalescing/canceling intermediate non-urgent work (must preserve eventual consistency). 
*/\n readonly allowCoalesce?: boolean\n /**\n * Yield strategy for the non-urgent work loop (progressive enhancement).\n * - baseline: uses only time budget + hard upper bound\n * - inputPending: when supported by browsers, also consults `navigator.scheduling.isInputPending`\n */\n readonly yieldStrategy?: 'baseline' | 'inputPending'\n}\n\nexport interface TraitConvergeTimeSlicingPatch {\n /**\n * enabled:\n * - false/undefined: disabled (default)\n * - true: enables time-slicing (only affects computed/link explicitly marked as deferred)\n */\n readonly enabled?: boolean\n /**\n * debounceMs: coalescing interval (ms) for the deferral window; merges high-frequency inputs into one deferred flush.\n */\n readonly debounceMs?: number\n /**\n * maxLagMs: max lag upper bound (ms); exceeding it triggers an explainable forced flush (starvation protection).\n */\n readonly maxLagMs?: number\n}\n\n/**\n * Provider-scoped StateTransactionOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface StateTransactionOverrides {\n readonly traitConvergeMode?: TraitConvergeRequestedMode\n readonly traitConvergeBudgetMs?: number\n readonly traitConvergeDecisionBudgetMs?: number\n readonly traitConvergeTimeSlicing?: TraitConvergeTimeSlicingPatch\n readonly traitConvergeOverridesByModuleId?: Readonly<Record<string, StateTransactionTraitConvergeOverrides>>\n /** 060: Txn Lanes provider-level overrides (delta overrides). */\n readonly txnLanes?: TxnLanesPatch\n /** 060: Txn Lanes provider_module overrides (by moduleId). 
*/\n readonly txnLanesOverridesByModuleId?: Readonly<Record<string, TxnLanesPatch>>\n}\n\nclass StateTransactionOverridesTagImpl extends Context.Tag('@logixjs/core/StateTransactionOverrides')<\n StateTransactionOverridesTagImpl,\n StateTransactionOverrides\n>() {}\n\nexport const StateTransactionOverridesTag = StateTransactionOverridesTagImpl\n\nexport type ConcurrencyLimit = number | 'unbounded'\n\nexport interface ConcurrencyPolicyPatch {\n readonly concurrencyLimit?: ConcurrencyLimit\n readonly losslessBackpressureCapacity?: number\n readonly allowUnbounded?: boolean\n readonly pressureWarningThreshold?: {\n readonly backlogCount?: number\n readonly backlogDurationMs?: number\n }\n readonly warningCooldownMs?: number\n}\n\n/**\n * Runtime-level ConcurrencyPolicy:\n * - Provided at the app layer by Logix.Runtime.make / AppRuntime.makeApp.\n * - ModuleRuntime merges sources via a resolver (builtin/runtime_module/provider, etc.).\n *\n * Notes:\n * - overridesByModuleId is used for runtime_module hot-switching (hotfix / gradual tuning) and is lower priority than provider overrides.\n */\nexport interface ConcurrencyPolicy extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyTagImpl extends Context.Tag('@logixjs/core/ConcurrencyPolicy')<\n ConcurrencyPolicyTagImpl,\n ConcurrencyPolicy\n>() {}\n\nexport const ConcurrencyPolicyTag = ConcurrencyPolicyTagImpl\n\n/**\n * Provider-scoped ConcurrencyPolicyOverrides (delta overrides):\n * - Used to inject more local overrides into a Provider subtree on top of inherited global runtime config.\n * - Override precedence: provider > runtime_module > runtime_default > builtin.\n */\nexport interface ConcurrencyPolicyOverrides extends ConcurrencyPolicyPatch {\n readonly overridesByModuleId?: Readonly<Record<string, ConcurrencyPolicyPatch>>\n}\n\nclass ConcurrencyPolicyOverridesTagImpl extends 
Context.Tag('@logixjs/core/ConcurrencyPolicyOverrides')<\n ConcurrencyPolicyOverridesTagImpl,\n ConcurrencyPolicyOverrides\n>() {}\n\nexport const ConcurrencyPolicyOverridesTag = ConcurrencyPolicyOverridesTagImpl\n\n// ---- 073: TickScheduler + RuntimeStore (injectable runtime services) ----\n\nexport interface RuntimeStoreService extends RuntimeStore {}\n\nexport class RuntimeStoreTag extends Context.Tag('@logixjs/core/RuntimeStore')<RuntimeStoreTag, RuntimeStoreService>() {}\n\nexport const runtimeStoreLayer: Layer.Layer<any, never, never> = Layer.scoped(\n RuntimeStoreTag,\n Effect.acquireRelease(\n Effect.sync(() => makeRuntimeStore() as RuntimeStoreService),\n (store) => Effect.sync(() => store.dispose()),\n ),\n) as Layer.Layer<any, never, never>\n\nexport const runtimeStoreTestStubLayer = (store: RuntimeStoreService): Layer.Layer<any, never, never> =>\n Layer.succeed(RuntimeStoreTag, store) as Layer.Layer<any, never, never>\n\nexport interface HostSchedulerService extends HostScheduler {}\n\nexport class HostSchedulerTag extends Context.Tag('@logixjs/core/HostScheduler')<\n HostSchedulerTag,\n HostSchedulerService\n>() {}\n\nexport const hostSchedulerLayer: Layer.Layer<any, never, never> = Layer.succeed(\n HostSchedulerTag,\n getGlobalHostScheduler() as HostSchedulerService,\n) as Layer.Layer<any, never, never>\n\nexport const hostSchedulerTestStubLayer = (scheduler: HostSchedulerService): Layer.Layer<any, never, never> =>\n Layer.succeed(HostSchedulerTag, scheduler) as Layer.Layer<any, never, never>\n\nexport interface DeclarativeLinkRuntimeService extends DeclarativeLinkRuntime {}\n\nexport class DeclarativeLinkRuntimeTag extends Context.Tag('@logixjs/core/DeclarativeLinkRuntime')<\n DeclarativeLinkRuntimeTag,\n DeclarativeLinkRuntimeService\n>() {}\n\nexport const declarativeLinkRuntimeLayer: Layer.Layer<any, never, never> = Layer.succeed(\n DeclarativeLinkRuntimeTag,\n makeDeclarativeLinkRuntime() as DeclarativeLinkRuntimeService,\n) as Layer.Layer<any, 
never, never>\n\nexport const declarativeLinkRuntimeTestStubLayer = (\n runtime: DeclarativeLinkRuntimeService,\n): Layer.Layer<any, never, never> => Layer.succeed(DeclarativeLinkRuntimeTag, runtime) as Layer.Layer<any, never, never>\n\nexport interface TickSchedulerService extends TickScheduler {}\n\nexport class TickSchedulerTag extends Context.Tag('@logixjs/core/TickScheduler')<TickSchedulerTag, TickSchedulerService>() {}\n\nexport const tickSchedulerLayer = (config?: TickSchedulerConfig): Layer.Layer<any, never, never> =>\n Layer.effect(\n TickSchedulerTag,\n Effect.gen(function* () {\n const store = yield* RuntimeStoreTag\n const declarativeLinkRuntime = yield* DeclarativeLinkRuntimeTag\n const hostScheduler = yield* HostSchedulerTag\n return makeTickScheduler({ runtimeStore: store, declarativeLinkRuntime, hostScheduler, config }) as TickSchedulerService\n }),\n ) as Layer.Layer<any, never, never>\n\nexport const tickSchedulerTestStubLayer = (scheduler: TickSchedulerService): Layer.Layer<any, never, never> =>\n Layer.succeed(TickSchedulerTag, scheduler) as Layer.Layer<any, never, never>\n","export type Cancel = () => void\n\nexport type HostScheduler = {\n readonly nowMs: () => number\n readonly scheduleMicrotask: (cb: () => void) => void\n readonly scheduleMacrotask: (cb: () => void) => Cancel\n readonly scheduleAnimationFrame: (cb: () => void) => Cancel\n readonly scheduleTimeout: (ms: number, cb: () => void) => Cancel\n}\n\nconst noopCancel: Cancel = () => {}\n\nconst safeNowMs = (): number => {\n const perf = (globalThis as any).performance as { now?: () => number } | undefined\n if (perf && typeof perf.now === 'function') {\n try {\n const v = perf.now()\n if (typeof v === 'number' && Number.isFinite(v)) return v\n } catch {\n // fallthrough\n }\n }\n\n return Date.now()\n}\n\nconst safeQueueMicrotask = (cb: () => void): void => {\n const qm = (globalThis as any).queueMicrotask as ((run: () => void) => void) | undefined\n if (typeof qm === 'function') {\n 
try {\n qm(cb)\n return\n } catch {\n // fallthrough\n }\n }\n\n // Promise job fallback (still a microtask boundary).\n try {\n Promise.resolve().then(cb)\n } catch {\n // last resort\n setTimeout(cb, 0)\n }\n}\n\nconst safeSetTimeout = (ms: number, cb: () => void): Cancel => {\n const id = setTimeout(cb, ms)\n return () => {\n try {\n clearTimeout(id)\n } catch {\n // best-effort\n }\n }\n}\n\nconst makeMessageChannelMacrotask = (): ((cb: () => void) => Cancel) | undefined => {\n const MC = (globalThis as any).MessageChannel as { new (): MessageChannel } | undefined\n if (typeof MC !== 'function') return undefined\n\n let channel: MessageChannel\n try {\n channel = new MC()\n } catch {\n return undefined\n }\n\n type Task = { canceled: boolean; cb: () => void }\n const queue: Array<Task> = []\n let scheduled = false\n\n const flush = (): void => {\n scheduled = false\n const tasks = queue.splice(0, queue.length)\n for (const t of tasks) {\n if (t.canceled) continue\n try {\n t.cb()\n } catch {\n // best-effort\n }\n }\n }\n\n try {\n channel.port1.onmessage = flush\n } catch {\n return undefined\n }\n\n const schedule = (cb: () => void): Cancel => {\n const task: Task = { canceled: false, cb }\n queue.push(task)\n if (!scheduled) {\n scheduled = true\n try {\n channel.port2.postMessage(undefined)\n } catch {\n scheduled = false\n // fallback to timeout if postMessage fails\n return safeSetTimeout(0, cb)\n }\n }\n return () => {\n task.canceled = true\n }\n }\n\n return schedule\n}\n\nconst makeSetImmediateMacrotask = (): ((cb: () => void) => Cancel) | undefined => {\n const si = (globalThis as any).setImmediate as ((run: () => void) => any) | undefined\n const ci = (globalThis as any).clearImmediate as ((id: any) => void) | undefined\n if (typeof si !== 'function') return undefined\n\n return (cb) => {\n let id: any\n try {\n id = si(cb)\n } catch {\n return safeSetTimeout(0, cb)\n }\n\n return () => {\n if (typeof ci !== 'function') return\n try {\n ci(id)\n } 
catch {\n // best-effort\n }\n }\n }\n}\n\nconst makeRaf = (): ((cb: () => void) => Cancel) | undefined => {\n const raf = (globalThis as any).requestAnimationFrame as ((run: () => void) => number) | undefined\n const cancel = (globalThis as any).cancelAnimationFrame as ((id: number) => void) | undefined\n if (typeof raf !== 'function') return undefined\n\n return (cb) => {\n let id: number\n try {\n id = raf(cb)\n } catch {\n return noopCancel\n }\n\n return () => {\n if (typeof cancel !== 'function') return\n try {\n cancel(id)\n } catch {\n // best-effort\n }\n }\n }\n}\n\nexport const makeDefaultHostScheduler = (): HostScheduler => {\n const macrotask =\n makeSetImmediateMacrotask() ??\n makeMessageChannelMacrotask() ??\n ((cb: () => void) => safeSetTimeout(0, cb))\n\n const raf = makeRaf()\n\n return {\n nowMs: safeNowMs,\n scheduleMicrotask: safeQueueMicrotask,\n scheduleMacrotask: macrotask,\n scheduleAnimationFrame: (cb) => raf?.(cb) ?? macrotask(cb),\n scheduleTimeout: safeSetTimeout,\n }\n}\n\nlet globalHostScheduler: HostScheduler | undefined\n\nexport const getGlobalHostScheduler = (): HostScheduler => {\n globalHostScheduler ??= makeDefaultHostScheduler()\n return globalHostScheduler\n}\n\nexport const __unsafeSetGlobalHostSchedulerForTests = (next: HostScheduler | undefined): void => {\n globalHostScheduler = next\n}\n\nexport type DeterministicHostScheduler = HostScheduler & {\n readonly flushMicrotasks: (options?: { readonly max?: number }) => number\n readonly flushOneMacrotask: () => boolean\n readonly flushAll: (options?: { readonly maxTurns?: number }) => { readonly turns: number; readonly ran: number }\n readonly getQueueSize: () => { readonly microtasks: number; readonly macrotasks: number }\n}\n\nexport const makeDeterministicHostScheduler = (): DeterministicHostScheduler => {\n const microtasks: Array<() => void> = []\n const macrotasks: Array<{ canceled: boolean; cb: () => void }> = []\n\n const flushMicrotasks = (options?: { readonly max?: 
number }): number => {\n const max = options?.max ?? 10_000\n let ran = 0\n while (microtasks.length > 0 && ran < max) {\n const cb = microtasks.shift()!\n ran += 1\n try {\n cb()\n } catch {\n // best-effort\n }\n }\n return ran\n }\n\n const flushOneMacrotask = (): boolean => {\n const t = macrotasks.shift()\n if (!t) return false\n if (t.canceled) return true\n try {\n t.cb()\n } catch {\n // best-effort\n }\n return true\n }\n\n const flushAll = (options?: { readonly maxTurns?: number }): { turns: number; ran: number } => {\n const maxTurns = options?.maxTurns ?? 10_000\n let turns = 0\n let ran = 0\n\n while (turns < maxTurns) {\n const before = microtasks.length + macrotasks.length\n ran += flushMicrotasks()\n if (microtasks.length > 0) {\n turns += 1\n continue\n }\n if (flushOneMacrotask()) {\n turns += 1\n continue\n }\n const after = microtasks.length + macrotasks.length\n if (after === 0 || after === before) break\n turns += 1\n }\n\n return { turns, ran }\n }\n\n return {\n nowMs: safeNowMs,\n scheduleMicrotask: (cb) => {\n microtasks.push(cb)\n },\n scheduleMacrotask: (cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n scheduleAnimationFrame: (cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n scheduleTimeout: (_ms, cb) => {\n const task = { canceled: false, cb }\n macrotasks.push(task)\n return () => {\n task.canceled = true\n }\n },\n flushMicrotasks,\n flushOneMacrotask,\n flushAll,\n getQueueSize: () => ({ microtasks: microtasks.length, macrotasks: macrotasks.length }),\n }\n}\n","import type { StateCommitMeta, StateCommitPriority } from './module.js'\n\nexport type ModuleInstanceKey = `${string}::${string}`\nexport type TopicKey = string\n\nexport type TopicKind = 'module' | 'readQuery'\n\nexport type TopicInfo =\n | { readonly kind: 'module'; readonly moduleInstanceKey: ModuleInstanceKey }\n | { readonly 
kind: 'readQuery'; readonly moduleInstanceKey: ModuleInstanceKey; readonly selectorId: string }\n\nexport const makeModuleInstanceKey = (moduleId: string, instanceId: string): ModuleInstanceKey =>\n `${moduleId}::${instanceId}`\n\nexport const makeReadQueryTopicKey = (moduleInstanceKey: ModuleInstanceKey, selectorId: string): TopicKey =>\n `${moduleInstanceKey}::rq:${selectorId}`\n\nexport const parseTopicKey = (topicKey: string): TopicInfo | undefined => {\n const idx = topicKey.indexOf('::')\n if (idx <= 0) return undefined\n\n const moduleId = topicKey.slice(0, idx)\n const rest = topicKey.slice(idx + 2)\n if (rest.length === 0) return undefined\n\n const idx2 = rest.indexOf('::')\n if (idx2 < 0) {\n return { kind: 'module', moduleInstanceKey: `${moduleId}::${rest}` }\n }\n\n const instanceId = rest.slice(0, idx2)\n const suffix = rest.slice(idx2 + 2)\n if (suffix.startsWith('rq:')) {\n const selectorId = suffix.slice('rq:'.length)\n if (selectorId.length === 0) return undefined\n return {\n kind: 'readQuery',\n moduleInstanceKey: `${moduleId}::${instanceId}`,\n selectorId,\n }\n }\n\n return { kind: 'module', moduleInstanceKey: `${moduleId}::${instanceId}` }\n}\n\nexport interface RuntimeStoreModuleCommit {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly state: unknown\n readonly meta: StateCommitMeta\n readonly opSeq?: number\n}\n\nexport interface RuntimeStorePendingDrain {\n readonly modules: ReadonlyMap<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly dirtyTopics: ReadonlyMap<TopicKey, StateCommitPriority>\n}\n\nexport interface RuntimeStoreCommitResult {\n readonly changedTopics: ReadonlyMap<TopicKey, { readonly priority: StateCommitPriority; readonly listeners: ReadonlyArray<() => void> }>\n}\n\nexport interface RuntimeStore {\n // ---- React-facing sync snapshot APIs ----\n readonly getTickSeq: () => number\n readonly getModuleState: (moduleInstanceKey: ModuleInstanceKey) => 
unknown\n readonly getTopicVersion: (topicKey: TopicKey) => number\n readonly getTopicPriority: (topicKey: TopicKey) => StateCommitPriority\n readonly subscribeTopic: (topicKey: TopicKey, listener: () => void) => () => void\n readonly getTopicSubscriberCount: (topicKey: TopicKey) => number\n readonly getModuleSubscriberCount: (moduleInstanceKey: ModuleInstanceKey) => number\n\n // ---- Runtime integration ----\n readonly registerModuleInstance: (args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly initialState: unknown\n }) => void\n readonly unregisterModuleInstance: (moduleInstanceKey: ModuleInstanceKey) => void\n\n // ---- TickScheduler integration (internal) ----\n readonly commitTick: (args: {\n readonly tickSeq: number\n readonly accepted: RuntimeStorePendingDrain\n }) => RuntimeStoreCommitResult\n\n readonly dispose: () => void\n}\n\nexport const makeRuntimeStore = (): RuntimeStore => {\n let tickSeq = 0\n\n // ---- Committed snapshot (read by React) ----\n const moduleStates = new Map<ModuleInstanceKey, unknown>()\n const topicVersions = new Map<TopicKey, number>()\n const topicPriorities = new Map<TopicKey, StateCommitPriority>()\n\n // ---- Subscriptions ----\n const listenersByTopic = new Map<TopicKey, Set<() => void>>()\n const subscriberCountByModule = new Map<ModuleInstanceKey, number>()\n\n const getTopicVersion = (topicKey: TopicKey): number => topicVersions.get(topicKey) ?? 0\n const getTopicPriority = (topicKey: TopicKey): StateCommitPriority => topicPriorities.get(topicKey) ?? 'normal'\n\n const commitTopicBump = (topicKey: TopicKey, priority: StateCommitPriority): void => {\n const prev = topicVersions.get(topicKey) ?? 
0\n topicVersions.set(topicKey, prev + 1)\n topicPriorities.set(topicKey, priority)\n }\n\n const subscribeTopic = (topicKey: TopicKey, listener: () => void): (() => void) => {\n const info = parseTopicKey(topicKey)\n const existing = listenersByTopic.get(topicKey)\n const set = existing ?? new Set<() => void>()\n const alreadyHas = set.has(listener)\n if (!alreadyHas) {\n set.add(listener)\n }\n if (!existing) {\n listenersByTopic.set(topicKey, set)\n }\n\n if (!alreadyHas && info) {\n const prev = subscriberCountByModule.get(info.moduleInstanceKey) ?? 0\n subscriberCountByModule.set(info.moduleInstanceKey, prev + 1)\n }\n\n return () => {\n const current = listenersByTopic.get(topicKey)\n if (!current) return\n const deleted = current.delete(listener)\n if (deleted && info) {\n const prev = subscriberCountByModule.get(info.moduleInstanceKey) ?? 0\n const next = prev - 1\n if (next <= 0) {\n subscriberCountByModule.delete(info.moduleInstanceKey)\n } else {\n subscriberCountByModule.set(info.moduleInstanceKey, next)\n }\n }\n if (current.size === 0) {\n listenersByTopic.delete(topicKey)\n }\n }\n }\n\n const getTopicSubscriberCount = (topicKey: TopicKey): number => listenersByTopic.get(topicKey)?.size ?? 0\n const getModuleSubscriberCount = (moduleInstanceKey: ModuleInstanceKey): number => subscriberCountByModule.get(moduleInstanceKey) ?? 
0\n\n const registerModuleInstance = (args: {\n readonly moduleId: string\n readonly instanceId: string\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly initialState: unknown\n }): void => {\n moduleStates.set(args.moduleInstanceKey, args.initialState)\n // Ensure the module topic exists with a stable baseline version/priority.\n if (!topicVersions.has(args.moduleInstanceKey)) {\n topicVersions.set(args.moduleInstanceKey, 0)\n topicPriorities.set(args.moduleInstanceKey, 'normal')\n }\n }\n\n const unregisterModuleInstance = (moduleInstanceKey: ModuleInstanceKey): void => {\n moduleStates.delete(moduleInstanceKey)\n // Keep topic versions by default (helps debugging). Subscribers are expected to detach on module destroy.\n }\n\n const commitTick = (args: { readonly tickSeq: number; readonly accepted: RuntimeStorePendingDrain }): RuntimeStoreCommitResult => {\n tickSeq = args.tickSeq\n\n for (const [key, commit] of args.accepted.modules) {\n moduleStates.set(key, commit.state)\n }\n\n const changedTopics = new Map<TopicKey, { readonly priority: StateCommitPriority; readonly listeners: ReadonlyArray<() => void> }>()\n\n for (const [topicKey, priority] of args.accepted.dirtyTopics) {\n commitTopicBump(topicKey, priority)\n const listeners = Array.from(listenersByTopic.get(topicKey) ?? 
[])\n if (listeners.length > 0) {\n changedTopics.set(topicKey, { priority, listeners })\n }\n }\n\n return { changedTopics }\n }\n\n const getModuleState = (moduleInstanceKey: ModuleInstanceKey): unknown => moduleStates.get(moduleInstanceKey)\n\n const dispose = (): void => {\n moduleStates.clear()\n topicVersions.clear()\n topicPriorities.clear()\n listenersByTopic.clear()\n subscriberCountByModule.clear()\n }\n\n return {\n getTickSeq: () => tickSeq,\n getModuleState,\n getTopicVersion,\n getTopicPriority,\n subscribeTopic,\n getTopicSubscriberCount,\n getModuleSubscriberCount,\n registerModuleInstance,\n unregisterModuleInstance,\n commitTick,\n dispose,\n }\n}\n","import { Effect, FiberRef } from 'effect'\nimport * as Debug from './DebugSink.js'\nimport * as DevtoolsHub from './DevtoolsHub.js'\nimport type { DeclarativeLinkRuntime } from './DeclarativeLinkRuntime.js'\nimport type { HostScheduler } from './HostScheduler.js'\nimport { makeJobQueue, type JobQueue } from './JobQueue.js'\nimport * as TaskRunner from './TaskRunner.js'\nimport {\n makeReadQueryTopicKey,\n type ModuleInstanceKey,\n type RuntimeStore,\n type RuntimeStoreModuleCommit,\n type RuntimeStorePendingDrain,\n} from './RuntimeStore.js'\nimport type { StateCommitPriority } from './module.js'\n\nexport type TickLane = 'urgent' | 'nonUrgent'\n\nexport type TickDegradeReason = 'budget_steps' | 'cycle_detected' | 'unknown'\n\nexport interface TickSchedulerConfig {\n /**\n * Fixpoint step cap:\n * - Steps count \"work acceptance units\" within a single tick, not time.\n * - Exceeding the budget triggers a soft degrade (stable=false), deferring nonUrgent backlog to the next tick.\n */\n readonly maxSteps?: number\n /**\n * Urgent safety cap:\n * - Even urgent work may be cut when the system appears to be in a cycle (avoid freezing).\n */\n readonly urgentStepCap?: number\n /**\n * Drain-round cap:\n * - Bounds the number of drain rounds while capturing concurrent commits before committing the tick 
snapshot.\n * - Exceeding the cap is treated as a cycle (stable=false, degradeReason=cycle_detected).\n */\n readonly maxDrainRounds?: number\n /**\n * Microtask starvation protection threshold:\n * - Counts consecutive ticks scheduled on microtask boundaries without yielding to host (best-effort).\n * - Exceeding the limit forces the next tick to start on a macrotask boundary.\n */\n readonly microtaskChainDepthLimit?: number\n /**\n * Optional degraded-tick telemetry (opt-in, sampled):\n * - Runs even when diagnostics=off (Devtools disabled).\n * - Intended for production health signals (frequency of stable=false / forced yield).\n */\n readonly telemetry?: TickSchedulerTelemetryConfig\n}\n\nexport interface TickSchedulerTelemetryEvent {\n readonly tickSeq: number\n readonly stable: boolean\n readonly degradeReason?: TickDegradeReason\n readonly forcedMacrotask?: boolean\n readonly scheduleReason?: TickScheduleReason\n readonly microtaskChainDepth?: number\n readonly deferredWorkCount?: number\n}\n\nexport interface TickSchedulerTelemetryConfig {\n /** Sample rate in [0, 1]. Default: 0 (disabled). */\n readonly sampleRate?: number\n /** Called for ticks that are degraded (stable=false) and/or started on a forced macrotask boundary. 
*/\n readonly onTickDegraded?: (event: TickSchedulerTelemetryEvent) => void\n}\n\nexport interface TickScheduler {\n readonly getTickSeq: () => number\n readonly onModuleCommit: (commit: RuntimeStoreModuleCommit) => Effect.Effect<void, never, never>\n readonly onSelectorChanged: (args: {\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly selectorId: string\n readonly priority: StateCommitPriority\n }) => void\n readonly flushNow: Effect.Effect<void, never, never>\n}\n\n// ---- Runtime.batch (sync boundary) ----\n\ntype BatchWaiter = { readonly resolve: () => void }\n\nlet batchDepth = 0\nconst batchWaiters = new Set<BatchWaiter>()\n\nexport const enterRuntimeBatch = (): void => {\n batchDepth += 1\n}\n\nexport const exitRuntimeBatch = (): void => {\n batchDepth = Math.max(0, batchDepth - 1)\n if (batchDepth !== 0) return\n const waiters = Array.from(batchWaiters)\n batchWaiters.clear()\n for (const w of waiters) {\n try {\n w.resolve()\n } catch {\n // best-effort\n }\n }\n}\n\nconst waitForBatchEndIfNeeded = (): Effect.Effect<void, never, never> =>\n batchDepth === 0\n ? 
Effect.void\n : Effect.async<void, never>((resume, signal) => {\n\n let done = false\n const cleanup = () => {\n if (done) return\n done = true\n batchWaiters.delete(waiter)\n try {\n signal.removeEventListener('abort', onAbort)\n } catch {\n // best-effort\n }\n }\n\n const onAbort = () => {\n cleanup()\n }\n\n const waiter: BatchWaiter = {\n resolve: () => {\n cleanup()\n resume(Effect.void)\n },\n }\n\n batchWaiters.add(waiter)\n try {\n signal.addEventListener('abort', onAbort, { once: true })\n } catch {\n // best-effort\n }\n })\n\n// ---- TickScheduler implementation ----\n\ntype TriggerKind = 'externalStore' | 'dispatch' | 'timer' | 'unknown'\n\ntype TickScheduleStartedAs = 'microtask' | 'macrotask' | 'batch' | 'unknown'\ntype TickScheduleReason = 'budget' | 'cycle_detected' | 'microtask_starvation' | 'unknown'\n\ntype TickSchedule = {\n readonly startedAs?: TickScheduleStartedAs\n readonly microtaskChainDepth?: number\n readonly forcedMacrotask?: boolean\n readonly reason?: TickScheduleReason\n}\n\nconst clampSampleRate = (sampleRate: number | undefined): number => {\n if (typeof sampleRate !== 'number' || !Number.isFinite(sampleRate)) return 0\n if (sampleRate <= 0) return 0\n if (sampleRate >= 1) return 1\n return sampleRate\n}\n\nconst shouldSampleTick = (tickSeq: number, sampleRate: number): boolean => {\n if (sampleRate <= 0) return false\n if (sampleRate >= 1) return true\n // Deterministic sampling: stable across runs, avoids Math.random() and keeps overhead minimal.\n const x = tickSeq >>> 0\n const h = Math.imul(x ^ 0x9e3779b9, 0x85ebca6b) >>> 0\n return h / 0xffffffff < sampleRate\n}\n\nconst toTriggerKind = (originKind: string | undefined): TriggerKind => {\n if (originKind === 'action') return 'dispatch'\n if (originKind === 'trait-external-store') return 'externalStore'\n if (originKind?.includes('timer')) return 'timer'\n return 'unknown'\n}\n\nconst toLane = (priority: StateCommitPriority): TickLane => (priority === 'low' ? 
'nonUrgent' : 'urgent')\n\nconst maxPriority = (a: StateCommitPriority, b: StateCommitPriority): StateCommitPriority =>\n a === 'normal' || b === 'normal' ? 'normal' : 'low'\n\nconst mergeDrain = (base: RuntimeStorePendingDrain, next: RuntimeStorePendingDrain): RuntimeStorePendingDrain => {\n const modules = new Map(base.modules)\n for (const [k, commit] of next.modules) {\n const prev = modules.get(k)\n if (!prev) {\n modules.set(k, commit)\n } else {\n modules.set(k, {\n ...commit,\n meta: {\n ...commit.meta,\n priority: maxPriority(prev.meta.priority, commit.meta.priority),\n },\n })\n }\n }\n\n const dirtyTopics = new Map(base.dirtyTopics)\n for (const [k, p] of next.dirtyTopics) {\n const prev = dirtyTopics.get(k)\n dirtyTopics.set(k, prev ? maxPriority(prev, p) : p)\n }\n\n return { modules, dirtyTopics }\n}\n\nconst emptyDrain = (): RuntimeStorePendingDrain => ({ modules: new Map(), dirtyTopics: new Map() })\n\nexport const makeTickScheduler = (args: {\n readonly runtimeStore: RuntimeStore\n readonly queue?: JobQueue\n readonly hostScheduler: HostScheduler\n readonly config?: TickSchedulerConfig\n readonly declarativeLinkRuntime?: DeclarativeLinkRuntime\n}): TickScheduler => {\n const store = args.runtimeStore\n const hostScheduler = args.hostScheduler\n const declarativeLinks = args.declarativeLinkRuntime\n const queue = args.queue ?? makeJobQueue()\n\n const config: Required<Pick<TickSchedulerConfig, 'maxSteps' | 'urgentStepCap' | 'maxDrainRounds' | 'microtaskChainDepthLimit'>> = {\n maxSteps: args.config?.maxSteps ?? 64,\n urgentStepCap: args.config?.urgentStepCap ?? 512,\n maxDrainRounds: args.config?.maxDrainRounds ?? 8,\n microtaskChainDepthLimit: args.config?.microtaskChainDepthLimit ?? 
32,\n }\n const telemetry = args.config?.telemetry\n const telemetrySampleRate = clampSampleRate(telemetry?.sampleRate)\n\n let tickSeq = 0\n let scheduled = false\n let microtaskChainDepth = 0\n let nextForcedReason: TickScheduleReason | undefined\n\n let coalescedModules = 0\n let coalescedTopics = 0\n\n const yieldMicrotask = Effect.async<void, never>((resume) => {\n hostScheduler.scheduleMicrotask(() => resume(Effect.void))\n })\n const yieldMacrotask = Effect.async<void, never>((resume, signal) => {\n const cancel = hostScheduler.scheduleMacrotask(() => resume(Effect.void))\n try {\n signal.addEventListener(\n 'abort',\n () => {\n cancel()\n },\n { once: true },\n )\n } catch {\n // best-effort\n }\n })\n\n const scheduleTick = (): Effect.Effect<void, never, never> =>\n Effect.gen(function* () {\n if (scheduled) return\n scheduled = true\n\n const waitedForBatch = batchDepth > 0\n\n const forcedReason = nextForcedReason\n nextForcedReason = undefined\n\n const shouldYieldForStarvation =\n forcedReason == null && microtaskChainDepth >= Math.max(1, config.microtaskChainDepthLimit)\n\n const reason: TickScheduleReason | undefined = forcedReason ?? (shouldYieldForStarvation ? 'microtask_starvation' : undefined)\n const boundary: 'microtask' | 'macrotask' = reason ? 'macrotask' : 'microtask'\n const startedAs: TickScheduleStartedAs = waitedForBatch ? 'batch' : boundary\n const depthAtSchedule = microtaskChainDepth\n\n yield* Effect.forkDaemon(\n Effect.locally(TaskRunner.inSyncTransactionFiber, false)(\n Effect.gen(function* () {\n try {\n yield* waitForBatchEndIfNeeded()\n if (boundary === 'microtask') {\n yield* yieldMicrotask\n microtaskChainDepth += 1\n } else {\n yield* yieldMacrotask\n microtaskChainDepth = 0\n }\n\n const schedule: TickSchedule = {\n startedAs,\n microtaskChainDepth: boundary === 'macrotask' ? depthAtSchedule : microtaskChainDepth,\n ...(boundary === 'macrotask' ? { forcedMacrotask: true, reason: reason ?? 
'unknown' } : {}),\n }\n\n const outcome = yield* flushTick(schedule)\n if (!outcome.stable) {\n nextForcedReason =\n outcome.degradeReason === 'budget_steps'\n ? 'budget'\n : outcome.degradeReason === 'cycle_detected'\n ? 'cycle_detected'\n : 'unknown'\n }\n } finally {\n scheduled = false\n // If something was re-queued or arrived after commit, schedule the next tick (best-effort).\n if (queue.hasPending()) {\n yield* scheduleTick()\n } else {\n // Reset chain depth when the system becomes idle (avoid forcing a macrotask on the next unrelated tick).\n microtaskChainDepth = 0\n }\n }\n }),\n ),\n )\n })\n\n const flushTick = (schedule: TickSchedule): Effect.Effect<{ stable: boolean; degradeReason?: TickDegradeReason }, never, never> =>\n Effect.gen(function* () {\n if (!queue.hasPending()) {\n return { stable: true }\n }\n\n tickSeq += 1\n const currentTickSeq = tickSeq\n\n const diagnosticsLevel = yield* FiberRef.get(Debug.currentDiagnosticsLevel)\n const shouldEmitTrace = DevtoolsHub.isDevtoolsEnabled() && diagnosticsLevel !== 'off'\n\n const captured: {\n drainRounds: number\n stable: boolean\n degradeReason?: TickDegradeReason\n deferred?: RuntimeStorePendingDrain\n accepted: RuntimeStorePendingDrain\n } = {\n drainRounds: 0,\n stable: true,\n accepted: emptyDrain(),\n }\n\n // Fixpoint capture: drain -> apply declarative links -> drain (bounded by maxDrainRounds).\n while (captured.drainRounds < config.maxDrainRounds) {\n const drained = queue.drain()\n if (!drained) break\n captured.drainRounds += 1\n captured.accepted = mergeDrain(captured.accepted, drained)\n\n if (declarativeLinks && drained.modules.size > 0) {\n const changedModuleInstanceKeys = Array.from(drained.modules.keys())\n yield* declarativeLinks.applyForSources({\n tickSeq: currentTickSeq,\n acceptedModules: captured.accepted.modules,\n changedModuleInstanceKeys,\n })\n }\n }\n\n if (queue.hasPending()) {\n captured.stable = false\n captured.degradeReason = 'cycle_detected'\n }\n\n // Budget 
enforcement (defer nonUrgent only; urgent may be cut only in cycle safety-break).\n const urgentModules: Array<RuntimeStoreModuleCommit> = []\n const nonUrgentModules: Array<RuntimeStoreModuleCommit> = []\n\n for (const commit of captured.accepted.modules.values()) {\n if (toLane(commit.meta.priority) === 'urgent') {\n urgentModules.push(commit)\n } else {\n nonUrgentModules.push(commit)\n }\n }\n\n const urgentCapExceeded = urgentModules.length > config.urgentStepCap\n const urgentAccepted = urgentCapExceeded ? urgentModules.slice(0, config.urgentStepCap) : urgentModules\n const urgentDeferred = urgentCapExceeded ? urgentModules.slice(config.urgentStepCap) : []\n\n const nonUrgentBudget = Math.max(0, config.maxSteps)\n const nonUrgentAccepted = urgentCapExceeded ? [] : nonUrgentModules.slice(0, nonUrgentBudget)\n const nonUrgentDeferred = urgentCapExceeded ? nonUrgentModules : nonUrgentModules.slice(nonUrgentBudget)\n\n if (urgentCapExceeded) {\n captured.stable = false\n captured.degradeReason = 'cycle_detected'\n } else if (nonUrgentDeferred.length > 0) {\n captured.stable = false\n captured.degradeReason = captured.degradeReason ?? 
'budget_steps'\n }\n\n const acceptedModules = new Map<ModuleInstanceKey, RuntimeStoreModuleCommit>()\n const deferredModules = new Map<ModuleInstanceKey, RuntimeStoreModuleCommit>()\n\n for (const c of urgentAccepted) acceptedModules.set(c.moduleInstanceKey, c)\n for (const c of nonUrgentAccepted) acceptedModules.set(c.moduleInstanceKey, c)\n\n for (const c of urgentDeferred) deferredModules.set(c.moduleInstanceKey, c)\n for (const c of nonUrgentDeferred) deferredModules.set(c.moduleInstanceKey, c)\n\n const acceptedTopics = new Map<string, StateCommitPriority>()\n const deferredTopics = new Map<string, StateCommitPriority>()\n\n for (const [topicKey, priority] of captured.accepted.dirtyTopics) {\n const info = storeTopicToModuleInstanceKey(topicKey)\n if (!info) continue\n if (acceptedModules.has(info)) {\n acceptedTopics.set(topicKey, priority)\n } else if (deferredModules.has(info)) {\n deferredTopics.set(topicKey, priority)\n } else {\n // Conservative default: treat unknown topics as accepted.\n acceptedTopics.set(topicKey, priority)\n }\n }\n\n const acceptedDrain: RuntimeStorePendingDrain = {\n modules: acceptedModules,\n dirtyTopics: acceptedTopics,\n }\n\n const deferredDrain: RuntimeStorePendingDrain | undefined =\n deferredModules.size > 0 || deferredTopics.size > 0\n ? {\n modules: deferredModules,\n dirtyTopics: deferredTopics,\n }\n : undefined\n\n captured.deferred = deferredDrain\n\n let startedAtMs: number | undefined\n let triggerSummary: any | undefined\n let anchor: any | undefined\n let backlog: any | undefined\n let result: any | undefined\n\n if (shouldEmitTrace) {\n startedAtMs = Date.now()\n\n triggerSummary = (() => {\n const triggers = Array.from(captured.accepted.modules.values())\n const counts = new Map<TriggerKind, number>()\n let primary: any = undefined\n for (const t of triggers) {\n const kind = toTriggerKind(t.meta.originKind)\n counts.set(kind, (counts.get(kind) ?? 
0) + 1)\n if (!primary) {\n primary = {\n kind,\n moduleId: t.moduleId,\n instanceId: t.instanceId,\n fieldPath: kind === 'externalStore' ? t.meta.originName : undefined,\n actionTag: kind === 'dispatch' ? t.meta.originName : undefined,\n }\n }\n }\n return {\n total: triggers.length,\n kinds: Array.from(counts.entries()).map(([kind, count]) => ({ kind, count })),\n primary,\n coalescedCount: {\n modules: coalescedModules,\n topics: coalescedTopics,\n },\n }\n })()\n\n anchor = (() => {\n const first = captured.accepted.modules.values().next().value as RuntimeStoreModuleCommit | undefined\n if (!first) return undefined\n return {\n moduleId: first.moduleId,\n instanceId: first.instanceId,\n txnSeq: first.meta.txnSeq,\n txnId: first.meta.txnId,\n ...(typeof first.opSeq === 'number' ? { opSeq: first.opSeq } : null),\n }\n })()\n\n backlog = (() => {\n const deferredWork = captured.deferred\n if (!deferredWork) return undefined\n const pendingDeferredWork = deferredWork.modules.size + deferredWork.dirtyTopics.size\n\n const deferredModulesList = Array.from(deferredWork.modules.values())\n const pendingExternalInputs = deferredModulesList.filter((m) => toTriggerKind(m.meta.originKind) === 'externalStore').length\n\n const primaryDeferred =\n deferredModulesList.find((m) => toTriggerKind(m.meta.originKind) === 'externalStore') ?? deferredModulesList[0]\n const kind = primaryDeferred ? toTriggerKind(primaryDeferred.meta.originKind) : 'unknown'\n\n const deferredPrimary =\n primaryDeferred != null\n ? {\n kind: kind === 'externalStore' ? ('externalStore' as const) : ('unknown' as const),\n moduleId: primaryDeferred.moduleId,\n instanceId: primaryDeferred.instanceId,\n fieldPath: kind === 'externalStore' ? primaryDeferred.meta.originName : undefined,\n storeId: undefined,\n }\n : undefined\n\n return {\n pendingExternalInputs,\n pendingDeferredWork,\n deferredPrimary,\n }\n })()\n\n result = {\n stable: captured.stable,\n ...(captured.stable ? 
null : { degradeReason: captured.degradeReason ?? 'unknown' }),\n } as const\n }\n\n if (shouldEmitTrace && schedule.forcedMacrotask && schedule.reason === 'microtask_starvation') {\n yield* Debug.record({\n type: 'warn:microtask-starvation',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n tickSeq: currentTickSeq,\n microtaskChainDepth: schedule.microtaskChainDepth,\n })\n }\n\n if (shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'start',\n timestampMs: startedAtMs!,\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: 0,\n steps: 0,\n txnCount: acceptedModules.size,\n },\n },\n })\n }\n\n if (!captured.stable && shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'budgetExceeded',\n timestampMs: Date.now(),\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: Math.max(0, Date.now() - startedAtMs!),\n steps: config.maxSteps,\n txnCount: acceptedModules.size,\n },\n backlog,\n result,\n },\n })\n }\n\n // Requeue deferred backlog before committing the tick, so the next tick can pick it up.\n if (deferredDrain) {\n queue.requeue(deferredDrain)\n }\n\n const committed = store.commitTick({\n tickSeq: currentTickSeq,\n accepted: acceptedDrain,\n })\n\n // Notify changed topics after committing the snapshot token.\n for (const { listeners } of committed.changedTopics.values()) {\n for (const listener of listeners) {\n try {\n listener()\n } catch {\n // best-effort: never let a subscriber break the tick\n }\n }\n }\n\n if (!captured.stable && shouldEmitTrace && backlog?.deferredPrimary) {\n const primary = backlog.deferredPrimary\n if (primary.kind === 'externalStore') {\n const moduleInstanceKey =\n 
primary.moduleId && primary.instanceId ? (`${primary.moduleId}::${primary.instanceId}` as ModuleInstanceKey) : undefined\n if (moduleInstanceKey && store.getModuleSubscriberCount(moduleInstanceKey) > 0) {\n yield* Debug.record({\n type: 'warn:priority-inversion',\n moduleId: primary.moduleId,\n instanceId: primary.instanceId,\n tickSeq: currentTickSeq,\n reason: 'deferredBacklog',\n })\n }\n }\n }\n\n if (shouldEmitTrace) {\n yield* Debug.record({\n type: 'trace:tick',\n moduleId: anchor?.moduleId,\n instanceId: anchor?.instanceId,\n data: {\n tickSeq: currentTickSeq,\n phase: 'settled',\n timestampMs: Date.now(),\n schedule,\n triggerSummary,\n anchors: anchor,\n budget: {\n maxSteps: config.maxSteps,\n elapsedMs: Math.max(0, Date.now() - startedAtMs!),\n steps: acceptedModules.size,\n txnCount: acceptedModules.size,\n },\n backlog,\n result,\n },\n })\n }\n\n if (telemetry?.onTickDegraded && (schedule.forcedMacrotask || !captured.stable) && shouldSampleTick(currentTickSeq, telemetrySampleRate)) {\n try {\n telemetry.onTickDegraded({\n tickSeq: currentTickSeq,\n stable: captured.stable,\n degradeReason: captured.stable ? undefined : (captured.degradeReason ?? 'unknown'),\n forcedMacrotask: schedule.forcedMacrotask,\n scheduleReason: schedule.reason,\n microtaskChainDepth: schedule.microtaskChainDepth,\n deferredWorkCount: deferredDrain ? 
deferredDrain.modules.size + deferredDrain.dirtyTopics.size : 0,\n })\n } catch {\n // best-effort: never let user telemetry break the tick\n }\n }\n coalescedModules = 0\n coalescedTopics = 0\n\n return { stable: captured.stable, degradeReason: captured.degradeReason }\n })\n\n const flushNow: TickScheduler['flushNow'] = flushTick({ startedAs: 'unknown' }).pipe(Effect.asVoid)\n\n const storeTopicToModuleInstanceKey = (topicKey: string): ModuleInstanceKey | undefined => {\n const idx = topicKey.indexOf('::rq:')\n if (idx > 0) {\n return topicKey.slice(0, idx) as ModuleInstanceKey\n }\n if (topicKey.includes('::')) {\n return topicKey as ModuleInstanceKey\n }\n return undefined\n }\n\n const onSelectorChanged: TickScheduler['onSelectorChanged'] = ({ moduleInstanceKey, selectorId, priority }) => {\n const coalesced = queue.markTopicDirty(makeReadQueryTopicKey(moduleInstanceKey, selectorId), priority)\n if (coalesced) coalescedTopics += 1\n }\n\n const onModuleCommit: TickScheduler['onModuleCommit'] = (commit) =>\n Effect.gen(function* () {\n const coalescedCommit = queue.enqueueModuleCommit(commit)\n if (coalescedCommit) coalescedModules += 1\n const coalescedTopic = queue.markTopicDirty(commit.moduleInstanceKey, commit.meta.priority)\n if (coalescedTopic) coalescedTopics += 1\n yield* scheduleTick()\n })\n\n return {\n getTickSeq: () => tickSeq,\n onModuleCommit,\n onSelectorChanged,\n flushNow,\n }\n}\n","import { Effect, FiberRef } from 'effect'\nimport type { JsonValue } from '../../observability/jsonValue.js'\nimport type { EvidencePackage, EvidencePackageSource } from '../../observability/evidence.js'\nimport { exportEvidencePackage, OBSERVABILITY_PROTOCOL_VERSION } from '../../observability/evidence.js'\nimport type { ConvergeStaticIrExport } from '../../state-trait/converge-ir.js'\nimport type { ConvergeStaticIrCollector } from './ConvergeStaticIrCollector.js'\nimport {\n currentDiagnosticsLevel,\n clearRuntimeDebugEventSeq,\n toRuntimeDebugEventRef,\n type 
Event,\n type RuntimeDebugEventRef,\n type Sink,\n} from './DebugSink.js'\nimport { getGlobalHostScheduler } from './HostScheduler.js'\n\n/**\n * DevtoolsHub:\n * - Process/page-level Debug event aggregator (global singleton).\n *\n * Note: this hub is only appended to Debug sinks when devtoolsHubLayer is explicitly enabled.\n * The Snapshot API is always available (returns empty snapshots when disabled).\n *\n * Performance:\n * - Devtools Debug events can be extremely dense in hot paths (EffectOp / Trait / StateTxn, etc.).\n * - The previous implementation copied ringBuffer and Maps per event to build an \"immutable snapshot\" (O(bufferSize)).\n * - The current implementation lets Snapshot reference internal Map/Array directly (read-only convention) and batches\n * subscriber notifications in microtasks, avoiding per-event copies and reducing main-thread interference.\n */\n\nexport interface DevtoolsSnapshot {\n /**\n * SnapshotToken:\n * - Monotonic snapshot change token (a subscription-safe source of truth).\n * - Any externally visible change must advance the token.\n * - If the token does not change, externally visible snapshot fields must not change (avoid tearing / missed updates).\n */\n readonly snapshotToken: SnapshotToken\n readonly instances: ReadonlyMap<string, number>\n readonly events: ReadonlyArray<RuntimeDebugEventRef>\n readonly latestStates: ReadonlyMap<string, JsonValue>\n readonly latestTraitSummaries: ReadonlyMap<string, JsonValue>\n /**\n * exportBudget:\n * - Tracks \"degrade counts\" caused by export boundaries (JsonValue projection/trimming), for explainability.\n * - Counts are cumulative (may differ from the ring buffer window); clearDevtoolsEvents resets them.\n */\n readonly exportBudget: {\n readonly dropped: number\n readonly oversized: number\n }\n}\n\nexport interface DevtoolsHubOptions {\n readonly bufferSize?: number\n}\n\nexport type SnapshotToken = number\n\n// ---- Global mutable state (singleton) ----\n\nconst instances = 
new Map<string, number>()\nconst latestStates = new Map<string, JsonValue>()\nconst latestTraitSummaries = new Map<string, JsonValue>()\nconst instanceLabels = new Map<string, string>()\nconst convergeStaticIrByDigest = new Map<string, ConvergeStaticIrExport>()\nconst liveInstanceKeys = new Set<string>()\n\nconst exportBudget = {\n dropped: 0,\n oversized: 0,\n}\n\nlet lastRunTs = 0\nlet lastRunTsSeq = 0\n\nconst nextRunId = (): string => {\n const ts = Date.now()\n if (ts === lastRunTs) {\n lastRunTsSeq += 1\n } else {\n lastRunTs = ts\n lastRunTsSeq = 0\n }\n\n return lastRunTsSeq === 0 ? `run-${ts}` : `run-${ts}.${lastRunTsSeq}`\n}\n\nlet currentRunId = nextRunId()\nlet nextSeq = 1\n\nlet bufferSize = 500\nconst ringBuffer: RuntimeDebugEventRef[] = []\nconst ringBufferSeq: number[] = []\n\nlet snapshotToken: SnapshotToken = 0\n\nconst ensureRingBufferSize = (): void => {\n if (bufferSize <= 0) {\n ringBuffer.length = 0\n ringBufferSeq.length = 0\n return\n }\n\n if (ringBuffer.length <= bufferSize) return\n const excess = ringBuffer.length - bufferSize\n ringBuffer.splice(0, excess)\n ringBufferSeq.splice(0, excess)\n}\n\nconst trimRingBufferIfNeeded = (): void => {\n if (bufferSize <= 0) {\n ringBuffer.length = 0\n ringBufferSeq.length = 0\n return\n }\n\n // Small windows keep a strict upper bound to avoid \"size=5 but events.length briefly > 5\" surprises.\n // Large windows allow short bursts + batch trimming to avoid linear shift() costs under sustained load.\n if (bufferSize <= 64) {\n ensureRingBufferSize()\n return\n }\n\n const slack = Math.min(1024, Math.floor(bufferSize / 2))\n const threshold = bufferSize + Math.max(1, slack)\n if (ringBuffer.length <= threshold) return\n\n const excess = ringBuffer.length - bufferSize\n ringBuffer.splice(0, excess)\n ringBufferSeq.splice(0, excess)\n}\n\n// Snapshot references internal structures directly (read-only convention) to avoid copy costs in hot paths.\nconst currentSnapshot: DevtoolsSnapshot = {\n 
snapshotToken,\n instances,\n events: ringBuffer,\n latestStates,\n latestTraitSummaries,\n exportBudget,\n}\n\nconst listeners = new Set<() => void>()\n\nlet notifyScheduled = false\nconst scheduleNotify = () => {\n if (listeners.size === 0) return\n if (notifyScheduled) return\n notifyScheduled = true\n getGlobalHostScheduler().scheduleMicrotask(() => {\n notifyScheduled = false\n for (const listener of listeners) {\n listener()\n }\n })\n}\n\nlet devtoolsEnabled = false\n\nconst bumpSnapshotToken = (): void => {\n snapshotToken += 1\n ;(currentSnapshot as any).snapshotToken = snapshotToken\n}\n\nconst markSnapshotChanged = (): void => {\n bumpSnapshotToken()\n scheduleNotify()\n}\n\nexport const configureDevtoolsHub = (options?: DevtoolsHubOptions) => {\n devtoolsEnabled = true\n if (typeof options?.bufferSize === 'number' && Number.isFinite(options.bufferSize)) {\n const next = Math.floor(options.bufferSize)\n const nextBufferSize = next >= 0 ? next : 0\n if (nextBufferSize !== bufferSize) {\n bufferSize = nextBufferSize\n ensureRingBufferSize()\n markSnapshotChanged()\n }\n }\n}\n\nexport const isDevtoolsEnabled = (): boolean => devtoolsEnabled\n\n// ---- Snapshot public helpers ----\n\nexport const getDevtoolsSnapshot = (): DevtoolsSnapshot => currentSnapshot\nexport const getDevtoolsSnapshotToken = (): SnapshotToken => snapshotToken\n\nexport const subscribeDevtoolsSnapshot = (listener: () => void): (() => void) => {\n listeners.add(listener)\n return () => {\n listeners.delete(listener)\n }\n}\n\nexport const getDevtoolsRunId = (): string => currentRunId\n\nexport const setDevtoolsRunId = (runId: string): void => {\n if (typeof runId !== 'string' || runId.length === 0) return\n if (runId !== currentRunId) {\n currentRunId = runId\n markSnapshotChanged()\n }\n}\n\nexport const startDevtoolsRun = (runId?: string): string => {\n currentRunId = typeof runId === 'string' && runId.length > 0 ? 
runId : nextRunId()\n nextSeq = 1\n clearRuntimeDebugEventSeq()\n clearDevtoolsEvents()\n return currentRunId\n}\n\nexport const clearDevtoolsEvents = (): void => {\n ringBuffer.length = 0\n ringBufferSeq.length = 0\n exportBudget.dropped = 0\n exportBudget.oversized = 0\n markSnapshotChanged()\n}\n\nexport const setInstanceLabel = (instanceId: string, label: string): void => {\n instanceLabels.set(instanceId, label)\n markSnapshotChanged()\n}\n\nexport const getInstanceLabel = (instanceId: string): string | undefined => instanceLabels.get(instanceId)\n\nconst registerConvergeStaticIr = (ir: ConvergeStaticIrExport): void => {\n convergeStaticIrByDigest.set(ir.staticIrDigest, ir)\n}\n\nexport const devtoolsHubConvergeStaticIrCollector: ConvergeStaticIrCollector = {\n register: registerConvergeStaticIr,\n}\n\nexport const exportDevtoolsEvidencePackage = (options?: {\n readonly runId?: string\n readonly source?: EvidencePackageSource\n readonly protocolVersion?: string\n}): EvidencePackage => {\n const protocolVersion = options?.protocolVersion ?? OBSERVABILITY_PROTOCOL_VERSION\n const runId = options?.runId ?? currentRunId\n const source = options?.source ?? { host: 'unknown' }\n\n const events = ringBuffer.map((payload, i) => ({\n protocolVersion,\n runId,\n seq: ringBufferSeq[i] ?? 
i + 1,\n timestamp: payload.timestamp,\n type: 'debug:event',\n payload: payload as unknown as JsonValue,\n }))\n\n const isRecord = (value: unknown): value is Record<string, unknown> =>\n typeof value === 'object' && value !== null && !Array.isArray(value)\n\n // In full diagnostics: de-duplicate by staticIrDigest and export matching ConvergeStaticIR (for offline explanation/replay).\n const convergeDigests = new Set<string>()\n let sawFullConverge = false\n\n for (const ref of ringBuffer) {\n if (ref.kind !== 'trait:converge') continue\n const meta = ref.meta\n if (!isRecord(meta)) continue\n\n const digest = meta.staticIrDigest\n if (typeof digest === 'string' && digest.length > 0) {\n convergeDigests.add(digest)\n }\n\n const dirty = meta.dirty\n if (isRecord(dirty) && typeof dirty.rootCount === 'number') {\n sawFullConverge = true\n }\n }\n\n let summary: JsonValue | undefined\n if (sawFullConverge && convergeDigests.size > 0) {\n const staticIrByDigest: Record<string, JsonValue> = {}\n for (const digest of convergeDigests) {\n const ir = convergeStaticIrByDigest.get(digest)\n if (ir) {\n staticIrByDigest[digest] = ir as unknown as JsonValue\n }\n }\n if (Object.keys(staticIrByDigest).length > 0) {\n summary = { converge: { staticIrByDigest } } as unknown as JsonValue\n }\n }\n\n return exportEvidencePackage({\n protocolVersion,\n runId,\n source,\n events,\n summary,\n })\n}\n\n// ---- Hub Sink ----\n\nexport const devtoolsHubSink: Sink = {\n record: (event: Event) =>\n Effect.gen(function* () {\n // NOTE: the hub is a global singleton, but whether events are exportable/written to the buffer is controlled by FiberRef,\n // enabling different perf baselines/diagnostics tiers across scopes within the same process.\n const level = yield* FiberRef.get(currentDiagnosticsLevel)\n\n let changed = false\n\n // trace:instanceLabel: set a human-readable label for a runtime instance.\n if (event.type === 'trace:instanceLabel') {\n const instanceId = (event as 
any).instanceId as string | undefined\n const data = (event as any).data\n const label = data && typeof data === 'object' && 'label' in data ? String((data as any).label) : undefined\n if (instanceId && label) {\n instanceLabels.set(instanceId, label)\n changed = true\n }\n }\n\n // Instance counters: maintain active instance counts by runtimeLabel::moduleId.\n if (event.type === 'module:init' || event.type === 'module:destroy') {\n const moduleId = (event as any).moduleId ?? 'unknown'\n const runtimeLabel = (event as any).runtimeLabel ?? 'unknown'\n const instanceId = (event as any).instanceId as string | undefined\n const key = `${runtimeLabel}::${moduleId}`\n const prev = instances.get(key) ?? 0\n if (event.type === 'module:init') {\n instances.set(key, prev + 1)\n changed = true\n if (instanceId) {\n const instanceKey = `${runtimeLabel}::${moduleId}::${instanceId}`\n liveInstanceKeys.add(instanceKey)\n // If instanceId is reused, ensure derived caches do not carry leftovers from the previous lifetime.\n if (latestStates.delete(instanceKey)) changed = true\n if (latestTraitSummaries.delete(instanceKey)) changed = true\n }\n } else {\n const next = prev - 1\n if (next <= 0) {\n if (instances.delete(key)) changed = true\n } else {\n instances.set(key, next)\n changed = true\n }\n\n if (instanceId) {\n const instanceKey = `${runtimeLabel}::${moduleId}::${instanceId}`\n liveInstanceKeys.delete(instanceKey)\n if (latestStates.delete(instanceKey)) changed = true\n if (latestTraitSummaries.delete(instanceKey)) changed = true\n if (instanceLabels.delete(instanceId)) changed = true\n changed = true\n }\n }\n }\n\n let exportBudgetChanged = false\n const ref = toRuntimeDebugEventRef(event, {\n diagnosticsLevel: level,\n resolveConvergeStaticIr: (staticIrDigest) => convergeStaticIrByDigest.get(staticIrDigest),\n onMetaProjection: ({ stats }) => {\n if (stats.dropped !== 0 || stats.oversized !== 0) {\n exportBudgetChanged = true\n }\n exportBudget.dropped += stats.dropped\n 
exportBudget.oversized += stats.oversized\n },\n })\n if (exportBudgetChanged) {\n changed = true\n }\n if (!ref) {\n // off tier: do not write ring buffer / latestStates, but keep minimal counters/labels (including module:destroy cleanup).\n if (changed) {\n markSnapshotChanged()\n }\n return\n }\n\n // latestStates / latestTraitSummaries: record latest snapshots by runtimeLabel::moduleId::instanceId.\n if (ref.kind === 'state' && ref.label === 'state:update') {\n const runtimeLabel = ref.runtimeLabel ?? 'unknown'\n const key = `${runtimeLabel}::${ref.moduleId}::${ref.instanceId}`\n\n // Late/replayed events after module:destroy: allow entering the window for replay, but do not rebuild latest* caches.\n if (liveInstanceKeys.has(key)) {\n if (ref.meta && typeof ref.meta === 'object' && !Array.isArray(ref.meta)) {\n const anyMeta = ref.meta as any\n if ('state' in anyMeta) {\n latestStates.set(key, anyMeta.state as JsonValue)\n changed = true\n }\n if ('traitSummary' in anyMeta && anyMeta.traitSummary !== undefined) {\n latestTraitSummaries.set(key, anyMeta.traitSummary as JsonValue)\n changed = true\n }\n }\n }\n }\n\n // ring buffer: keep the most recent bufferSize RuntimeDebugEventRefs.\n if (bufferSize > 0) {\n const seq = nextSeq++\n ringBuffer.push(ref)\n ringBufferSeq.push(seq)\n trimRingBufferIfNeeded()\n changed = true\n }\n\n if (changed) {\n markSnapshotChanged()\n }\n }),\n}\n","import { Effect } from 'effect'\nimport type { ReadQueryCompiled } from './ReadQuery.js'\nimport type { DeclarativeLinkIR, DeclarativeLinkNodeId } from './DeclarativeLinkIR.js'\nimport type { ModuleInstanceKey, RuntimeStoreModuleCommit } from './RuntimeStore.js'\n\nexport interface ModuleAsSourceLink {\n readonly id: string\n readonly sourceModuleInstanceKey: ModuleInstanceKey\n readonly readQuery: ReadQueryCompiled<any, any>\n readonly computeValue: (snapshot: unknown) => unknown\n readonly equalsValue: (a: unknown, b: unknown) => boolean\n readonly applyValue: (next: unknown) 
=> Effect.Effect<void, never, never>\n}\n\nexport interface DeclarativeLinkRegistration {\n readonly linkId: string\n readonly ir: DeclarativeLinkIR\n readonly readNodes: ReadonlyArray<{\n readonly nodeId: DeclarativeLinkNodeId\n readonly moduleInstanceKey: ModuleInstanceKey\n readonly readQuery: ReadQueryCompiled<any, any>\n }>\n readonly dispatchNodes: ReadonlyArray<{\n readonly nodeId: DeclarativeLinkNodeId\n readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never>\n }>\n}\n\nexport interface DeclarativeLinkRuntime {\n readonly registerModuleAsSourceLink: (link: ModuleAsSourceLink) => () => void\n readonly registerDeclarativeLink: (link: DeclarativeLinkRegistration) => () => void\n readonly applyForSources: (args: {\n readonly tickSeq: number\n readonly acceptedModules: ReadonlyMap<ModuleInstanceKey, RuntimeStoreModuleCommit>\n readonly changedModuleInstanceKeys: ReadonlyArray<ModuleInstanceKey>\n }) => Effect.Effect<{ readonly scheduled: boolean }, never, never>\n}\n\ntype StoredModuleAsSourceLink = ModuleAsSourceLink & {\n hasValue: boolean\n lastValue: unknown\n}\n\ntype StoredDeclarativeLink = DeclarativeLinkRegistration & {\n readonly readNodeById: ReadonlyMap<string, { readonly nodeId: DeclarativeLinkNodeId; readonly moduleInstanceKey: ModuleInstanceKey; readonly readQuery: ReadQueryCompiled<any, any> }>\n readonly dispatchNodeById: ReadonlyMap<string, { readonly nodeId: DeclarativeLinkNodeId; readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never> }>\n readonly dispatchTargetsByReadNode: ReadonlyMap<string, ReadonlyArray<DeclarativeLinkNodeId>>\n readonly readNodeState: Map<string, { hasValue: boolean; lastValue: unknown }>\n}\n\nexport const makeDeclarativeLinkRuntime = (): DeclarativeLinkRuntime => {\n const moduleAsSourceById = new Map<string, StoredModuleAsSourceLink>()\n const moduleAsSourceIdsBySource = new Map<ModuleInstanceKey, Set<string>>()\n\n const declarativeById = new Map<string, 
StoredDeclarativeLink>()\n const declarativeReadNodesBySource = new Map<ModuleInstanceKey, Array<{ readonly linkId: string; readonly nodeId: DeclarativeLinkNodeId }>>()\n\n const registerModuleAsSourceLink: DeclarativeLinkRuntime['registerModuleAsSourceLink'] = (link) => {\n const stored: StoredModuleAsSourceLink = {\n ...link,\n hasValue: false,\n lastValue: undefined,\n }\n\n moduleAsSourceById.set(link.id, stored)\n const set = moduleAsSourceIdsBySource.get(link.sourceModuleInstanceKey) ?? new Set<string>()\n set.add(link.id)\n moduleAsSourceIdsBySource.set(link.sourceModuleInstanceKey, set)\n\n return () => {\n moduleAsSourceById.delete(link.id)\n const current = moduleAsSourceIdsBySource.get(link.sourceModuleInstanceKey)\n if (!current) return\n current.delete(link.id)\n if (current.size === 0) {\n moduleAsSourceIdsBySource.delete(link.sourceModuleInstanceKey)\n }\n }\n }\n\n const registerDeclarativeLink: DeclarativeLinkRuntime['registerDeclarativeLink'] = (link) => {\n const readNodeById = new Map<string, { readonly nodeId: DeclarativeLinkNodeId; readonly moduleInstanceKey: ModuleInstanceKey; readonly readQuery: ReadQueryCompiled<any, any> }>()\n for (const n of link.readNodes) {\n readNodeById.set(n.nodeId, n)\n }\n\n const dispatchNodeById = new Map<\n string,\n { readonly nodeId: DeclarativeLinkNodeId; readonly dispatch: (payload: unknown) => Effect.Effect<void, never, never> }\n >()\n for (const n of link.dispatchNodes) {\n dispatchNodeById.set(n.nodeId, n)\n }\n\n // V1 constraint: dispatch must have at most one incoming edge, interpreted as \"payload flow\".\n const incomingByDispatch = new Map<string, number>()\n for (const e of link.ir.edges) {\n const to = e.to\n const isDispatch = dispatchNodeById.has(to)\n if (!isDispatch) continue\n incomingByDispatch.set(to, (incomingByDispatch.get(to) ?? 0) + 1)\n const count = incomingByDispatch.get(to) ?? 
0\n if (count > 1) {\n throw new Error(\n `[DeclarativeLinkRuntime] Invalid DeclarativeLinkIR: dispatch node has multiple incoming edges (linkId=${link.linkId}, nodeId=${to}).`,\n )\n }\n }\n\n const dispatchTargetsByReadNode = new Map<string, Array<DeclarativeLinkNodeId>>()\n for (const e of link.ir.edges) {\n const from = e.from\n const to = e.to\n if (!readNodeById.has(from)) continue\n if (!dispatchNodeById.has(to)) continue\n const list = dispatchTargetsByReadNode.get(from) ?? []\n list.push(to)\n dispatchTargetsByReadNode.set(from, list)\n }\n\n const stored: StoredDeclarativeLink = {\n ...link,\n readNodeById,\n dispatchNodeById,\n dispatchTargetsByReadNode,\n readNodeState: new Map(),\n }\n\n declarativeById.set(link.linkId, stored)\n\n for (const n of link.readNodes) {\n const list = declarativeReadNodesBySource.get(n.moduleInstanceKey) ?? []\n list.push({ linkId: link.linkId, nodeId: n.nodeId })\n declarativeReadNodesBySource.set(n.moduleInstanceKey, list)\n }\n\n return () => {\n declarativeById.delete(link.linkId)\n for (const n of link.readNodes) {\n const list = declarativeReadNodesBySource.get(n.moduleInstanceKey)\n if (!list) continue\n const next = list.filter((x) => !(x.linkId === link.linkId && x.nodeId === n.nodeId))\n if (next.length === 0) {\n declarativeReadNodesBySource.delete(n.moduleInstanceKey)\n } else {\n declarativeReadNodesBySource.set(n.moduleInstanceKey, next)\n }\n }\n }\n }\n\n const applyForSources: DeclarativeLinkRuntime['applyForSources'] = (args) =>\n Effect.gen(function* () {\n let scheduled = false\n\n // ---- Module-as-Source edges (module readQuery -> externalStore trait writeback) ----\n for (const sourceKey of args.changedModuleInstanceKeys) {\n const ids = moduleAsSourceIdsBySource.get(sourceKey)\n if (!ids || ids.size === 0) continue\n const commit = args.acceptedModules.get(sourceKey)\n if (!commit) continue\n\n for (const id of ids) {\n const link = moduleAsSourceById.get(id)\n if (!link) continue\n\n let selected: 
unknown\n try {\n selected = link.readQuery.select(commit.state as any)\n } catch {\n continue\n }\n\n const nextValue = link.computeValue(selected)\n if (link.hasValue && link.equalsValue(link.lastValue, nextValue)) {\n continue\n }\n\n link.hasValue = true\n link.lastValue = nextValue\n scheduled = true\n yield* link.applyValue(nextValue)\n }\n }\n\n // ---- DeclarativeLinkIR edges (module readQuery -> dispatch) ----\n for (const sourceKey of args.changedModuleInstanceKeys) {\n const refs = declarativeReadNodesBySource.get(sourceKey)\n if (!refs || refs.length === 0) continue\n const commit = args.acceptedModules.get(sourceKey)\n if (!commit) continue\n\n for (const ref of refs) {\n const link = declarativeById.get(ref.linkId)\n if (!link) continue\n const readNode = link.readNodeById.get(ref.nodeId)\n if (!readNode) continue\n\n let value: unknown\n try {\n value = readNode.readQuery.select(commit.state as any)\n } catch {\n continue\n }\n\n const state = link.readNodeState.get(ref.nodeId) ?? { hasValue: false, lastValue: undefined }\n const changed = !state.hasValue || !Object.is(state.lastValue, value)\n if (!changed) continue\n\n state.hasValue = true\n state.lastValue = value\n link.readNodeState.set(ref.nodeId, state)\n\n const targets = link.dispatchTargetsByReadNode.get(ref.nodeId) ?? []\n for (const dispatchNodeId of targets) {\n const node = link.dispatchNodeById.get(dispatchNodeId)\n if (!node) continue\n scheduled = true\n yield* node.dispatch(value)\n }\n }\n }\n\n return { scheduled } as const\n })\n\n return {\n registerModuleAsSourceLink,\n registerDeclarativeLink,\n applyForSources,\n }\n}\n","import { Context, Effect, Layer } from 'effect'\n\nexport type ResourceSnapshotPhase = 'idle' | 'loading' | 'success' | 'error'\n\nexport type ReplayLogEvent =\n | {\n readonly _tag: 'ResourceSnapshot'\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n /**\n * Optional: source concurrency policy (e.g. 
\"switch\" / \"exhaust-trailing\").\n * - Must remain slim & serializable.\n * - Used by Devtools/replay to explain why old results are dropped / why trailing happens.\n */\n readonly concurrency?: string\n readonly phase: ResourceSnapshotPhase\n readonly snapshot: unknown\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n }\n | {\n readonly _tag: 'InvalidateRequest'\n readonly timestamp: number\n readonly moduleId?: string\n readonly instanceId?: string\n readonly kind: 'resource' | 'query'\n readonly target: string\n readonly meta?: unknown\n }\n\nexport type ResourceSnapshotEvent = Extract<ReplayLogEvent, { readonly _tag: 'ResourceSnapshot' }>\n\nexport interface ReplayLogService {\n readonly record: (event: ReplayLogEvent) => Effect.Effect<void>\n readonly snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>>\n readonly resetCursor: Effect.Effect<void>\n readonly consumeNext: (predicate: (event: ReplayLogEvent) => boolean) => Effect.Effect<ReplayLogEvent | undefined>\n readonly consumeNextResourceSnapshot: (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }) => Effect.Effect<ResourceSnapshotEvent | undefined>\n}\n\nexport class ReplayLog extends Context.Tag('@logixjs/core/ReplayLog')<ReplayLog, ReplayLogService>() {}\n\nexport const make = (initial?: ReadonlyArray<ReplayLogEvent>): ReplayLogService => {\n const events: Array<ReplayLogEvent> = initial ? 
Array.from(initial) : []\n let cursor = 0\n\n const consumeNext = (predicate: (event: ReplayLogEvent) => boolean): Effect.Effect<ReplayLogEvent | undefined> =>\n Effect.sync(() => {\n for (let i = cursor; i < events.length; i++) {\n const event = events[i]\n if (!predicate(event)) continue\n cursor = i + 1\n return event\n }\n return undefined\n })\n\n const consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n }): Effect.Effect<ResourceSnapshotEvent | undefined> =>\n consumeNext((event): event is ResourceSnapshotEvent => {\n if (event._tag !== 'ResourceSnapshot') return false\n if (event.resourceId !== params.resourceId) return false\n if (event.fieldPath !== params.fieldPath) return false\n if (params.keyHash !== undefined && event.keyHash !== params.keyHash) {\n return false\n }\n if (params.phase !== undefined && event.phase !== params.phase) {\n return false\n }\n return true\n }).pipe(Effect.map((event) => event as ResourceSnapshotEvent | undefined))\n\n return {\n record: (event) => Effect.sync(() => events.push(event)),\n snapshot: Effect.sync(() => events.slice()),\n resetCursor: Effect.sync(() => {\n cursor = 0\n }),\n consumeNext,\n consumeNextResourceSnapshot,\n }\n}\n\nexport const layer = (initial?: ReadonlyArray<ReplayLogEvent>): Layer.Layer<ReplayLog, never, never> =>\n Layer.succeed(ReplayLog, make(initial))\n\nexport const record = (event: ReplayLogEvent): Effect.Effect<void, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.record(event)\n })\n\nexport const snapshot: Effect.Effect<ReadonlyArray<ReplayLogEvent>, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.snapshot\n})\n\nexport const resetCursor: Effect.Effect<void, never, ReplayLog> = Effect.gen(function* () {\n const log = yield* ReplayLog\n yield* log.resetCursor\n})\n\nexport const 
consumeNextResourceSnapshot = (params: {\n readonly resourceId: string\n readonly fieldPath: string\n readonly keyHash?: string\n readonly phase?: ResourceSnapshotPhase\n}): Effect.Effect<ResourceSnapshotEvent | undefined, never, ReplayLog> =>\n Effect.gen(function* () {\n const log = yield* ReplayLog\n return yield* log.consumeNextResourceSnapshot(params)\n })\n","export type RowId = string\n\nexport interface ListConfig {\n readonly path: string\n readonly trackBy?: string\n}\n\nexport type ListRemovalListener = (rowId: RowId) => void\n\ntype Segment = string | number\n\nconst parseSegments = (path: string): ReadonlyArray<Segment> => {\n if (!path) return []\n return path.split('.').map((seg) => (/^[0-9]+$/.test(seg) ? Number(seg) : seg))\n}\n\nexport const getAtPath = (state: any, path: string): any => {\n if (!path || state == null) return state\n const segments = parseSegments(path)\n let current: any = state\n for (const seg of segments) {\n if (current == null) return undefined\n if (typeof seg === 'number') {\n current = Array.isArray(current) ? current[seg] : current[String(seg)]\n continue\n }\n current = current[seg]\n }\n return current\n}\n\nexport const setAtPathMutating = (draft: unknown, path: string, value: unknown): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const nextKey = segments[i + 1]!\n\n const next = current?.[key as any]\n if (next == null || typeof next !== 'object') {\n current[key as any] = typeof nextKey === 'number' ? 
[] : {}\n }\n current = current[key as any]\n }\n\n const last = segments[segments.length - 1]!\n current[last as any] = value\n}\n\nexport const unsetAtPathMutating = (draft: unknown, path: string): void => {\n if (!path) return\n const segments = parseSegments(path)\n if (segments.length === 0) return\n\n let current: any = draft\n for (let i = 0; i < segments.length - 1; i++) {\n const key = segments[i]!\n const next = current?.[key as any]\n if (next == null || typeof next !== 'object') {\n return\n }\n current = next\n }\n\n const last = segments[segments.length - 1]!\n if (Array.isArray(current) && typeof last === 'number') {\n current[last] = undefined\n return\n }\n\n if (current && typeof current === 'object') {\n delete current[last as any]\n }\n}\n\nexport const joinPath = (prefix: string, suffix: string): string => {\n if (!prefix) return suffix\n if (!suffix) return prefix\n return `${prefix}.${suffix}`\n}\n\nexport interface ListItemFieldPath {\n readonly listPath: string\n readonly itemPath: string\n}\n\n/**\n * parseListItemFieldPath:\n * - Recognize a list.item field path like \"items[].profileResource\".\n * - Supports nested arrays: returns listPath/itemPath for the innermost list.\n */\nexport const parseListItemFieldPath = (fieldPath: string): ListItemFieldPath | undefined => {\n const raw = typeof fieldPath === 'string' ? fieldPath.trim() : ''\n if (!raw) return undefined\n\n const segments = raw.split('.').filter(Boolean)\n let lastListSeg = -1\n for (let i = 0; i < segments.length; i++) {\n if (segments[i]!.endsWith('[]')) lastListSeg = i\n }\n if (lastListSeg < 0) return undefined\n\n const strip = (seg: string): string => (seg.endsWith('[]') ? 
seg.slice(0, -2) : seg)\n\n const listPath = segments\n .slice(0, lastListSeg + 1)\n .map(strip)\n .join('.')\n\n const itemPath = segments\n .slice(lastListSeg + 1)\n .map(strip)\n .join('.')\n\n return { listPath, itemPath }\n}\n\nexport const toListItemValuePath = (listPath: string, index: number, itemPath: string): string =>\n itemPath ? `${listPath}.${index}.${itemPath}` : `${listPath}.${index}`\n\ntype ListState = {\n readonly listPath: string\n readonly parentRowId?: RowId\n readonly itemsRef: ReadonlyArray<unknown>\n readonly ids: ReadonlyArray<RowId>\n readonly indexById: ReadonlyMap<RowId, number>\n readonly trackBy?: string\n}\n\nconst readTrackBy = (item: unknown, trackBy: string): unknown => {\n if (!item || typeof item !== 'object') return undefined\n const segments = trackBy.split('.')\n let current: any = item\n for (const seg of segments) {\n if (current == null) return undefined\n current = current[seg as any]\n }\n return current\n}\n\nconst didReorderByReference = (prevItems: ReadonlyArray<unknown>, nextItems: ReadonlyArray<unknown>): boolean => {\n const buckets = new Map<unknown, Array<number>>()\n for (let i = 0; i < prevItems.length; i++) {\n const item = prevItems[i]\n const list = buckets.get(item) ?? 
[]\n list.push(i)\n buckets.set(item, list)\n }\n\n for (let nextIndex = 0; nextIndex < nextItems.length; nextIndex++) {\n const item = nextItems[nextIndex]\n const q = buckets.get(item)\n if (!q || q.length === 0) continue\n const prevIndex = q.shift()!\n if (prevIndex !== nextIndex) {\n return true\n }\n }\n\n return false\n}\n\nconst hasStableTrackByKeys = (items: ReadonlyArray<unknown>, trackBy: string): boolean =>\n items.every((item) => readTrackBy(item, trackBy) !== undefined)\n\nconst isSameTrackBySequence = (\n prevItems: ReadonlyArray<unknown>,\n nextItems: ReadonlyArray<unknown>,\n trackBy: string,\n): boolean => {\n if (prevItems.length !== nextItems.length) return false\n for (let i = 0; i < prevItems.length; i++) {\n if (!Object.is(readTrackBy(prevItems[i], trackBy), readTrackBy(nextItems[i], trackBy))) {\n return false\n }\n }\n return true\n}\n\nconst reconcileIds = (\n prev: ListState | undefined,\n nextItems: ReadonlyArray<unknown>,\n trackBy?: string,\n makeRowId?: () => RowId,\n): { readonly ids: ReadonlyArray<RowId>; readonly removed: ReadonlyArray<RowId> } => {\n const nextRowId =\n makeRowId ??\n (() => {\n let rowSeq = 0\n return () => {\n rowSeq += 1\n return `r${rowSeq}`\n }\n })()\n\n if (!prev) {\n return {\n ids: nextItems.map(() => nextRowId()),\n removed: [],\n }\n }\n\n if (prev.itemsRef === nextItems) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n\n // Important: keep RowId stable under \"non-structural changes\" (field updates / object clones only),\n // otherwise in-flight state and caches would be invalidated needlessly.\n const sameLength = prev.itemsRef.length === nextItems.length\n if (sameLength) {\n // trackBy case: if the key sequence is identical, we can reuse old ids;\n // otherwise we must reconcile by key (avoid misclassifying clone+reorder as \"no reorder\").\n if (trackBy) {\n const canUseKeys = hasStableTrackByKeys(prev.itemsRef, trackBy) && hasStableTrackByKeys(nextItems, trackBy)\n if (canUseKeys) {\n if 
(isSameTrackBySequence(prev.itemsRef, nextItems, trackBy)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n } else {\n // If trackBy keys are not available, fall back to reference-level detection (keep stability for \"cloned but not reordered\").\n if (!didReorderByReference(prev.itemsRef, nextItems)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n }\n } else if (!didReorderByReference(prev.itemsRef, nextItems)) {\n return {\n ids: prev.ids,\n removed: [],\n }\n }\n }\n\n const keyOf = (item: unknown): unknown => {\n if (!trackBy) return item\n const k = readTrackBy(item, trackBy)\n return k !== undefined ? k : item\n }\n\n const buckets = new Map<unknown, Array<RowId>>()\n for (let i = 0; i < prev.itemsRef.length; i++) {\n const key = keyOf(prev.itemsRef[i])\n const list = buckets.get(key) ?? []\n list.push(prev.ids[i]!)\n buckets.set(key, list)\n }\n\n const ids: Array<RowId> = []\n for (let i = 0; i < nextItems.length; i++) {\n const key = keyOf(nextItems[i])\n const list = buckets.get(key)\n if (list && list.length > 0) {\n ids.push(list.shift()!)\n } else {\n ids.push(nextRowId())\n }\n }\n\n const removed: Array<RowId> = []\n for (const list of buckets.values()) {\n removed.push(...list)\n }\n\n return { ids, removed }\n}\n\nconst buildIndexById = (ids: ReadonlyArray<RowId>): ReadonlyMap<RowId, number> => {\n const map = new Map<RowId, number>()\n for (let i = 0; i < ids.length; i++) {\n map.set(ids[i]!, i)\n }\n return map\n}\n\nexport class RowIdStore {\n private readonly lists = new Map<string, ListState>()\n private readonly removalListeners = new Map<string, Set<ListRemovalListener>>()\n private readonly rowIdIndex = new Map<\n RowId,\n { readonly key: string; readonly listPath: string; readonly index: number }\n >()\n private nextRowSeq = 0\n\n constructor(private readonly instanceId?: string) {}\n\n private listKey = (listPath: string, parentRowId?: RowId): string =>\n parentRowId ? 
`${listPath}@@${parentRowId}` : listPath\n\n private makeRowId = (): RowId => {\n this.nextRowSeq += 1\n return this.instanceId ? `${this.instanceId}::r${this.nextRowSeq}` : `r${this.nextRowSeq}`\n }\n\n private notifyRemoved(listPath: string, rowId: RowId): void {\n const listeners = this.removalListeners.get(listPath)\n if (!listeners || listeners.size === 0) return\n for (const fn of listeners) {\n try {\n fn(rowId)\n } catch {\n // listener failures should never break runtime behavior\n }\n }\n }\n\n private removeDescendants(parentRowId: RowId): void {\n const keys: Array<string> = []\n for (const [k, st] of this.lists.entries()) {\n if (st.parentRowId === parentRowId) keys.push(k)\n }\n\n for (const key of keys) {\n const st = this.lists.get(key)\n if (!st) continue\n this.lists.delete(key)\n for (let i = 0; i < st.ids.length; i++) {\n const rowId = st.ids[i]!\n this.rowIdIndex.delete(rowId)\n this.notifyRemoved(st.listPath, rowId)\n this.removeDescendants(rowId)\n }\n }\n }\n\n onRemoved(listPath: string, listener: ListRemovalListener): () => void {\n const set = this.removalListeners.get(listPath) ?? new Set<ListRemovalListener>()\n set.add(listener)\n this.removalListeners.set(listPath, set)\n return () => {\n const current = this.removalListeners.get(listPath)\n if (!current) return\n current.delete(listener)\n if (current.size === 0) this.removalListeners.delete(listPath)\n }\n }\n\n getRowId(listPath: string, index: number, parentRowId?: RowId): RowId | undefined {\n const state = this.lists.get(this.listKey(listPath, parentRowId))\n return state ? 
state.ids[index] : undefined\n }\n\n getIndex(listPath: string, rowId: RowId): number | undefined {\n const info = this.rowIdIndex.get(rowId)\n if (!info) return undefined\n if (info.listPath !== listPath) return undefined\n return info.index\n }\n\n /**\n * ensureList:\n * - Align RowID mapping for the given listPath with the current items.\n * - Returns the latest ids (index -> RowId).\n */\n ensureList(\n listPath: string,\n items: ReadonlyArray<unknown>,\n trackBy?: string,\n parentRowId?: RowId,\n ): ReadonlyArray<RowId> {\n const key = this.listKey(listPath, parentRowId)\n const prev = this.lists.get(key)\n const { ids, removed } = reconcileIds(prev, items, trackBy ?? prev?.trackBy, this.makeRowId)\n\n const next: ListState = {\n listPath,\n parentRowId,\n itemsRef: items,\n ids,\n indexById: buildIndexById(ids),\n trackBy: trackBy ?? prev?.trackBy,\n }\n this.lists.set(key, next)\n\n if (removed.length > 0) {\n for (const rowId of removed) {\n this.rowIdIndex.delete(rowId)\n this.notifyRemoved(listPath, rowId)\n this.removeDescendants(rowId)\n }\n }\n\n // Refresh the reverse rowId -> index mapping (index changes are allowed).\n for (let i = 0; i < ids.length; i++) {\n const rowId = ids[i]!\n this.rowIdIndex.set(rowId, { key, listPath, index: i })\n }\n\n return ids\n }\n\n /**\n * updateAll:\n * - After each commit, align RowID mappings for all known lists.\n * - configs come from list declarations in StateTraitProgram.spec (may include trackBy).\n */\n updateAll(state: unknown, configs: ReadonlyArray<ListConfig>): void {\n const cfgByPath = new Map<string, ListConfig>()\n const paths: Array<string> = []\n for (const cfg of configs) {\n if (!cfg || typeof cfg.path !== 'string') continue\n const p = cfg.path.trim()\n if (!p) continue\n cfgByPath.set(p, cfg)\n paths.push(p)\n }\n\n const pathSet = new Set(paths)\n\n const parentOf = (path: string): string | undefined => {\n const segments = path.split('.').filter(Boolean)\n let best: string | undefined\n for 
(let i = 1; i < segments.length; i++) {\n const prefix = segments.slice(0, i).join('.')\n if (pathSet.has(prefix)) best = prefix\n }\n return best\n }\n\n const parentByPath = new Map<string, string | undefined>()\n const suffixByPath = new Map<string, string>()\n const childrenByParent = new Map<string | undefined, Array<string>>()\n\n for (const path of paths) {\n const parent = parentOf(path)\n parentByPath.set(path, parent)\n const suffix = parent ? path.slice(parent.length + 1) : path\n suffixByPath.set(path, suffix)\n const list = childrenByParent.get(parent) ?? []\n list.push(path)\n childrenByParent.set(parent, list)\n }\n\n // roots first (and deterministic traversal)\n const roots = (childrenByParent.get(undefined) ?? []).slice().sort()\n\n const visit = (listPath: string, parentRowId: RowId | undefined, listValue: unknown): void => {\n const cfg = cfgByPath.get(listPath)\n const items = Array.isArray(listValue) ? (listValue as ReadonlyArray<unknown>) : []\n const ids = this.ensureList(listPath, items, cfg?.trackBy, parentRowId)\n\n const children = (childrenByParent.get(listPath) ?? []).slice().sort()\n if (children.length === 0) return\n\n for (let i = 0; i < items.length; i++) {\n const row = items[i]\n const rowId = ids[i]\n if (!rowId) continue\n for (const childPath of children) {\n const suffix = suffixByPath.get(childPath) ?? ''\n const childValue = suffix ? 
getAtPath(row as any, suffix) : undefined\n visit(childPath, rowId, childValue)\n }\n }\n }\n\n for (const root of roots) {\n const value = getAtPath(state as any, root)\n visit(root, undefined, value)\n }\n }\n}\n\nexport const collectListConfigs = (spec: Record<string, unknown>): ReadonlyArray<ListConfig> => {\n const configs: Array<ListConfig> = []\n for (const key in spec) {\n if (!Object.prototype.hasOwnProperty.call(spec, key)) continue\n const raw = spec[key]\n if (!raw || typeof raw !== 'object') continue\n const tag = (raw as any)._tag\n if (tag !== 'StateTraitList') continue\n const trackBy = (raw as any).identityHint?.trackBy\n configs.push({\n path: key,\n trackBy: typeof trackBy === 'string' ? trackBy : undefined,\n })\n }\n return configs\n}\n","import { isDevEnv } from './env.js'\nimport type { RuntimeInternals } from './RuntimeInternals.js'\n\nconst RUNTIME_INTERNALS = Symbol.for('@logixjs/core/runtimeInternals')\nconst BOUND_INTERNALS = Symbol.for('@logixjs/core/boundInternals')\nconst MODULE_TRAITS_PROGRAM = Symbol.for('@logixjs/core/moduleTraitsProgram')\n\nconst defineHidden = (target: object, key: symbol, value: unknown): void => {\n Object.defineProperty(target, key, {\n value,\n enumerable: false,\n configurable: true,\n writable: false,\n })\n}\n\nexport const setRuntimeInternals = (runtime: object, internals: RuntimeInternals): void => {\n defineHidden(runtime, RUNTIME_INTERNALS, internals)\n}\n\nexport const setBoundInternals = (bound: object, internals: RuntimeInternals): void => {\n defineHidden(bound, BOUND_INTERNALS, internals)\n}\n\n/**\n * ModuleTraitsProgram(StateTraitProgram):\n * - Attaches a traits Program to a module definition object (used by TraitLifecycle/Debug).\n * - Uses Symbol + non-enumerable properties to avoid spreading `.__*` magic fields.\n *\n * Note: this is a \"module-definition-side\" internal slot, not RuntimeInternals (instance-level); the semantics differ.\n */\nexport const setModuleTraitsProgram = (module: 
object, program: unknown): void => {\n defineHidden(module, MODULE_TRAITS_PROGRAM, program)\n}\n\nexport const getModuleTraitsProgram = (module: unknown): unknown | undefined => {\n if (!module) return undefined\n if (typeof module !== 'object' && typeof module !== 'function') return undefined\n return (module as any)[MODULE_TRAITS_PROGRAM] as unknown | undefined\n}\n\nconst formatScope = (moduleId: unknown, instanceId: unknown): string => {\n const m = typeof moduleId === 'string' && moduleId.length > 0 ? moduleId : 'unknown'\n const i = typeof instanceId === 'string' && instanceId.length > 0 ? instanceId : 'unknown'\n return `moduleId=${m}, instanceId=${i}`\n}\n\nexport const getRuntimeInternals = (runtime: object): RuntimeInternals => {\n const scope = runtime as { readonly moduleId?: unknown; readonly instanceId?: unknown }\n const internals = (runtime as any)[RUNTIME_INTERNALS] as RuntimeInternals | undefined\n if (!internals) {\n const msg = isDevEnv()\n ? [\n '[MissingRuntimeInternals] Runtime internals not installed on ModuleRuntime instance.',\n `scope: ${formatScope(scope.moduleId, scope.instanceId)}`,\n 'fix:',\n '- Ensure ModuleRuntime.make calls internalHooks.installInternalHooks (020 foundation).',\n '- If you created a mock runtime for tests, attach internals or avoid calling internal-only APIs.',\n ].join('\\n')\n : 'Runtime internals not installed'\n throw new Error(msg)\n }\n\n const runtimeInstanceId = scope.instanceId\n if (\n typeof runtimeInstanceId === 'string' &&\n runtimeInstanceId.length > 0 &&\n runtimeInstanceId !== internals.instanceId\n ) {\n throw new Error(\n isDevEnv()\n ? 
[\n '[InconsistentRuntimeInternals] Runtime internals instanceId mismatch.',\n `runtime: ${formatScope(scope.moduleId, runtimeInstanceId)}`,\n `internals: ${formatScope(internals.moduleId, internals.instanceId)}`,\n ].join('\\n')\n : 'Runtime internals mismatch',\n )\n }\n\n return internals\n}\n\nexport const getBoundInternals = (bound: object): RuntimeInternals => {\n const internals = (bound as any)[BOUND_INTERNALS] as RuntimeInternals | undefined\n if (!internals) {\n const msg = isDevEnv()\n ? [\n '[MissingBoundInternals] Bound internals not installed on Bound API instance.',\n 'fix:',\n '- Ensure BoundApiRuntime attaches internals (020 foundation).',\n '- If you created a mock bound for tests, attach internals or avoid calling internal-only APIs.',\n ].join('\\n')\n : 'Bound internals not installed'\n throw new Error(msg)\n }\n\n return internals\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA,aAAAA;AAAA,EAAA,eAAAC;AAAA,EAAA,eAAAC;AAAA,EAAA,wBAAAC;AAAA,EAAA,qBAAAC;AAAA,EAAA,sBAAAC;AAAA;AAAA;;;ACAA,IAAAC,kBAAyC;;;ACAzC,IAAAC,iBAA4D;;;ACA5D,IAAAC,iBAAuD;;;ACwEvD,IAAM,iBAAoD;AAAA,EACxD,UAAU;AAAA,EACV,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,cAAc,IAAI;AAAA,EAClB,uBAAuB;AACzB;;;AC/EA,oBAAsB;;;ACItB,IAAAC,iBAA0C;AAOnC,IAAM,gBAAgB,wBAAS,WAA+B,MAAS;AAoHvE,IAAM,wBAAN,cAAoC,uBAAQ,IAAI,0BAA0B,EAG/E,EAAE;AAAC;;;AHgJE,IAAM,oBAAoB,wBAAS,WAAgC,CAAC,CAAC;AACrE,IAAM,sBAAsB,wBAAS,WAA+B,MAAS;AAC7E,IAAM,eAAe,wBAAS,WAA+B,MAAS;AACtE,IAAM,eAAe,wBAAS,WAA+B,MAAS;AAEtE,IAAM,0BAA0B,wBAAS,WAA6B,KAAK;AAiB3E,IAAM,0CAA0C,wBAAS,WAAmD;AAAA,EACjH,cAAc;AAAA,EACd,MAAM;AACR,CAAC;AA0ID,IAAM,uBAAuB,oBAAI,IAAY;AAC7C,IAAM,wBAAwB,oBAAI,IAAY;AA4C9C,IAAM,oBAAoB,CAAC,UAAgE;AACzF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,eAAe,MAAM;AACzB,QAAI;AACF,aAAO,qBAAM,OAAO,MAAM,OAA+B;AAAA,QACvD,kBAAkB;AAAA,MACpB,CAAC;AAAA,IACH,QAAQ;AACN,UAAI;AACF,eAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,MAC5C,QAAQ;AACN,eAAO,OAAO,MAAM,KAAK;AAAA,MAC3B;AAAA,IACF;AAAA,EACF,GAAG;AAEH,QAAM,UAAU,kBAAkB,QAAQ;AAAA,EAAsB,WAAW;AAE3E,SAAO,s
BAAO,SAAS,OAAO,EAAE;AAAA,IAC9B,sBAAO,aAAa;AAAA,MAClB,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf,eAAe;AAAA,IACjB,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,CAAC,UAA2D;AAChF,QAAM,WAAW,MAAM,YAAY;AACnC,QAAM,SAAS,kBAAkB,QAAQ,gBAAgB,MAAM,QAAQ;AACvE,QAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAC5C,QAAM,MAAM,GAAG,MAAM;AAAA,EAAK,MAAM;AAEhC,QAAM,OACJ,MAAM,aAAa,YACf,sBAAO,WAAW,GAAG,IACrB,MAAM,aAAa,SACjB,sBAAO,QAAQ,GAAG,IAClB,sBAAO,SAAS,GAAG;AAE3B,QAAM,cAAuC;AAAA,IAC3C,kBAAkB;AAAA,IAClB,eAAe,cAAc,MAAM,QAAQ;AAAA,IAC3C,yBAAyB,MAAM;AAAA,IAC/B,4BAA4B,MAAM;AAAA,EACpC;AACA,MAAI,MAAM,MAAM;AACd,gBAAY,uBAAuB,IAAI,MAAM;AAAA,EAC/C;AACA,MAAI,MAAM,WAAW;AACnB,gBAAY,4BAA4B,IAAI,MAAM;AAAA,EACpD;AAEA,SAAO,KAAK,KAAK,sBAAO,aAAa,WAAW,CAAC;AACnD;AAOO,IAAM,YAAY,qBAAM,cAAc,mBAAmB,CAAC,CAAC;AAQlE,IAAM,gBAAsB;AAAA,EAC1B,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AACjB;AAEO,IAAM,iBAAiB,qBAAM,cAAc,mBAAmB,CAAC,aAAa,CAAC;AASpF,IAAM,cAAoB;AAAA,EACxB,QAAQ,CAAC,UACP,MAAM,SAAS,oBACX,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAC/C;AAEO,IAAM,eAAe,qBAAM,cAAc,mBAAmB,CAAC,WAAW,CAAC;AAEhF,IAAM,YAAY,OAAO,WAAW,eAAe,OAAO,aAAa;AAGvE,IAAM,4BAA4B,CAAC,UAAsC;AAEvE,MAAI,OAAQ,MAAc,SAAS,YAAa,MAAc,KAAK,WAAW,QAAQ,GAAG;AACvF,UAAM,WAAY,MAAc,YAAY;AAC5C,UAAM,OAAQ,MAAc;AAE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,yBAAyB,WAAW,QAAQ,OAAO,IAAI;AAAA,QACvD;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,IAAI,KAAK;AAEjB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,mBAAmB;AACpC,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,eAAe,MAAM;AACzB,UAAI;AACF,eAAO,qBAAM,OAAO,MAAM,OAA+B,EAAE,kBAAkB,KAAK,CAAC;AAAA,MACrF,QAAQ;AACN,YAAI;AACF,iBAAO,KAAK,UAAU,MAAM,OAAO,MAAM,CAAC;AAAA,QAC5C,QAAQ;AACN,iBAAO,OAAO,MAAM,KAAK;AAAA,QAC3B;AAAA,MACF;AAAA,IACF,GAAG;AAEH,UAAM,MAAM,GAAG,QAAQ,IAAI,WAAW;AACtC,QAAI,qBAAqB,IAAI,GAAG,GAAG;AACjC,aAAO,sBAAO;AAAA,IAChB;AACA,yBAAqB,IAAI,GAAG;A
AE5B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,mCAAmC;AAAA,QACnC;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,MACF;AAEA,cAAQ,MAAM,WAAW;AAEzB,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,SAAS,cAAc;AAC/B,UAAM,WAAW,MAAM,YAAY;AACnC,UAAM,SAAS,QAAQ,MAAM,IAAI,YAAY,MAAM,OAAO,GACxD,MAAM,YAAY,WAAW,MAAM,SAAS,KAAK,EACnD,GAAG,MAAM,OAAO;AAAA,QAAW,MAAM,IAAI,KAAK,EAAE;AAE5C,UAAM,QACJ,MAAM,aAAa,YAAY,kBAAkB,MAAM,aAAa,SAAS,kBAAkB;AAEjG,UAAM,QACJ,MAAM,aAAa,YACf,wBACA,MAAM,aAAa,SACjB,qBACA;AAER,UAAM,MAAM,GAAG,QAAQ,IAAI,MAAM,IAAI,IAAI,MAAM,OAAO;AACtD,QAAI,sBAAsB,IAAI,GAAG,GAAG;AAClC,aAAO,sBAAO;AAAA,IAChB;AACA,0BAAsB,IAAI,GAAG;AAE7B,WAAO,sBAAO,KAAK,MAAM;AAEvB,cAAQ;AAAA,QACN,iBAAiB,QAAQ,eAAe;AAAA,QACxC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,UAAI,MAAM,aAAa,WAAW;AAEhC,gBAAQ,KAAK,MAAM;AAAA,MACrB,WAAW,MAAM,aAAa,QAAQ;AAEpC,gBAAQ,KAAK,MAAM;AAAA,MACrB,OAAO;AAEL,gBAAQ,MAAM,MAAM;AAAA,MACtB;AAEA,cAAQ,SAAS;AAAA,IACnB,CAAC;AAAA,EACH;AAIA,SAAO,sBAAO;AAChB;AAOA,IAAM,qBAA2B;AAAA,EAC/B,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AAEd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,eACb,cAAc,KAAK,IACnB,sBAAO,SAAS,EAAE,YAAY,MAAM,CAAC;AAAA,IAC7C;AAEA,WAAO,0BAA0B,KAAK;AAAA,EACxC;AACF;AAEO,IAAM,sBAAsB,qBAAM,cAAc,mBAAmB,CAAC,kBAAkB,CAAC;AAQ9F,IAAM,+BAAqC;AAAA,EACzC,QAAQ,CAAC,UAAiB;AACxB,QAAI,CAAC,WAAW;AACd,aAAO,MAAM,SAAS,oBAClB,kBAAkB,KAAK,IACvB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAChD,cAAc,KAAK,IACnB,sBAAO;AAAA,IACf;AAEA,WAAO,MAAM,SAAS,qBAAsB,MAAM,SAAS,gBAAgB,MAAM,aAAa,SAC1F,0BAA0B,KAAK,IAC/B,sBAAO;AAAA,EACb;AACF;AAEO,IAAM,gCAAgC,qBAAM,cAAc,mBAAmB,CAAC,4BAA4B,CAAC;AAO3G,IAAM,2BAA2B,sBAAO;AAAA,EAC7C,sBAAO;AAAA,EACP,sBAAO,aAAa,EAAE,MAAM,WAAW,QAAQ,KAAK,CAAC;AACvD;;;AI1uBA,IAAAC,iBAAuC;;;ACUvC,IAAM,aAAqB,MAAM;AAAC;AAElC,IAAM,YAAY,MAAc;AAC9B,QAAM,OAAQ,WAAmB;AACjC,MAAI,QAAQ,OAAO,KAAK,QAAQ,YAAY;AAC1C,QAAI;AACF,YAAM,IAAI,KAAK,IAAI;AACnB,UAAI,OAAO,MAAM,YAAY,OAAO,SAAS,CAAC,EAAG,QAAO;AAAA,IAC1D,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO,KAAK,IAAI;AAClB;AAEA,IAAM,qBAAqB,CAAC,OAAyB;AACnD,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,YAAY;AAC5B,QAAI;AACF,S
AAG,EAAE;AACL;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACF,YAAQ,QAAQ,EAAE,KAAK,EAAE;AAAA,EAC3B,QAAQ;AAEN,eAAW,IAAI,CAAC;AAAA,EAClB;AACF;AAEA,IAAM,iBAAiB,CAAC,IAAY,OAA2B;AAC7D,QAAM,KAAK,WAAW,IAAI,EAAE;AAC5B,SAAO,MAAM;AACX,QAAI;AACF,mBAAa,EAAE;AAAA,IACjB,QAAQ;AAAA,IAER;AAAA,EACF;AACF;AAEA,IAAM,8BAA8B,MAAgD;AAClF,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,WAAY,QAAO;AAErC,MAAI;AACJ,MAAI;AACF,cAAU,IAAI,GAAG;AAAA,EACnB,QAAQ;AACN,WAAO;AAAA,EACT;AAGA,QAAM,QAAqB,CAAC;AAC5B,MAAI,YAAY;AAEhB,QAAM,QAAQ,MAAY;AACxB,gBAAY;AACZ,UAAM,QAAQ,MAAM,OAAO,GAAG,MAAM,MAAM;AAC1C,eAAW,KAAK,OAAO;AACrB,UAAI,EAAE,SAAU;AAChB,UAAI;AACF,UAAE,GAAG;AAAA,MACP,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AACF,YAAQ,MAAM,YAAY;AAAA,EAC5B,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,CAAC,OAA2B;AAC3C,UAAM,OAAa,EAAE,UAAU,OAAO,GAAG;AACzC,UAAM,KAAK,IAAI;AACf,QAAI,CAAC,WAAW;AACd,kBAAY;AACZ,UAAI;AACF,gBAAQ,MAAM,YAAY,MAAS;AAAA,MACrC,QAAQ;AACN,oBAAY;AAEZ,eAAO,eAAe,GAAG,EAAE;AAAA,MAC7B;AAAA,IACF;AACA,WAAO,MAAM;AACX,WAAK,WAAW;AAAA,IAClB;AAAA,EACF;AAEA,SAAO;AACT;AAEA,IAAM,4BAA4B,MAAgD;AAChF,QAAM,KAAM,WAAmB;AAC/B,QAAM,KAAM,WAAmB;AAC/B,MAAI,OAAO,OAAO,WAAY,QAAO;AAErC,SAAO,CAAC,OAAO;AACb,QAAI;AACJ,QAAI;AACF,WAAK,GAAG,EAAE;AAAA,IACZ,QAAQ;AACN,aAAO,eAAe,GAAG,EAAE;AAAA,IAC7B;AAEA,WAAO,MAAM;AACX,UAAI,OAAO,OAAO,WAAY;AAC9B,UAAI;AACF,WAAG,EAAE;AAAA,MACP,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AACF;AAEA,IAAM,UAAU,MAAgD;AAC9D,QAAM,MAAO,WAAmB;AAChC,QAAM,SAAU,WAAmB;AACnC,MAAI,OAAO,QAAQ,WAAY,QAAO;AAEtC,SAAO,CAAC,OAAO;AACb,QAAI;AACJ,QAAI;AACF,WAAK,IAAI,EAAE;AAAA,IACb,QAAQ;AACN,aAAO;AAAA,IACT;AAEA,WAAO,MAAM;AACX,UAAI,OAAO,WAAW,WAAY;AAClC,UAAI;AACF,eAAO,EAAE;AAAA,MACX,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AACF;AAEO,IAAM,2BAA2B,MAAqB;AAC3D,QAAM,YACJ,0BAA0B,KAC1B,4BAA4B,MAC3B,CAAC,OAAmB,eAAe,GAAG,EAAE;AAE3C,QAAM,MAAM,QAAQ;AAEpB,SAAO;AAAA,IACL,OAAO;AAAA,IACP,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,wBAAwB,CAAC,OAAO,MAAM,EAAE,KAAK,UAAU,EAAE;AAAA,IACzD,iBAAiB;AAAA,EACnB;AACF;AAEA,IAAI;AAEG,IAAM,yBAAyB,MAAqB;AACzD,gDAAwB,yBAAyB;AACjD,SAAO;AACT;;;ACrKO,IAAM,gBAAg
B,CAAC,aAA4C;AACxE,QAAM,MAAM,SAAS,QAAQ,IAAI;AACjC,MAAI,OAAO,EAAG,QAAO;AAErB,QAAM,WAAW,SAAS,MAAM,GAAG,GAAG;AACtC,QAAM,OAAO,SAAS,MAAM,MAAM,CAAC;AACnC,MAAI,KAAK,WAAW,EAAG,QAAO;AAE9B,QAAM,OAAO,KAAK,QAAQ,IAAI;AAC9B,MAAI,OAAO,GAAG;AACZ,WAAO,EAAE,MAAM,UAAU,mBAAmB,GAAG,QAAQ,KAAK,IAAI,GAAG;AAAA,EACrE;AAEA,QAAM,aAAa,KAAK,MAAM,GAAG,IAAI;AACrC,QAAM,SAAS,KAAK,MAAM,OAAO,CAAC;AAClC,MAAI,OAAO,WAAW,KAAK,GAAG;AAC5B,UAAM,aAAa,OAAO,MAAM,MAAM,MAAM;AAC5C,QAAI,WAAW,WAAW,EAAG,QAAO;AACpC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,mBAAmB,GAAG,QAAQ,KAAK,UAAU;AAAA,MAC7C;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,UAAU,mBAAmB,GAAG,QAAQ,KAAK,UAAU,GAAG;AAC3E;AAgDO,IAAM,mBAAmB,MAAoB;AAClD,MAAI,UAAU;AAGd,QAAM,eAAe,oBAAI,IAAgC;AACzD,QAAM,gBAAgB,oBAAI,IAAsB;AAChD,QAAM,kBAAkB,oBAAI,IAAmC;AAG/D,QAAM,mBAAmB,oBAAI,IAA+B;AAC5D,QAAM,0BAA0B,oBAAI,IAA+B;AAEnE,QAAM,kBAAkB,CAAC,aAA+B,cAAc,IAAI,QAAQ,KAAK;AACvF,QAAM,mBAAmB,CAAC,aAA4C,gBAAgB,IAAI,QAAQ,KAAK;AAEvG,QAAM,kBAAkB,CAAC,UAAoB,aAAwC;AACnF,UAAM,OAAO,cAAc,IAAI,QAAQ,KAAK;AAC5C,kBAAc,IAAI,UAAU,OAAO,CAAC;AACpC,oBAAgB,IAAI,UAAU,QAAQ;AAAA,EACxC;AAEA,QAAM,iBAAiB,CAAC,UAAoB,aAAuC;AACjF,UAAM,OAAO,cAAc,QAAQ;AACnC,UAAM,WAAW,iBAAiB,IAAI,QAAQ;AAC9C,UAAM,MAAM,YAAY,oBAAI,IAAgB;AAC5C,UAAM,aAAa,IAAI,IAAI,QAAQ;AACnC,QAAI,CAAC,YAAY;AACf,UAAI,IAAI,QAAQ;AAAA,IAClB;AACA,QAAI,CAAC,UAAU;AACb,uBAAiB,IAAI,UAAU,GAAG;AAAA,IACpC;AAEA,QAAI,CAAC,cAAc,MAAM;AACvB,YAAM,OAAO,wBAAwB,IAAI,KAAK,iBAAiB,KAAK;AACpE,8BAAwB,IAAI,KAAK,mBAAmB,OAAO,CAAC;AAAA,IAC9D;AAEA,WAAO,MAAM;AACX,YAAM,UAAU,iBAAiB,IAAI,QAAQ;AAC7C,UAAI,CAAC,QAAS;AACd,YAAM,UAAU,QAAQ,OAAO,QAAQ;AACvC,UAAI,WAAW,MAAM;AACnB,cAAM,OAAO,wBAAwB,IAAI,KAAK,iBAAiB,KAAK;AACpE,cAAM,OAAO,OAAO;AACpB,YAAI,QAAQ,GAAG;AACb,kCAAwB,OAAO,KAAK,iBAAiB;AAAA,QACvD,OAAO;AACL,kCAAwB,IAAI,KAAK,mBAAmB,IAAI;AAAA,QAC1D;AAAA,MACF;AACA,UAAI,QAAQ,SAAS,GAAG;AACtB,yBAAiB,OAAO,QAAQ;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,0BAA0B,CAAC,aAA+B,iBAAiB,IAAI,QAAQ,GAAG,QAAQ;AACxG,QAAM,2BAA2B,CAAC,sBAAiD,wBAAwB,IAAI,iBAAiB,KAAK;AAErI,QAAM,yBAAyB,CAAC,SAKpB;AACV,iBAAa,IAAI,KAAK,mBAAmB,KAAK,YAAY;AAE
1D,QAAI,CAAC,cAAc,IAAI,KAAK,iBAAiB,GAAG;AAC9C,oBAAc,IAAI,KAAK,mBAAmB,CAAC;AAC3C,sBAAgB,IAAI,KAAK,mBAAmB,QAAQ;AAAA,IACtD;AAAA,EACF;AAEA,QAAM,2BAA2B,CAAC,sBAA+C;AAC/E,iBAAa,OAAO,iBAAiB;AAAA,EAEvC;AAEA,QAAM,aAAa,CAAC,SAA8G;AAChI,cAAU,KAAK;AAEf,eAAW,CAAC,KAAK,MAAM,KAAK,KAAK,SAAS,SAAS;AACjD,mBAAa,IAAI,KAAK,OAAO,KAAK;AAAA,IACpC;AAEA,UAAM,gBAAgB,oBAAI,IAAyG;AAEnI,eAAW,CAAC,UAAU,QAAQ,KAAK,KAAK,SAAS,aAAa;AAC5D,sBAAgB,UAAU,QAAQ;AAClC,YAAM,YAAY,MAAM,KAAK,iBAAiB,IAAI,QAAQ,KAAK,CAAC,CAAC;AACjE,UAAI,UAAU,SAAS,GAAG;AACxB,sBAAc,IAAI,UAAU,EAAE,UAAU,UAAU,CAAC;AAAA,MACrD;AAAA,IACF;AAEA,WAAO,EAAE,cAAc;AAAA,EACzB;AAEA,QAAM,iBAAiB,CAAC,sBAAkD,aAAa,IAAI,iBAAiB;AAE5G,QAAM,UAAU,MAAY;AAC1B,iBAAa,MAAM;AACnB,kBAAc,MAAM;AACpB,oBAAgB,MAAM;AACtB,qBAAiB,MAAM;AACvB,4BAAwB,MAAM;AAAA,EAChC;AAEA,SAAO;AAAA,IACL,YAAY,MAAM;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACrNA,IAAAC,iBAAiC;;;ACAjC,IAAAC,iBAAiC;AAyEjC,IAAI,YAAY;AAChB,IAAI,eAAe;AAEnB,IAAM,YAAY,MAAc;AAC9B,QAAM,KAAK,KAAK,IAAI;AACpB,MAAI,OAAO,WAAW;AACpB,oBAAgB;AAAA,EAClB,OAAO;AACL,gBAAY;AACZ,mBAAe;AAAA,EACjB;AAEA,SAAO,iBAAiB,IAAI,OAAO,EAAE,KAAK,OAAO,EAAE,IAAI,YAAY;AACrE;AAEA,IAAI,eAAe,UAAU;;;ACxF7B,IAAAC,iBAAuB;AAkDhB,IAAM,6BAA6B,MAA8B;AACtE,QAAM,qBAAqB,oBAAI,IAAsC;AACrE,QAAM,4BAA4B,oBAAI,IAAoC;AAE1E,QAAM,kBAAkB,oBAAI,IAAmC;AAC/D,QAAM,+BAA+B,oBAAI,IAAmG;AAE5I,QAAM,6BAAmF,CAAC,SAAS;AACjG,UAAM,SAAmC;AAAA,MACvC,GAAG;AAAA,MACH,UAAU;AAAA,MACV,WAAW;AAAA,IACb;AAEA,uBAAmB,IAAI,KAAK,IAAI,MAAM;AACtC,UAAM,MAAM,0BAA0B,IAAI,KAAK,uBAAuB,KAAK,oBAAI,IAAY;AAC3F,QAAI,IAAI,KAAK,EAAE;AACf,8BAA0B,IAAI,KAAK,yBAAyB,GAAG;AAE/D,WAAO,MAAM;AACX,yBAAmB,OAAO,KAAK,EAAE;AACjC,YAAM,UAAU,0BAA0B,IAAI,KAAK,uBAAuB;AAC1E,UAAI,CAAC,QAAS;AACd,cAAQ,OAAO,KAAK,EAAE;AACtB,UAAI,QAAQ,SAAS,GAAG;AACtB,kCAA0B,OAAO,KAAK,uBAAuB;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AAEA,QAAM,0BAA6E,CAAC,SAAS;AAC3F,UAAM,eAAe,oBAAI,IAAwJ;AACjL,eAAW,KAAK,KAAK,WAAW;AAC9B,mBAAa,IAAI,EAAE,QAAQ,CAAC;AAAA,IAC9B;AAEA,UAAM,mBAAmB,oBAAI,IAG3B;AACF,eAAW,KAAK,
KAAK,eAAe;AAClC,uBAAiB,IAAI,EAAE,QAAQ,CAAC;AAAA,IAClC;AAGA,UAAM,qBAAqB,oBAAI,IAAoB;AACnD,eAAW,KAAK,KAAK,GAAG,OAAO;AAC7B,YAAM,KAAK,EAAE;AACb,YAAM,aAAa,iBAAiB,IAAI,EAAE;AAC1C,UAAI,CAAC,WAAY;AACjB,yBAAmB,IAAI,KAAK,mBAAmB,IAAI,EAAE,KAAK,KAAK,CAAC;AAChE,YAAM,QAAQ,mBAAmB,IAAI,EAAE,KAAK;AAC5C,UAAI,QAAQ,GAAG;AACb,cAAM,IAAI;AAAA,UACR,yGAAyG,KAAK,MAAM,YAAY,EAAE;AAAA,QACpI;AAAA,MACF;AAAA,IACF;AAEA,UAAM,4BAA4B,oBAAI,IAA0C;AAChF,eAAW,KAAK,KAAK,GAAG,OAAO;AAC7B,YAAM,OAAO,EAAE;AACf,YAAM,KAAK,EAAE;AACb,UAAI,CAAC,aAAa,IAAI,IAAI,EAAG;AAC7B,UAAI,CAAC,iBAAiB,IAAI,EAAE,EAAG;AAC/B,YAAM,OAAO,0BAA0B,IAAI,IAAI,KAAK,CAAC;AACrD,WAAK,KAAK,EAAE;AACZ,gCAA0B,IAAI,MAAM,IAAI;AAAA,IAC1C;AAEA,UAAM,SAAgC;AAAA,MACpC,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,MACA,eAAe,oBAAI,IAAI;AAAA,IACzB;AAEA,oBAAgB,IAAI,KAAK,QAAQ,MAAM;AAEvC,eAAW,KAAK,KAAK,WAAW;AAC9B,YAAM,OAAO,6BAA6B,IAAI,EAAE,iBAAiB,KAAK,CAAC;AACvE,WAAK,KAAK,EAAE,QAAQ,KAAK,QAAQ,QAAQ,EAAE,OAAO,CAAC;AACnD,mCAA6B,IAAI,EAAE,mBAAmB,IAAI;AAAA,IAC5D;AAEA,WAAO,MAAM;AACX,sBAAgB,OAAO,KAAK,MAAM;AAClC,iBAAW,KAAK,KAAK,WAAW;AAC9B,cAAM,OAAO,6BAA6B,IAAI,EAAE,iBAAiB;AACjE,YAAI,CAAC,KAAM;AACX,cAAM,OAAO,KAAK,OAAO,CAAC,MAAM,EAAE,EAAE,WAAW,KAAK,UAAU,EAAE,WAAW,EAAE,OAAO;AACpF,YAAI,KAAK,WAAW,GAAG;AACrB,uCAA6B,OAAO,EAAE,iBAAiB;AAAA,QACzD,OAAO;AACL,uCAA6B,IAAI,EAAE,mBAAmB,IAAI;AAAA,QAC5D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,kBAA6D,CAAC,SAClE,sBAAO,IAAI,aAAa;AACtB,QAAI,YAAY;AAGhB,eAAW,aAAa,KAAK,2BAA2B;AACtD,YAAM,MAAM,0BAA0B,IAAI,SAAS;AACnD,UAAI,CAAC,OAAO,IAAI,SAAS,EAAG;AAC5B,YAAM,SAAS,KAAK,gBAAgB,IAAI,SAAS;AACjD,UAAI,CAAC,OAAQ;AAEb,iBAAW,MAAM,KAAK;AACpB,cAAM,OAAO,mBAAmB,IAAI,EAAE;AACtC,YAAI,CAAC,KAAM;AAEX,YAAI;AACJ,YAAI;AACF,qBAAW,KAAK,UAAU,OAAO,OAAO,KAAY;AAAA,QACtD,QAAQ;AACN;AAAA,QACF;AAEA,cAAM,YAAY,KAAK,aAAa,QAAQ;AAC5C,YAAI,KAAK,YAAY,KAAK,YAAY,KAAK,WAAW,SAAS,GAAG;AAChE;AAAA,QACF;AAEA,aAAK,WAAW;AAChB,aAAK,YAAY;AACjB,oBAAY;AACZ,eAAO,KAAK,WAAW,SAAS;AAAA,MAClC;AAAA,IACF;AAGA,eAAW,aAAa,KAAK,2BAA2B;AACtD,YAAM,OAAO,6BAA6B,IAAI,SAAS;AACvD,UAAI,CAAC,QAAQ,KAAK,WAAW,EAAG;AAChC,YAAM,SA
AS,KAAK,gBAAgB,IAAI,SAAS;AACjD,UAAI,CAAC,OAAQ;AAEb,iBAAW,OAAO,MAAM;AACtB,cAAM,OAAO,gBAAgB,IAAI,IAAI,MAAM;AAC3C,YAAI,CAAC,KAAM;AACX,cAAM,WAAW,KAAK,aAAa,IAAI,IAAI,MAAM;AACjD,YAAI,CAAC,SAAU;AAEf,YAAI;AACJ,YAAI;AACF,kBAAQ,SAAS,UAAU,OAAO,OAAO,KAAY;AAAA,QACvD,QAAQ;AACN;AAAA,QACF;AAEA,cAAM,QAAQ,KAAK,cAAc,IAAI,IAAI,MAAM,KAAK,EAAE,UAAU,OAAO,WAAW,OAAU;AAC5F,cAAM,UAAU,CAAC,MAAM,YAAY,CAAC,OAAO,GAAG,MAAM,WAAW,KAAK;AACpE,YAAI,CAAC,QAAS;AAEd,cAAM,WAAW;AACjB,cAAM,YAAY;AAClB,aAAK,cAAc,IAAI,IAAI,QAAQ,KAAK;AAExC,cAAM,UAAU,KAAK,0BAA0B,IAAI,IAAI,MAAM,KAAK,CAAC;AACnE,mBAAW,kBAAkB,SAAS;AACpC,gBAAM,OAAO,KAAK,iBAAiB,IAAI,cAAc;AACrD,cAAI,CAAC,KAAM;AACX,sBAAY;AACZ,iBAAO,KAAK,SAAS,KAAK;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,UAAU;AAAA,EACrB,CAAC;AAEH,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AL9NO,IAAM,aAAa,MAA0B;AAClD,MAAI;AAEF,UAAM,MAAO,YAAoB,SAAS;AAC1C,WAAO,OAAO,KAAK,aAAa,WAAW,IAAI,WAAW;AAAA,EAC5D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,IAAM,WAAW,MAAe,WAAW,MAAM;AAgExD,IAAM,gCAAN,cAA4C,uBAAQ,IAAI,6CAA6C,EAGnG,EAAE;AAAC;AAML,IAAM,mCAAN,cAA+C,uBAAQ,IAAI,gDAAgD,EAGzG,EAAE;AAAC;AAUL,IAAM,0BAAN,cAAsC,uBAAQ,IAAI,gCAAgC,EAGhF,EAAE;AAAC;AAkFL,IAAM,mCAAN,cAA+C,uBAAQ,IAAI,yCAAyC,EAGlG,EAAE;AAAC;AA6BL,IAAM,2BAAN,cAAuC,uBAAQ,IAAI,iCAAiC,EAGlF,EAAE;AAAC;AAaL,IAAM,oCAAN,cAAgD,uBAAQ,IAAI,0CAA0C,EAGpG,EAAE;AAAC;AAQE,IAAM,kBAAN,cAA8B,uBAAQ,IAAI,4BAA4B,EAAwC,EAAE;AAAC;AAEjH,IAAM,oBAAoD,qBAAM;AAAA,EACrE;AAAA,EACA,sBAAO;AAAA,IACL,sBAAO,KAAK,MAAM,iBAAiB,CAAwB;AAAA,IAC3D,CAAC,UAAU,sBAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAAA,EAC9C;AACF;AAOO,IAAM,mBAAN,cAA+B,uBAAQ,IAAI,6BAA6B,EAG7E,EAAE;AAAC;AAEE,IAAM,qBAAqD,qBAAM;AAAA,EACtE;AAAA,EACA,uBAAuB;AACzB;AAOO,IAAM,4BAAN,cAAwC,uBAAQ,IAAI,sCAAsC,EAG/F,EAAE;AAAC;AAEE,IAAM,8BAA8D,qBAAM;AAAA,EAC/E;AAAA,EACA,2BAA2B;AAC7B;AAQO,IAAM,mBAAN,cAA+B,uBAAQ,IAAI,6BAA6B,EAA0C,EAAE;AAAC;;;ALzRrH,IAAM,yBAAyB,wBAAS,WAAW,KAAK;AAUxD,IAAM,qBAAqB,wBAAS,WAAW,KAAK;;;AWxB3D,IAAAC,iBAAuC;AA+ChC,IAAM,YAAN,cAAwB,uBAAQ,IAAI,yBAAyB,EAA+B,EAAE;AAAC;AAwD/F,IAAM,WAA2E,sBAAO,IAAI,aAAa;AAC9G,QAAM,MAAM,OA
AO;AACnB,SAAO,OAAO,IAAI;AACpB,CAAC;AAEM,IAAM,cAAqD,sBAAO,IAAI,aAAa;AACxF,QAAM,MAAM,OAAO;AACnB,SAAO,IAAI;AACb,CAAC;;;ACpGD,IAAM,gBAAgB,CAAC,SAAyC;AAC9D,MAAI,CAAC,KAAM,QAAO,CAAC;AACnB,SAAO,KAAK,MAAM,GAAG,EAAE,IAAI,CAAC,QAAS,WAAW,KAAK,GAAG,IAAI,OAAO,GAAG,IAAI,GAAI;AAChF;AAsCO,IAAM,sBAAsB,CAAC,OAAgB,SAAuB;AACzE,MAAI,CAAC,KAAM;AACX,QAAM,WAAW,cAAc,IAAI;AACnC,MAAI,SAAS,WAAW,EAAG;AAE3B,MAAI,UAAe;AACnB,WAAS,IAAI,GAAG,IAAI,SAAS,SAAS,GAAG,KAAK;AAC5C,UAAM,MAAM,SAAS,CAAC;AACtB,UAAM,OAAO,UAAU,GAAU;AACjC,QAAI,QAAQ,QAAQ,OAAO,SAAS,UAAU;AAC5C;AAAA,IACF;AACA,cAAU;AAAA,EACZ;AAEA,QAAM,OAAO,SAAS,SAAS,SAAS,CAAC;AACzC,MAAI,MAAM,QAAQ,OAAO,KAAK,OAAO,SAAS,UAAU;AACtD,YAAQ,IAAI,IAAI;AAChB;AAAA,EACF;AAEA,MAAI,WAAW,OAAO,YAAY,UAAU;AAC1C,WAAO,QAAQ,IAAW;AAAA,EAC5B;AACF;;;ACxEA,IAAM,kBAAkB,uBAAO,IAAI,8BAA8B;AACjE,IAAM,wBAAwB,uBAAO,IAAI,mCAAmC;AA8BrE,IAAM,yBAAyB,CAACC,YAAyC;AAC9E,MAAI,CAACA,QAAQ,QAAO;AACpB,MAAI,OAAOA,YAAW,YAAY,OAAOA,YAAW,WAAY,QAAO;AACvE,SAAQA,QAAe,qBAAqB;AAC9C;AA4CO,IAAM,oBAAoB,CAAC,UAAoC;AACpE,QAAM,YAAa,MAAc,eAAe;AAChD,MAAI,CAAC,WAAW;AACd,UAAM,MAAM,SAAS,IACjB;AAAA,MACE;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,EAAE,KAAK,IAAI,IACX;AACJ,UAAM,IAAI,MAAM,GAAG;AAAA,EACrB;AAEA,SAAO;AACT;;;AdhFO,IAAMC,OAAM;AAAA,EACjB,OAAO,CAAC,UAA4B,EAAE,MAAM,SAAS,KAAK;AAAA,EAC1D,MAAM,CAAC,MAAc,mBAAqD;AAAA,IACxE,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AAAA,EACA,MAAM,CACJ,MACA,OACA,aACc;AAAA,IACd,MAAM;AAAA,IACN;AAAA,IACA;AAAA,IACA,eAAe,SAAS;AAAA,IACxB,OAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaA,eAAe,CAAC,cAAgC;AAC9C,UAAM,MAAM,OAAO,cAAc,WAAW,UAAU,KAAK,IAAI;AAC/D,QAAI,CAAC,OAAO,QAAQ,QAAS,QAAOA,KAAI,KAAK;AAE7C,UAAM,WAAW,IAAI,MAAM,GAAG,EAAE,OAAO,OAAO;AAC9C,QAAI,SAAS,WAAW,EAAG,QAAOA,KAAI,KAAK;AAE3C,UAAM,UAAU,CAAC,QAAyB,WAAW,KAAK,GAAG;AAC7D,UAAM,eAAe,CAAC,QAAyB,IAAI,SAAS,IAAI,IAAI,IAAI,MAAM,GAAG,EAAE,IAAI;AAEvF,UAAM,iBAA0E,CAAC;AACjF,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,MAAM,SAAS,CAAC;AACtB,UAAI,CAAC,QAAQ,GAAG,EAAG;AACnB,YAAM,QAAQ,OAAO,GAAG;AACxB,UAAI,CAAC,OAAO,SAAS,
KAAK,KAAK,QAAQ,EAAG,QAAOA,KAAI,MAAM,GAAG;AAC9D,qBAAe,KAAK,EAAE,KAAK,GAAG,MAAM,CAAC;AAAA,IACvC;AAGA,QAAI,eAAe,SAAS,GAAG;AAC7B,YAAM,OAAO,eAAe,eAAe,SAAS,CAAC;AACrD,YAAM,gBAAgB,eAAe,MAAM,GAAG,EAAE,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK;AAEpE,YAAM,WAAW,SACd,MAAM,GAAG,KAAK,GAAG,EACjB,OAAO,CAAC,QAAQ,CAAC,QAAQ,GAAG,CAAC,EAC7B,IAAI,YAAY,EAChB,KAAK,GAAG;AAEX,UAAI,CAAC,SAAU,QAAOA,KAAI,MAAM,GAAG;AAEnC,YAAM,YAAY,SACf,MAAM,KAAK,MAAM,CAAC,EAClB,IAAI,YAAY,EAChB,KAAK,GAAG;AAEX,aAAOA,KAAI,KAAK,UAAU,KAAK,OAAO;AAAA,QACpC,eAAe,cAAc,SAAS,IAAI,gBAAgB;AAAA,QAC1D,OAAO,YAAY,YAAY;AAAA,MACjC,CAAC;AAAA,IACH;AAGA,QAAI,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI,CAAC,GAAG;AAC1C,YAAM,OAAO,SAAS,SAAS,SAAS,CAAC;AACzC,UAAI,KAAK,SAAS,IAAI,GAAG;AACvB,eAAOA,KAAI,KAAK,SAAS,IAAI,YAAY,EAAE,KAAK,GAAG,CAAC;AAAA,MACtD;AAAA,IACF;AAEA,WAAOA,KAAI,MAAM,GAAG;AAAA,EACtB;AAAA,EACA,MAAM,OAAiB,EAAE,MAAM,OAAO;AACxC;AAKO,IAAM,iBAAiB,CAAC,OAA2B,YACxD,uBAAO,IAAI,aAAa;AACtB,MAAI;AACJ,MAAI;AACF,gBAAY,kBAAkB,KAAY;AAAA,EAC5C,QAAQ;AACN;AAAA,EACF;AAEA,QAAM,UAAU,UAAU,OAAO;AAIjC,MAAI,CAAC,QAAS;AAEd,QAAM,WAAW,CAAC,QAAqD;AACrE,QAAI,IAAI,SAAS,OAAQ,QAAO,EAAE,MAAM,OAAO;AAC/C,QAAI,IAAI,SAAS,QAAS,QAAO,EAAE,MAAM,SAAS,MAAM,IAAI,KAAK;AACjE,QAAI,IAAI,SAAS,QAAQ;AACvB,aAAO;AAAA,QACL,MAAM;AAAA,QACN,MAAM,IAAI;AAAA,QACV,GAAI,IAAI,gBAAgB,EAAE,eAAe,IAAI,cAAc,IAAI,CAAC;AAAA,MAClE;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,MACN,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,GAAI,IAAI,gBAAgB,EAAE,eAAe,IAAI,cAAc,IAAI,CAAC;AAAA,MAChE,OAAO,IAAI;AAAA,IACb;AAAA,EACF;AAEA,QAAM,WAAqD;AAAA,IACzD,MAAM,QAAQ;AAAA,IACd,QAAQ,SAAS,QAAQ,MAAM;AAAA,EACjC;AAEA,QAAM,QAAQ,OAAO,yBAAS,IAAe,sBAAsB;AACnE,MAAI,OAAO;AACT,YAAQ,QAAQ;AAChB;AAAA,EACF;AAEA,SAAO,UAAU,IAAI;AAAA,IAAwB,EAAE,MAAM,SAAS,MAAM,kBAAkB,SAAS,QAAQ;AAAA,IAAG,MACxG,uBAAO,KAAK,MAAM;AAChB,cAAQ,QAAQ;AAAA,IAClB,CAAC;AAAA,EACH;AACF,CAAC;AAKI,IAAM,gBAAgB,CAAC,OAA2B,YACvD,uBAAO,IAAI,aAAa;AACtB,MAAI,QAAQ,SAAS,kBAAkB;AACrC,UAAM,cAAc,CAAC,QAAsC;AACzD,UAAI,IAAI,SAAS,OAAQ,QAAO;AAChC,UAAI,IAAI,SAAS,WAAW,IAAI,SAAS,OAAQ,QAAO,IAAI;AAC5D,YAAM,OAAO,GAAG,IAAI
,IAAI;AACxB,aAAO,IAAI,QAAQ,GAAG,IAAI,IAAI,IAAI,KAAK,KAAK;AAAA,IAC9C;AAEA,UAAM,YAAY,YAAY,QAAQ,MAAM;AAC5C,QAAI,CAAC,WAAW;AACd;AAAA,IACF;AAEA,WAAO,MAAM,OAAO,OAAO,QAAQ,SAAS;AAC5C;AAAA,EACF;AAEA,MAAI,QAAQ,SAAS,oBAAoB;AACvC;AAAA,EACF;AAEA,QAAM,eAAe,OAAO,uBAAO,cAAwB,SAAS;AACpE,MAAI,uBAAO,OAAO,YAAY,GAAG;AAC/B;AAAA,EACF;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AACF,UAAM,YAAY,kBAAkB,KAAY;AAChD,eAAW,UAAU;AACrB,iBAAa,UAAU;AAAA,EACzB,QAAQ;AACN,eAAW;AACX,iBAAa;AAAA,EACf;AAEA,SAAO,aAAa,MAAM,OAAO;AAAA,IAC/B,MAAM;AAAA,IACN,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,MAAM,QAAQ;AAAA,EAChB,CAAC;AACH,CAAC;AAKI,IAAM,UAAU,CAAC,OAA2B,YACjD,uBAAO,IAAI,aAAa;AACtB,QAAM,QAAQ,MACZ,MAAM,MAAM,OAAO,CAAC,UAAU;AAC5B,UAAM,UAAU,CAAC,MAAuB,SAAuB;AAC7D,UAAI,CAAC,KAAM;AACX,MAAM,oBAAoB,OAAO,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,IACpD;AAEA,QAAI,QAAQ,SAAS,oBAAoB;AACvC,YAAM,SAAS,QAAQ;AACvB,UAAI,OAAO,SAAS,QAAS;AAC7B,cAAQ,UAAU,OAAO,IAAI;AAC7B,cAAQ,MAAM,OAAO,IAAI;AACzB;AAAA,IACF;AAEA,QAAI,QAAQ,SAAS,oBAAoB;AACvC,YAAM,SAAS,QAAQ;AACvB,UAAI,OAAO,SAAS,OAAQ;AAC5B,YAAM,OAAO,GAAG,OAAO,IAAI,IAAI,OAAO,KAAK;AAC3C,YAAM,OAAO,OAAO,QAAQ,GAAG,IAAI,IAAI,OAAO,KAAK,KAAK;AACxD,cAAQ,UAAU,IAAI;AACtB,cAAQ,MAAM,IAAI;AAClB;AAAA,IACF;AAEA,QAAI,QAAQ,SAAS,gBAAgB;AAGnC;AAAA,IACF;AAAA,EACF,CAAC;AAEH,QAAM,QAAQ,OAAO,yBAAS,IAAe,sBAAsB;AACnE,MAAI,OAAO;AACT,WAAO,OAAO,MAAM;AAAA,EACtB;AAEA,MAAI;AAOJ,MAAI;AACF,UAAM,YAAY,kBAAkB,KAAY;AAChD,iBAAa,CAAC,QAAQ,SAAS,UAAU,IAAI,wBAAwB,QAAe,IAAI;AAAA,EAC1F,QAAQ;AACN,iBAAa;AAAA,EACf;AAEA,MAAI,CAAC,YAAY;AACf,WAAO,OAAO,MAAM;AAAA,EACtB;AAEA,SAAO,OAAO,WAAW,EAAE,MAAM,SAAS,MAAM,WAAW,SAAS,QAAQ,GAAG,KAAK;AACtF,CAAC;AAOH,IAAM,gBAAgB,CAAC,SACrB,SAAS,YAAY,SAAS,QAAQ,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,KAAK;AAE3F,IAAM,gBAAgB,CAAC,SAAyB;AAC9C,MAAI,CAAC,KAAM,QAAO;AAClB,QAAM,WAAW,KAAK,MAAM,GAAG,EAAE,OAAO,OAAO;AAC/C,SAAO,SACJ,IAAI,CAAC,QAAS,WAAW,KAAK,GAAG,IAAI,OAAO,GAAI,EAChD,KAAK,GAAG,EACR,QAAQ,WAAW,IAAI;AAC5B;AAEA,IAAM,wBAAwB,CAAC,KAAa,YAA6B;AACvE,MAAI,CAAC,OAAO,CAAC,QAAS,QAAO;AAC7B,MAAI,QAAQ,QAAS,QAAO;AAC5B,MAAI,QAAQ
,WAAW,GAAG,GAAG,GAAG,EAAG,QAAO;AAC1C,MAAI,QAAQ,WAAW,GAAG,GAAG,IAAI,EAAG,QAAO;AAE3C,MAAI,IAAI,WAAW,GAAG,OAAO,IAAI,EAAG,QAAO;AAC3C,SAAO;AACT;AAEO,IAAM,mBAAmB,CAAC,OAA2BC,YAAkC;AAC5F,QAAM,UAAU,uBAAuBA,OAAa;AAEpD,QAAM,UAAU,SAAS,SAAS,OAAO,CAAC,MAAM,KAAK,EAAE,SAAS,QAAQ,KAAK,CAAC;AAE9E,QAAM,gBAAgB,QAAQ;AAAA,IAAO,CAAC,MACpC,MAAM,QAAQ,GAAG,MAAM,QAAQ,IAAI,EAAE,KAAK,SAAS,SAAS,SAAS,IAAI;AAAA,EAC3E;AAEA,QAAM,oBAAoB,QAAQ;AAAA,IAAO,CAAC,MACxC,MAAM,QAAQ,GAAG,MAAM,QAAQ,IAAI,EAAE,KAAK,SAAS,SAAS,aAAa,IAAI;AAAA,EAC/E;AAEA,QAAM,QAAQ,uBAAO,KAAK,MAAM;AAC9B,QAAI,cAAc,WAAW,EAAG;AAChC,UAAM,UAAU;AAAA,MACd,uBAAO,QAAQ,eAAe,CAAC,UAAe,MAAM,OAAO,OAAO,QAAQ,MAAM,SAAS,GAAG;AAAA,QAC1F,aAAa;AAAA,MACf,CAAC,EAAE,KAAK,uBAAO,MAAM;AAAA,IACvB;AAAA,EACF,CAAC;AAED,QAAM,qBAAqB,CAAC,gBAC1B,uBAAO,IAAI,aAAa;AACtB,QAAI,CAAC,eAAe,cAAc,WAAW,EAAG;AAChD,QAAI,kBAAkB,WAAW,EAAG;AAEpC,UAAM,iBAAiB,cAAc,WAAW;AAEhD,WAAO,uBAAO;AAAA,MACZ;AAAA,MACA,CAAC,UAAe;AACd,cAAM,OAAQ,OAAO,MAAM,QAAQ,CAAC;AACpC,cAAM,WAAW,KAAK,KAAK,CAAC,QAAQ,sBAAsB,KAAK,cAAc,CAAC;AAC9E,YAAI,CAAC,SAAU,QAAO,uBAAO;AAC7B,eAAO,MAAM,OAAO,OAAO,QAAQ,MAAM,SAAS;AAAA,MACpD;AAAA,MACA,EAAE,aAAa,YAAY;AAAA,IAC7B;AAAA,EACF,CAAC,EAAE,KAAK,uBAAO,MAAM;AAEvB,SAAO,EAAE,OAAO,mBAAmB;AACrC;AASO,IAAM,UAAU,CAAC,WAAgE,uBAAO;;;ADjVxF,IAAMC,OAAeA;AAErB,IAAMC,kBAAiB,CAAC,OAA2B,YAC/C,eAAe,OAAc,OAAO;AAExC,IAAMC,iBAAgB,CAAC,OAA2B,YAC9C,cAAc,OAAc,OAAO;AAEvC,IAAMC,WAAU,CAAC,OAA2B,YACxC,QAAQ,OAAc,OAAO;AAEjC,IAAMC,oBAAmB,CAAC,OAA2BC,YACjD,iBAAiB,OAAcA,OAAM;AAEzC,IAAMC,WAAU,CAAC,UAAwE,QAAQ,KAAY;","names":["Ref","cleanup","install","makeSourceWiring","scopedExecute","scopedValidate","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","import_effect","module","Ref","module","Ref","scopedValidate","scopedExecute","cleanup","makeSourceWiring","module","install"]}
|