@nicia-ai/typegraph 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. package/README.md +23 -0
  2. package/dist/{ast-BVyihVbP.d.cts → ast-CXFx6bF6.d.ts} +45 -165
  3. package/dist/{ast-BVyihVbP.d.ts → ast-D-3bOanX.d.cts} +45 -165
  4. package/dist/backend/drizzle/index.cjs +20 -20
  5. package/dist/backend/drizzle/index.d.cts +6 -5
  6. package/dist/backend/drizzle/index.d.ts +6 -5
  7. package/dist/backend/drizzle/index.js +8 -8
  8. package/dist/backend/drizzle/postgres.cjs +9 -9
  9. package/dist/backend/drizzle/postgres.d.cts +9 -11
  10. package/dist/backend/drizzle/postgres.d.ts +9 -11
  11. package/dist/backend/drizzle/postgres.js +6 -6
  12. package/dist/backend/drizzle/schema/postgres.cjs +10 -10
  13. package/dist/backend/drizzle/schema/postgres.d.cts +6 -5
  14. package/dist/backend/drizzle/schema/postgres.d.ts +6 -5
  15. package/dist/backend/drizzle/schema/postgres.js +3 -3
  16. package/dist/backend/drizzle/schema/sqlite.cjs +10 -10
  17. package/dist/backend/drizzle/schema/sqlite.d.cts +6 -5
  18. package/dist/backend/drizzle/schema/sqlite.d.ts +6 -5
  19. package/dist/backend/drizzle/schema/sqlite.js +3 -3
  20. package/dist/backend/drizzle/sqlite.cjs +9 -9
  21. package/dist/backend/drizzle/sqlite.d.cts +17 -18
  22. package/dist/backend/drizzle/sqlite.d.ts +17 -18
  23. package/dist/backend/drizzle/sqlite.js +6 -6
  24. package/dist/backend/postgres/index.cjs +21 -17
  25. package/dist/backend/postgres/index.d.cts +8 -7
  26. package/dist/backend/postgres/index.d.ts +8 -7
  27. package/dist/backend/postgres/index.js +8 -8
  28. package/dist/backend/sqlite/index.cjs +27 -21
  29. package/dist/backend/sqlite/index.cjs.map +1 -1
  30. package/dist/backend/sqlite/index.d.cts +7 -6
  31. package/dist/backend/sqlite/index.d.ts +7 -6
  32. package/dist/backend/sqlite/index.js +18 -12
  33. package/dist/backend/sqlite/index.js.map +1 -1
  34. package/dist/{chunk-YM5AL65Y.cjs → chunk-2WVFEIHR.cjs} +29 -3
  35. package/dist/chunk-2WVFEIHR.cjs.map +1 -0
  36. package/dist/{chunk-4PIEL2VO.js → chunk-3PURVEA4.js} +36 -5
  37. package/dist/chunk-3PURVEA4.js.map +1 -0
  38. package/dist/chunk-4HARSV2G.js +1448 -0
  39. package/dist/chunk-4HARSV2G.js.map +1 -0
  40. package/dist/{chunk-IIAT36MI.js → chunk-54WJF3DW.js} +29 -3
  41. package/dist/chunk-54WJF3DW.js.map +1 -0
  42. package/dist/chunk-CMHFS34N.cjs +390 -0
  43. package/dist/chunk-CMHFS34N.cjs.map +1 -0
  44. package/dist/chunk-DD6ONEBN.cjs +1264 -0
  45. package/dist/chunk-DD6ONEBN.cjs.map +1 -0
  46. package/dist/chunk-F2BZSEFE.js +388 -0
  47. package/dist/chunk-F2BZSEFE.js.map +1 -0
  48. package/dist/{chunk-DDM2FZRJ.cjs → chunk-JQDWEX6V.cjs} +24 -24
  49. package/dist/{chunk-DDM2FZRJ.cjs.map → chunk-JQDWEX6V.cjs.map} +1 -1
  50. package/dist/chunk-NP4G4ZKM.js +1228 -0
  51. package/dist/chunk-NP4G4ZKM.js.map +1 -0
  52. package/dist/{chunk-UJAGXJDG.cjs → chunk-NU2XNMVI.cjs} +38 -7
  53. package/dist/chunk-NU2XNMVI.cjs.map +1 -0
  54. package/dist/{chunk-JKTO7TW3.js → chunk-O5XPCJLF.js} +25 -3
  55. package/dist/chunk-O5XPCJLF.js.map +1 -0
  56. package/dist/{chunk-2QHQ2C4P.js → chunk-OGGLFYFA.js} +36 -5
  57. package/dist/chunk-OGGLFYFA.js.map +1 -0
  58. package/dist/{chunk-SV5H3XM5.cjs → chunk-OYL2SGBD.cjs} +26 -2
  59. package/dist/chunk-OYL2SGBD.cjs.map +1 -0
  60. package/dist/chunk-SFY2PPOY.cjs +1469 -0
  61. package/dist/chunk-SFY2PPOY.cjs.map +1 -0
  62. package/dist/{chunk-JDAET5LO.js → chunk-SMLIWLS7.js} +9 -9
  63. package/dist/chunk-SMLIWLS7.js.map +1 -0
  64. package/dist/{chunk-VXRVGFCI.js → chunk-U3452TEU.js} +17 -17
  65. package/dist/{chunk-VXRVGFCI.js.map → chunk-U3452TEU.js.map} +1 -1
  66. package/dist/{chunk-MNO33ASC.cjs → chunk-UYMT4LO2.cjs} +9 -8
  67. package/dist/chunk-UYMT4LO2.cjs.map +1 -0
  68. package/dist/chunk-V7CS2MDB.cjs +289 -0
  69. package/dist/chunk-V7CS2MDB.cjs.map +1 -0
  70. package/dist/chunk-WE5BKYNB.js +287 -0
  71. package/dist/chunk-WE5BKYNB.js.map +1 -0
  72. package/dist/{chunk-L642L24T.js → chunk-XDTYTNYL.js} +14 -21
  73. package/dist/chunk-XDTYTNYL.js.map +1 -0
  74. package/dist/{chunk-N4AOJ3VF.cjs → chunk-XZL6MCZJ.cjs} +38 -7
  75. package/dist/chunk-XZL6MCZJ.cjs.map +1 -0
  76. package/dist/{chunk-DBFCKELK.cjs → chunk-ZJHQZZT2.cjs} +18 -27
  77. package/dist/chunk-ZJHQZZT2.cjs.map +1 -0
  78. package/dist/index-Dkicw49A.d.cts +373 -0
  79. package/dist/index-Dkicw49A.d.ts +373 -0
  80. package/dist/index.cjs +5915 -3377
  81. package/dist/index.cjs.map +1 -1
  82. package/dist/index.d.cts +24 -664
  83. package/dist/index.d.ts +24 -664
  84. package/dist/index.js +5781 -3223
  85. package/dist/index.js.map +1 -1
  86. package/dist/indexes/index.cjs +16 -16
  87. package/dist/indexes/index.d.cts +5 -4
  88. package/dist/indexes/index.d.ts +5 -4
  89. package/dist/indexes/index.js +2 -2
  90. package/dist/interchange/index.cjs +9 -9
  91. package/dist/interchange/index.cjs.map +1 -1
  92. package/dist/interchange/index.d.cts +5 -3
  93. package/dist/interchange/index.d.ts +5 -3
  94. package/dist/interchange/index.js +8 -8
  95. package/dist/interchange/index.js.map +1 -1
  96. package/dist/manager-Jc5Btay9.d.cts +493 -0
  97. package/dist/manager-e9LXthrx.d.ts +493 -0
  98. package/dist/profiler/index.cjs +2 -8
  99. package/dist/profiler/index.cjs.map +1 -1
  100. package/dist/profiler/index.d.cts +9 -161
  101. package/dist/profiler/index.d.ts +9 -161
  102. package/dist/profiler/index.js +4 -4
  103. package/dist/profiler/index.js.map +1 -1
  104. package/dist/schema/index.cjs +145 -0
  105. package/dist/schema/index.cjs.map +1 -0
  106. package/dist/schema/index.d.cts +237 -0
  107. package/dist/schema/index.d.ts +237 -0
  108. package/dist/schema/index.js +72 -0
  109. package/dist/schema/index.js.map +1 -0
  110. package/dist/{store-BPhjw5S8.d.ts → store-DM3Tk3Pw.d.ts} +874 -1283
  111. package/dist/{store-DNOOQEm8.d.cts → store-nbBybLWP.d.cts} +874 -1283
  112. package/dist/{test-helpers-BjyRYJZX.d.ts → test-helpers-CIq1Hhj1.d.ts} +5 -1
  113. package/dist/{test-helpers-NoQXhleQ.d.cts → test-helpers-DPRFVky4.d.cts} +5 -1
  114. package/dist/{types-D_3mEv2y.d.ts → types-BL1GyVku.d.cts} +2 -2
  115. package/dist/{types-DsRfx0yk.d.ts → types-Cdbi4hcx.d.ts} +228 -4
  116. package/dist/{types-BrSfFSpW.d.cts → types-DCGa53O2.d.ts} +2 -2
  117. package/dist/{types-aapj0GLz.d.cts → types-DDP0MGBF.d.cts} +228 -4
  118. package/dist/{types-CX4cLd7M.d.ts → types-DHRsi6j9.d.cts} +4 -3
  119. package/dist/types-DTJEu_-h.d.cts +158 -0
  120. package/dist/types-DTJEu_-h.d.ts +158 -0
  121. package/dist/{types-a5rAxC92.d.cts → types-ZT5mlism.d.ts} +4 -3
  122. package/package.json +17 -2
  123. package/dist/chunk-2FURVVAX.cjs +0 -350
  124. package/dist/chunk-2FURVVAX.cjs.map +0 -1
  125. package/dist/chunk-2QHQ2C4P.js.map +0 -1
  126. package/dist/chunk-4PIEL2VO.js.map +0 -1
  127. package/dist/chunk-DBFCKELK.cjs.map +0 -1
  128. package/dist/chunk-H7THXVH6.cjs +0 -314
  129. package/dist/chunk-H7THXVH6.cjs.map +0 -1
  130. package/dist/chunk-HXAPXPZH.cjs +0 -680
  131. package/dist/chunk-HXAPXPZH.cjs.map +0 -1
  132. package/dist/chunk-IIAT36MI.js.map +0 -1
  133. package/dist/chunk-JDAET5LO.js.map +0 -1
  134. package/dist/chunk-JKTO7TW3.js.map +0 -1
  135. package/dist/chunk-L642L24T.js.map +0 -1
  136. package/dist/chunk-MNO33ASC.cjs.map +0 -1
  137. package/dist/chunk-N4AOJ3VF.cjs.map +0 -1
  138. package/dist/chunk-QB3WBMDT.js +0 -646
  139. package/dist/chunk-QB3WBMDT.js.map +0 -1
  140. package/dist/chunk-SV5H3XM5.cjs.map +0 -1
  141. package/dist/chunk-UJAGXJDG.cjs.map +0 -1
  142. package/dist/chunk-X4EVMBON.js +0 -312
  143. package/dist/chunk-X4EVMBON.js.map +0 -1
  144. package/dist/chunk-XZL4NLV6.js +0 -348
  145. package/dist/chunk-XZL4NLV6.js.map +0 -1
  146. package/dist/chunk-YM5AL65Y.cjs.map +0 -1
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/ontology/types.ts","../src/errors/validation.ts","../src/schema/migration.ts","../src/ontology/closures.ts","../src/ontology/constants.ts","../src/registry/kind-registry.ts","../src/schema/serializer.ts","../src/schema/manager.ts"],"names":["ValidationError","index","nowIso","getNodeKinds","getEdgeKinds","z","MigrationError"],"mappings":";;;;;;;AAOO,IAAM,eAAA,GAAkB;AA+ExB,SAAS,WAAW,KAAA,EAAmC;AAC5D,EAAA,OACE,OAAO,UAAU,QAAA,IACjB,KAAA,KAAU,QACV,eAAA,IAAmB,KAAA,IAClB,KAAA,CAAkC,eAAe,CAAA,KAAM,IAAA;AAE5D;AAKO,SAAS,YAAY,SAAA,EAAiD;AAC3E,EAAA,IAAI,OAAO,cAAc,QAAA,EAAU;AACjC,IAAA,OAAO,SAAA;AAAA,EACT;AACA,EAAA,OAAO,SAAA,CAAU,IAAA;AACnB;;;AC3DA,SAAS,4BAA4B,KAAA,EAAoC;AACvE,EAAA,OAAO,KAAA,CAAM,MAAA,CAAO,GAAA,CAAI,CAAC,KAAA,MAAW;AAAA,IAClC,IAAA,EAAM,KAAA,CAAM,IAAA,CAAK,IAAA,CAAK,GAAG,CAAA;AAAA,IACzB,SAAS,KAAA,CAAM,OAAA;AAAA,IACf,MAAM,KAAA,CAAM;AAAA,GACd,CAAE,CAAA;AACJ;AAKA,SAAS,oBAAoB,OAAA,EAAoC;AAC/D,EAAA,IAAI,QAAQ,EAAA,EAAI;AACd,IAAA,OAAO,CAAA,EAAG,OAAA,CAAQ,IAAI,CAAA,CAAA,EAAI,QAAQ,EAAE,CAAA,CAAA;AAAA,EACtC;AACA,EAAA,OAAO,CAAA,IAAA,EAAO,QAAQ,IAAI,CAAA,CAAA;AAC5B;AAoBO,SAAS,aAAA,CACd,MAAA,EACA,KAAA,EACA,OAAA,EACG;AACH,EAAA,MAAM,MAAA,GAAS,MAAA,CAAO,SAAA,CAAU,KAAK,CAAA;AAErC,EAAA,IAAI,OAAO,OAAA,EAAS;AAClB,IAAA,OAAO,MAAA,CAAO,IAAA;AAAA,EAChB;AAEA,EAAA,MAAM,MAAA,GAAS,2BAAA,CAA4B,MAAA,CAAO,KAAK,CAAA;AACvD,EAAA,MAAM,QAAA,GAAW,oBAAoB,OAAO,CAAA;AAE5C,EAAA,MAAM,IAAIA,iCAAA;AAAA,IACR,CAAA,QAAA,EAAW,QAAQ,UAAU,CAAA,WAAA,EAAc,QAAQ,CAAA,EAAA,EAAK,MAAA,CAAO,MAAM,OAAO,CAAA,CAAA;AAAA,IAC5E;AAAA,MACE,YAAY,OAAA,CAAQ,UAAA;AAAA,MACpB,MAAM,OAAA,CAAQ,IAAA;AAAA,MACd,WAAW,OAAA,CAAQ,SAAA;AAAA,MACnB,GAAI,OAAA,CAAQ,EAAA,KAAO,UAAa,EAAE,EAAA,EAAI,QAAQ,EAAA,EAAG;AAAA,MACjD;AAAA,KACF;AAAA,IACA,EAAE,KAAA,EAAO,MAAA,CAAO,KAAA;AAAM,GACxB;AACF;AAeO,SAAS,iBAAA,CACd,MAAA,EACA,KAAA,EACA,OAAA,EAKG;AACH,EAAA,OAAO,aAAA,CAAc,QAAQ,KAAA,EAAO;AAAA,IAClC,UAAA,EAAY,MAAA;AAAA,IACZ,GAAG;AAAA,GACJ,CAAA;AACH;AAeO,SAAS,iBAAA,CACd,MAAA,EACA,KAAA,EACA,OAAA,EAKG;AACH,EAAA,OAAO,aAAA,CAAc,QAAQ,KAAA,
EAAO;AAAA,IAClC,UAAA,EAAY,MAAA;AAAA,IACZ,GAAG;AAAA,GACJ,CAAA;AACH;AAwBO,SAAS,YAAA,CACd,OACA,OAAA,EACiB;AACjB,EAAA,MAAM,MAAA,GAAS,4BAA4B,KAAK,CAAA;AAChD,EAAA,MAAM,QAAA,GAAW,oBAAoB,OAAO,CAAA;AAE5C,EAAA,OAAO,IAAIA,iCAAA;AAAA,IACT,yBAAyB,OAAA,CAAQ,UAAU,IAAI,QAAQ,CAAA,EAAA,EAAK,MAAM,OAAO,CAAA,CAAA;AAAA,IACzE;AAAA,MACE,YAAY,OAAA,CAAQ,UAAA;AAAA,MACpB,MAAM,OAAA,CAAQ,IAAA;AAAA,MACd,WAAW,OAAA,CAAQ,SAAA;AAAA,MACnB,GAAI,OAAA,CAAQ,EAAA,KAAO,UAAa,EAAE,EAAA,EAAI,QAAQ,EAAA,EAAG;AAAA,MACjD;AAAA,KACF;AAAA,IACA,EAAE,OAAO,KAAA;AAAM,GACjB;AACF;AAkBO,SAAS,qBAAA,CACd,OAAA,EACA,MAAA,EACA,OAAA,EACiB;AACjB,EAAA,OAAO,IAAIA,kCAAgB,OAAA,EAAS;AAAA,IAClC,GAAI,OAAA,EAAS,UAAA,KAAe,MAAA,IAAa;AAAA,MACvC,YAAY,OAAA,CAAQ;AAAA,KACtB;AAAA,IACA,GAAI,OAAA,EAAS,IAAA,KAAS,UAAa,EAAE,IAAA,EAAM,QAAQ,IAAA,EAAK;AAAA,IACxD,GAAI,OAAA,EAAS,SAAA,KAAc,UAAa,EAAE,SAAA,EAAW,QAAQ,SAAA,EAAU;AAAA,IACvE,GAAI,OAAA,EAAS,EAAA,KAAO,UAAa,EAAE,EAAA,EAAI,QAAQ,EAAA,EAAG;AAAA,IAClD;AAAA,GACD,CAAA;AACH;;;AClHO,SAAS,iBAAA,CACd,QACA,KAAA,EACY;AACZ,EAAA,MAAM,WAAA,GAAc,SAAA,CAAU,MAAA,CAAO,KAAA,EAAO,MAAM,KAAK,CAAA;AACvD,EAAA,MAAM,WAAA,GAAc,SAAA,CAAU,MAAA,CAAO,KAAA,EAAO,MAAM,KAAK,CAAA;AACvD,EAAA,MAAM,eAAA,GAAkB,YAAA,CAAa,MAAA,CAAO,QAAA,EAAU,MAAM,QAAQ,CAAA;AAEpE,EAAA,MAAM,aAAa,CAAC,GAAG,aAAa,GAAG,WAAA,EAAa,GAAG,eAAe,CAAA;AACtE,EAAA,MAAM,qBAAqB,UAAA,CAAW,IAAA;AAAA,IACpC,CAAC,MAAA,KAAW,MAAA,CAAO,QAAA,KAAa;AAAA,GAClC;AACA,EAAA,MAAM,UAAA,GAAa,WAAW,MAAA,GAAS,CAAA;AAEvC,EAAA,MAAM,OAAA,GAAU,eAAA,CAAgB,WAAA,EAAa,WAAA,EAAa,eAAe,CAAA;AAEzE,EAAA,OAAO;AAAA,IACL,aAAa,MAAA,CAAO,OAAA;AAAA,IACpB,WAAW,KAAA,CAAM,OAAA;AAAA,IACjB,KAAA,EAAO,WAAA;AAAA,IACP,KAAA,EAAO,WAAA;AAAA,IACP,QAAA,EAAU,eAAA;AAAA,IACV,kBAAA;AAAA,IACA,uBAAuB,CAAC,kBAAA;AAAA,IACxB,UAAA;AAAA,IACA;AAAA,GACF;AACF;AASA,SAAS,SAAA,CACP,QACA,KAAA,EACuB;AACvB,EAAA,MAAM,UAAwB,EAAC;AAC/B,EAAA,MAAM,cAAc,IAAI,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,MAAM,CAAC,CAAA;AAC/C,EAAA,MAAM,aAAa,IAAI,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAG7C,EAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,IAAA,IAAI,CAAC,UAAA,CAAW,GAAA,CAAI,IAAI,CAA
A,EAAG;AACzB,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,SAAA;AAAA,QACN,IAAA,EAAM,IAAA;AAAA,QACN,QAAA,EAAU,UAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,aAAA,CAAA;AAAA,QAC3B,MAAA,EAAQ,OAAO,IAAI;AAAA,OACpB,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,IAAI,CAAC,WAAA,CAAY,GAAA,CAAI,IAAI,CAAA,EAAG;AAC1B,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,OAAA;AAAA,QACN,IAAA,EAAM,IAAA;AAAA,QACN,QAAA,EAAU,MAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,WAAA,CAAA;AAAA,QAC3B,KAAA,EAAO,MAAM,IAAI;AAAA,OAClB,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,IAAA,IAAI,UAAA,CAAW,GAAA,CAAI,IAAI,CAAA,EAAG;AACxB,MAAA,MAAM,UAAA,GAAa,OAAO,IAAI,CAAA;AAC9B,MAAA,MAAM,SAAA,GAAY,MAAM,IAAI,CAAA;AAC5B,MAAA,MAAM,WAAA,GAAc,WAAA,CAAY,IAAA,EAAM,UAAA,EAAY,SAAS,CAAA;AAC3D,MAAA,OAAA,CAAQ,IAAA,CAAK,GAAG,WAAW,CAAA;AAAA,IAC7B;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAKA,SAAS,WAAA,CACP,IAAA,EACA,MAAA,EACA,KAAA,EACuB;AACvB,EAAA,MAAM,UAAwB,EAAC;AAG/B,EAAA,MAAM,WAAA,GAAc,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,UAAU,CAAA;AACpD,EAAA,MAAM,UAAA,GAAa,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,UAAU,CAAA;AAClD,EAAA,IAAI,gBAAgB,UAAA,EAAY;AAE9B,IAAA,MAAM,WAAA,GAAc,MAAA,CAAO,UAAA,CAAW,UAAA,IAAc,EAAC;AACrD,IAAA,MAAM,UAAA,GAAa,KAAA,CAAM,UAAA,CAAW,UAAA,IAAc,EAAC;AACnD,IAAA,MAAM,cAAA,GAAiB,IAAI,GAAA,CAAI,MAAA,CAAO,WAAW,QAAQ,CAAA;AACzD,IAAA,MAAM,aAAA,GAAgB,IAAI,GAAA,CAAI,KAAA,CAAM,WAAW,QAAQ,CAAA;AAEvD,IAAA,MAAM,UAAA,GAAa,MAAA,CAAO,IAAA,CAAK,UAAU,CAAA,CAAE,MAAA;AAAA,MACzC,CAAC,CAAA,KAAM,EAAE,CAAA,IAAK,WAAA;AAAA,KAChB;AACA,IAAA,MAAM,YAAA,GAAe,MAAA,CAAO,IAAA,CAAK,WAAW,CAAA,CAAE,MAAA;AAAA,MAC5C,CAAC,CAAA,KAAM,EAAE,CAAA,IAAK,UAAA;AAAA,KAChB;AACA,IAAA,MAAM,WAAA,GAAc,CAAC,GAAG,aAAa,CAAA,CAAE,MAAA;AAAA,MACrC,CAAC,CAAA,KAAM,CAAC,cAAA,CAAe,IAAI,CAAC;AAAA,KAC9B;AAEA,IAAA,MAAM,EAAE,QAAA,EAAU,OAAA,EAAQ,GAAI,6BAAA;AAAA,MAC5B,IAAA;AAAA,MACA,YAAA;AAAA,MACA,UAAA;AAAA,MACA;AAAA,KACF;AAEA,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA;AAAA,MACA,OAAA;AAAA,MACA,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AA
GA,EAAA,IAAI,MAAA,CAAO,QAAA,KAAa,KAAA,CAAM,QAAA,EAAU;AACtC,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,SAAA;AAAA,MACV,OAAA,EAAS,0BAA0B,MAAA,CAAO,QAAQ,SAAS,KAAA,CAAM,QAAQ,UAAU,IAAI,CAAA,CAAA,CAAA;AAAA,MACvF,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAGA,EAAA,MAAM,iBAAA,GAAoB,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,iBAAiB,CAAA;AACjE,EAAA,MAAM,gBAAA,GAAmB,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,iBAAiB,CAAA;AAC/D,EAAA,IAAI,sBAAsB,gBAAA,EAAkB;AAC1C,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,SAAA;AAAA,MACV,OAAA,EAAS,mCAAmC,IAAI,CAAA,CAAA,CAAA;AAAA,MAChD,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAEA,EAAA,OAAO,OAAA;AACT;AAKA,SAAS,6BAAA,CACP,IAAA,EACA,YAAA,EACA,UAAA,EACA,WAAA,EAC+C;AAC/C,EAAA,IAAI,YAAA,CAAa,SAAS,CAAA,EAAG;AAC3B,IAAA,OAAO;AAAA,MACL,QAAA,EAAU,UAAA;AAAA,MACV,SAAS,CAAA,yBAAA,EAA4B,IAAI,MAAM,YAAA,CAAa,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KACxE;AAAA,EACF;AACA,EAAA,IAAI,WAAA,CAAY,SAAS,CAAA,EAAG;AAC1B,IAAA,OAAO;AAAA,MACL,QAAA,EAAU,UAAA;AAAA,MACV,SAAS,CAAA,4BAAA,EAA+B,IAAI,MAAM,WAAA,CAAY,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KAC1E;AAAA,EACF;AACA,EAAA,IAAI,UAAA,CAAW,SAAS,CAAA,EAAG;AACzB,IAAA,OAAO;AAAA,MACL,QAAA,EAAU,MAAA;AAAA,MACV,SAAS,CAAA,qBAAA,EAAwB,IAAI,MAAM,UAAA,CAAW,IAAA,CAAK,IAAI,CAAC,CAAA;AAAA,KAClE;AAAA,EACF;AACA,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,MAAA;AAAA,IACV,OAAA,EAAS,0BAA0B,IAAI,CAAA,CAAA;AAAA,GACzC;AACF;AASA,SAAS,SAAA,CACP,QACA,KAAA,EACuB;AACvB,EAAA,MAAM,UAAwB,EAAC;AAC/B,EAAA,MAAM,cAAc,IAAI,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,MAAM,CAAC,CAAA;AAC/C,EAAA,MAAM,aAAa,IAAI,GAAA,CAAI,MAAA,CAAO,IAAA,CAAK,KAAK,CAAC,CAAA;AAG7C,EAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,IAAA,IAAI,CAAC,UAAA,CAAW,GAAA,CAAI,IAAI,CAAA,EAAG;AACzB,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,SAAA;AAAA,QACN,IAAA,EAAM,IAAA;AAAA,QACN,QAAA,EAAU,UAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,aAAA,CAAA;AAAA,QAC3B,MAAA,EAAQ,OAAO,IAAI;AAAA,OACpB,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,QAAQ,UAAA,EAAY;AAC7B,IAAA,IAAI,CAAC,WAAA,CAAY,GAAA,CAAI,IAAI,CAAA,E
AAG;AAC1B,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,OAAA;AAAA,QACN,IAAA,EAAM,IAAA;AAAA,QACN,QAAA,EAAU,MAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,WAAA,CAAA;AAAA,QAC3B,KAAA,EAAO,MAAM,IAAI;AAAA,OAClB,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,QAAQ,WAAA,EAAa;AAC9B,IAAA,IAAI,UAAA,CAAW,GAAA,CAAI,IAAI,CAAA,EAAG;AACxB,MAAA,MAAM,UAAA,GAAa,OAAO,IAAI,CAAA;AAC9B,MAAA,MAAM,SAAA,GAAY,MAAM,IAAI,CAAA;AAC5B,MAAA,MAAM,WAAA,GAAc,WAAA,CAAY,IAAA,EAAM,UAAA,EAAY,SAAS,CAAA;AAC3D,MAAA,OAAA,CAAQ,IAAA,CAAK,GAAG,WAAW,CAAA;AAAA,IAC7B;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAKA,SAAS,WAAA,CACP,IAAA,EACA,MAAA,EACA,KAAA,EACuB;AACvB,EAAA,MAAM,UAAwB,EAAC;AAG/B,EAAA,MAAM,UAAA,GAAa,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,SAAS,CAAA;AAClD,EAAA,MAAM,SAAA,GAAY,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,SAAS,CAAA;AAChD,EAAA,IAAI,eAAe,SAAA,EAAW;AAC5B,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,SAAA;AAAA,MACV,OAAA,EAAS,0BAA0B,IAAI,CAAA,CAAA,CAAA;AAAA,MACvC,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAEA,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,OAAO,CAAA;AAC9C,EAAA,MAAM,OAAA,GAAU,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,OAAO,CAAA;AAC5C,EAAA,IAAI,aAAa,OAAA,EAAS;AACxB,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,SAAA;AAAA,MACV,OAAA,EAAS,wBAAwB,IAAI,CAAA,CAAA,CAAA;AAAA,MACrC,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAGA,EAAA,IAAI,MAAA,CAAO,WAAA,KAAgB,KAAA,CAAM,WAAA,EAAa;AAC5C,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,SAAA;AAAA,MACV,OAAA,EAAS,6BAA6B,MAAA,CAAO,WAAW,SAAS,KAAA,CAAM,WAAW,UAAU,IAAI,CAAA,CAAA,CAAA;AAAA,MAChG,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAGA,EAAA,MAAM,WAAA,GAAc,IAAA,CAAK,SAAA,CAAU,MAAA,CAAO,UAAU,CAAA;AACpD,EAAA,MAAM,UAAA,GAAa,IAAA,CAAK,SAAA,CAAU,KAAA,CAAM,UAAU,CAAA;AAClD,EAAA,IAAI,gBAAgB,UAAA,EAAY;AAC9B,IAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,MACX,IAAA,EAAM,UAAA;AAAA,MACN,IAAA,EAAM,IAAA;AAAA,MACN,QAAA,EAAU,MAAA;AAAA,MACV,OAAA,EAAS,2BAA2B,IAAI,CAAA,CAAA,CAAA;AAAA,MACxC,MAAA;AAAA,MAC
A;AAAA,KACD,CAAA;AAAA,EACH;AAEA,EAAA,OAAO,OAAA;AACT;AASA,SAAS,YAAA,CACP,QACA,KAAA,EAC2B;AAC3B,EAAA,MAAM,UAA4B,EAAC;AAGnC,EAAA,MAAM,kBAAkB,IAAI,GAAA,CAAI,OAAO,IAAA,CAAK,MAAA,CAAO,SAAS,CAAC,CAAA;AAC7D,EAAA,MAAM,iBAAiB,IAAI,GAAA,CAAI,OAAO,IAAA,CAAK,KAAA,CAAM,SAAS,CAAC,CAAA;AAE3D,EAAA,KAAA,MAAW,QAAQ,eAAA,EAAiB;AAClC,IAAA,IAAI,CAAC,cAAA,CAAe,GAAA,CAAI,IAAI,CAAA,EAAG;AAC7B,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,SAAA;AAAA,QACN,MAAA,EAAQ,UAAA;AAAA,QACR,IAAA;AAAA,QACA,QAAA,EAAU,UAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,aAAA;AAAA,OAC5B,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,KAAA,MAAW,QAAQ,cAAA,EAAgB;AACjC,IAAA,IAAI,CAAC,eAAA,CAAgB,GAAA,CAAI,IAAI,CAAA,EAAG;AAC9B,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,OAAA;AAAA,QACN,MAAA,EAAQ,UAAA;AAAA,QACR,IAAA;AAAA,QACA,QAAA,EAAU,MAAA;AAAA,QACV,OAAA,EAAS,cAAc,IAAI,CAAA,WAAA;AAAA,OAC5B,CAAA;AAAA,IACH;AAAA,EACF;AAGA,EAAA,MAAM,kBAAkB,IAAI,GAAA;AAAA,IAC1B,MAAA,CAAO,SAAA,CAAU,GAAA,CAAI,CAAC,MAAM,CAAA,EAAG,CAAA,CAAE,QAAQ,CAAA,CAAA,EAAI,CAAA,CAAE,IAAI,CAAA,CAAA,EAAI,CAAA,CAAE,EAAE,CAAA,CAAE;AAAA,GAC/D;AACA,EAAA,MAAM,iBAAiB,IAAI,GAAA;AAAA,IACzB,KAAA,CAAM,SAAA,CAAU,GAAA,CAAI,CAAC,MAAM,CAAA,EAAG,CAAA,CAAE,QAAQ,CAAA,CAAA,EAAI,CAAA,CAAE,IAAI,CAAA,CAAA,EAAI,CAAA,CAAE,EAAE,CAAA,CAAE;AAAA,GAC9D;AAEA,EAAA,KAAA,MAAW,eAAe,eAAA,EAAiB;AACzC,IAAA,IAAI,CAAC,cAAA,CAAe,GAAA,CAAI,WAAW,CAAA,EAAG;AACpC,MAAA,MAAM,CAAC,QAAA,EAAU,IAAA,EAAM,EAAE,CAAA,GAAI,WAAA,CAAY,MAAM,GAAG,CAAA;AAClD,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,SAAA;AAAA,QACN,MAAA,EAAQ,UAAA;AAAA,QACR,IAAA,EAAM,WAAA;AAAA,QACN,QAAA,EAAU,SAAA;AAAA,QACV,SAAS,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,IAAI,KAAK,EAAE,CAAA,aAAA;AAAA,OAC7C,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,KAAA,MAAW,eAAe,cAAA,EAAgB;AACxC,IAAA,IAAI,CAAC,eAAA,CAAgB,GAAA,CAAI,WAAW,CAAA,EAAG;AACrC,MAAA,MAAM,CAAC,QAAA,EAAU,IAAA,EAAM,EAAE,CAAA,GAAI,WAAA,CAAY,MAAM,GAAG,CAAA;AAClD,MAAA,OAAA,CAAQ,IAAA,CAAK;AAAA,QACX,IAAA,EAAM,OAAA;AAAA,QACN,MAAA,EAAQ,UAAA;AAAA,QACR,IAAA,EAAM,WAAA;AAAA,QACN,QAAA,EAAU,MAAA;AAAA,QACV,SAAS,CAAA,SAAA,EAAY,QAAQ,CAAA,CAAA,EAAI,I
AAI,KAAK,EAAE,CAAA,WAAA;AAAA,OAC7C,CAAA;AAAA,IACH;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AASA,SAAS,eAAA,CACP,WAAA,EACA,WAAA,EACA,eAAA,EACQ;AACR,EAAA,MAAM,QAAkB,EAAC;AAEzB,EAAA,MAAM,SAAA,GAAY,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,OAAO,CAAA,CAAE,MAAA;AAChE,EAAA,MAAM,WAAA,GAAc,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,SAAS,CAAA,CAAE,MAAA;AACpE,EAAA,MAAM,YAAA,GAAe,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,UAAU,CAAA,CAAE,MAAA;AAEtE,EAAA,IAAI,SAAA,GAAY,CAAA,IAAK,WAAA,GAAc,CAAA,IAAK,eAAe,CAAA,EAAG;AACxD,IAAA,KAAA,CAAM,IAAA;AAAA,MACJ,CAAA,OAAA,EAAU,SAAS,CAAA,QAAA,EAAW,WAAW,aAAa,YAAY,CAAA,SAAA;AAAA,KACpE;AAAA,EACF;AAEA,EAAA,MAAM,SAAA,GAAY,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,OAAO,CAAA,CAAE,MAAA;AAChE,EAAA,MAAM,WAAA,GAAc,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,SAAS,CAAA,CAAE,MAAA;AACpE,EAAA,MAAM,YAAA,GAAe,YAAY,MAAA,CAAO,CAAC,MAAM,CAAA,CAAE,IAAA,KAAS,UAAU,CAAA,CAAE,MAAA;AAEtE,EAAA,IAAI,SAAA,GAAY,CAAA,IAAK,WAAA,GAAc,CAAA,IAAK,eAAe,CAAA,EAAG;AACxD,IAAA,KAAA,CAAM,IAAA;AAAA,MACJ,CAAA,OAAA,EAAU,SAAS,CAAA,QAAA,EAAW,WAAW,aAAa,YAAY,CAAA,SAAA;AAAA,KACpE;AAAA,EACF;AAEA,EAAA,MAAM,gBAAgB,eAAA,CAAgB,MAAA;AAAA,IACpC,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,KAAS;AAAA,GACpB,CAAE,MAAA;AACF,EAAA,MAAM,kBAAkB,eAAA,CAAgB,MAAA;AAAA,IACtC,CAAC,CAAA,KAAM,CAAA,CAAE,IAAA,KAAS;AAAA,GACpB,CAAE,MAAA;AAEF,EAAA,IAAI,aAAA,GAAgB,CAAA,IAAK,eAAA,GAAkB,CAAA,EAAG;AAC5C,IAAA,KAAA,CAAM,IAAA,CAAK,CAAA,UAAA,EAAa,aAAa,CAAA,QAAA,EAAW,eAAe,CAAA,QAAA,CAAU,CAAA;AAAA,EAC3E;AAEA,EAAA,IAAI,KAAA,CAAM,WAAW,CAAA,EAAG;AACtB,IAAA,OAAO,YAAA;AAAA,EACT;AAEA,EAAA,OAAO,KAAA,CAAM,KAAK,IAAI,CAAA;AACxB;AAcO,SAAS,sBAAsB,IAAA,EAA2B;AAC/D,EAAA,OAAO,CAAC,IAAA,CAAK,kBAAA;AACf;AAKO,SAAS,oBAAoB,IAAA,EAAqC;AACvE,EAAA,MAAM,UAAoB,EAAC;AAE3B,EAAA,KAAA,MAAW,MAAA,IAAU,KAAK,KAAA,EAAO;AAC/B,IAAA,IAAI,MAAA,CAAO,SAAS,SAAA,EAAW;AAC7B,MAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,mCAAA,EAAsC,MAAA,CAAO,IAAI,CAAA,CAAA,CAAG,CAAA;AAAA,IACnE;AACA,IAAA,IAAI,MAAA,CAAO,QAAA,KAAa,UAAA,IAAc,MAAA,CAAO,SAAS,UAAA,EAAY;AAChE,MAAA,OAAA,CAAQ,IAAA;AAAA,QACN,CAAA,4BAAA,EAA+B,MAA
A,CAAO,IAAI,CAAA,GAAA,EAAM,OAAO,OAAO,CAAA;AAAA,OAChE;AAAA,IACF;AAAA,EACF;AAEA,EAAA,KAAA,MAAW,MAAA,IAAU,KAAK,KAAA,EAAO;AAC/B,IAAA,IAAI,MAAA,CAAO,SAAS,SAAA,EAAW;AAC7B,MAAA,OAAA,CAAQ,IAAA,CAAK,CAAA,mCAAA,EAAsC,MAAA,CAAO,IAAI,CAAA,CAAA,CAAG,CAAA;AAAA,IACnE;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;;;AC7kBO,SAAS,yBACd,SAAA,EAC0C;AAE1C,EAAA,MAAM,OAAA,uBAAc,GAAA,EAAyB;AAG7C,EAAA,MAAM,QAAA,uBAAe,GAAA,EAAY;AACjC,EAAA,KAAA,MAAW,CAAC,IAAA,EAAM,EAAE,CAAA,IAAK,SAAA,EAAW;AAClC,IAAA,QAAA,CAAS,IAAI,IAAI,CAAA;AACjB,IAAA,QAAA,CAAS,IAAI,EAAE,CAAA;AAAA,EACjB;AAGA,EAAA,KAAA,MAAW,QAAQ,QAAA,EAAU;AAC3B,IAAA,OAAA,CAAQ,GAAA,CAAI,IAAA,kBAAM,IAAI,GAAA,EAAK,CAAA;AAAA,EAC7B;AAGA,EAAA,KAAA,MAAW,CAAC,IAAA,EAAM,EAAE,CAAA,IAAK,SAAA,EAAW;AAClC,IAAA,OAAA,CAAQ,GAAA,CAAI,IAAI,CAAA,EAAG,GAAA,CAAI,EAAE,CAAA;AAAA,EAC3B;AAIA,EAAA,KAAA,MAAW,KAAK,QAAA,EAAU;AACxB,IAAA,KAAA,MAAW,SAAS,QAAA,EAAU;AAC5B,MAAA,MAAM,YAAA,GAAe,OAAA,CAAQ,GAAA,CAAI,KAAK,CAAA;AACtC,MAAA,IAAI,CAAC,YAAA,EAAc,GAAA,CAAI,CAAC,CAAA,EAAG;AAE3B,MAAA,MAAM,QAAA,GAAW,OAAA,CAAQ,GAAA,CAAI,CAAC,CAAA;AAC9B,MAAA,IAAI,CAAC,QAAA,EAAU;AAEf,MAAA,KAAA,MAAWC,UAAS,QAAA,EAAU;AAC5B,QAAA,YAAA,CAAa,IAAIA,MAAK,CAAA;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,OAAA;AACT;AAOO,SAAS,cACd,OAAA,EAC0C;AAC1C,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAyB;AAE5C,EAAA,KAAA,MAAW,CAAC,IAAA,EAAM,GAAG,CAAA,IAAK,OAAA,EAAS;AACjC,IAAA,KAAA,MAAW,MAAM,GAAA,EAAK;AACpB,MAAA,MAAM,WAAW,MAAA,CAAO,GAAA,CAAI,EAAE,CAAA,wBAAS,GAAA,EAAI;AAC3C,MAAA,QAAA,CAAS,IAAI,IAAI,CAAA;AACjB,MAAA,MAAA,CAAO,GAAA,CAAI,IAAI,QAAQ,CAAA;AAAA,IACzB;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKO,SAAS,WAAA,CACd,OAAA,EACA,MAAA,EACA,MAAA,EACS;AACT,EAAA,OAAO,QAAQ,GAAA,CAAI,MAAM,CAAA,EAAG,GAAA,CAAI,MAAM,CAAA,IAAK,KAAA;AAC7C;;;AC1EO,IAAM,sBAAA,GAAyB;AAG/B,IAAM,iBAAA,GAAoB;AAG1B,IAAM,kBAAA,GAAqB;AAG3B,IAAM,oBAAA,GAAuB;AAG7B,IAAM,uBAAA,GAA0B;AAGhC,IAAM,iBAAA,GAAoB;AAG1B,IAAM,wBAAA,GAA2B;AAGjC,IAAM,uBAAA,GAA0B;AAGhC,IAAM,iBAAA,GAAoB;AAG1B,IAAM,kBAAA,GAAqB;AAG3B,IAAM,oBAAA,GAAuB;AAG7B,IAAM,iBAAA,GAAoB;;;ACpB1B,IAAM,eAAN,MAAmB;AAAA;AAAA,EAEf,SAAA;AAAA,EAC
A,SAAA;AAAA;AAAA;AAAA,EAIA,iBAAA;AAAA,EACA,mBAAA;AAAA;AAAA;AAAA,EAIA,cAAA;AAAA,EACA,eAAA;AAAA;AAAA,EAGA,eAAA;AAAA,EACA,SAAA;AAAA;AAAA,EAGA,aAAA;AAAA;AAAA;AAAA,EAGA,aAAA;AAAA,EACA,cAAA;AAAA;AAAA,EAGA,YAAA;AAAA,EACA,uBAAA;AAAA,EACA,mBAAA;AAAA,EAET,WAAA,CACE,SAAA,EACA,SAAA,EACA,QAAA,EAcA;AACA,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,SAAA,GAAY,SAAA;AACjB,IAAA,IAAA,CAAK,oBAAoB,QAAA,CAAS,iBAAA;AAClC,IAAA,IAAA,CAAK,sBAAsB,QAAA,CAAS,mBAAA;AACpC,IAAA,IAAA,CAAK,iBAAiB,QAAA,CAAS,cAAA;AAC/B,IAAA,IAAA,CAAK,kBAAkB,QAAA,CAAS,eAAA;AAChC,IAAA,IAAA,CAAK,kBAAkB,QAAA,CAAS,eAAA;AAChC,IAAA,IAAA,CAAK,YAAY,QAAA,CAAS,SAAA;AAC1B,IAAA,IAAA,CAAK,gBAAgB,QAAA,CAAS,aAAA;AAC9B,IAAA,IAAA,CAAK,gBAAgB,QAAA,CAAS,aAAA;AAC9B,IAAA,IAAA,CAAK,iBAAiB,QAAA,CAAS,cAAA;AAC/B,IAAA,IAAA,CAAK,eAAe,QAAA,CAAS,YAAA;AAC7B,IAAA,IAAA,CAAK,0BAA0B,QAAA,CAAS,uBAAA;AACxC,IAAA,IAAA,CAAK,sBAAsB,QAAA,CAAS,mBAAA;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,YAAA,CAAa,OAAe,MAAA,EAAyB;AACnD,IAAA,OAAO,WAAA,CAAY,IAAA,CAAK,iBAAA,EAAmB,KAAA,EAAO,MAAM,CAAA;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,iBAAiB,IAAA,EAAiC;AAChD,IAAA,MAAM,cAAc,IAAA,CAAK,mBAAA,CAAoB,IAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AAClE,IAAA,OAAO,CAAC,IAAA,EAAM,GAAG,WAAW,CAAA;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,IAAA,EAAmC;AAC9C,IAAA,OAAO,KAAK,iBAAA,CAAkB,GAAA,CAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,IAAA,EAAmC;AAChD,IAAA,OAAO,KAAK,mBAAA,CAAoB,GAAA,CAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAA,CAAe,iBAAyB,cAAA,EAAiC;AACvE,IAAA,OAAO,WAAA,CAAY,IAAA,CAAK,cAAA,EAAgB,eAAA,EAAiB,cAAc,CAAA;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,aAAA,CAAc,gBAAwB,eAAA,EAAkC;AACtE,IAAA,OAAO,WAAA,CAAY,IAAA,CAAK,eAAA,EAAiB,cAAA,EAAgB,eAAe,CAAA;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,IAAA,EAAiC;AAC9C,IAAA,MAAM,WAAW,IAAA,CAAK,eAAA,CAAgB,IAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AAC3D,IAAA,OAAO,CAAC,IAAA,EAAM,GAAG,QAAQ,CAAA;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,IAAA,EAAiC;AAC7C,IAAA,MAAM,UAAU,IAAA,CAAK,cAAA,CAAe,IAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AACzD,I
AAA,OAAO,CAAC,IAAA,EAAM,GAAG,OAAO,CAAA;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,aAAA,CAAc,GAAW,CAAA,EAAoB;AAC3C,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,eAAA,CAAgB,GAAA,CAAI,CAAC,CAAA;AAC9C,IAAA,OAAO,WAAA,EAAa,GAAA,CAAI,CAAC,CAAA,IAAK,KAAA;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,IAAA,EAAiC;AAC9C,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,eAAA,CAAgB,GAAA,CAAI,IAAI,CAAA;AACjD,IAAA,OAAO,WAAA,GAAc,CAAC,GAAG,WAAW,IAAI,EAAC;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,GAAA,EAAiC;AAC1C,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,GAAG,CAAA;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,WAAA,CAAY,GAAW,CAAA,EAAoB;AACzC,IAAA,MAAM,cAAA,GAAiB,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAC,CAAA,CAAA,EAAI,CAAC,CAAA,CAAA,GAAK,CAAA,EAAG,CAAC,CAAA,CAAA,EAAI,CAAC,CAAA,CAAA;AACtD,IAAA,OAAO,IAAA,CAAK,aAAA,CAAc,GAAA,CAAI,cAAc,CAAA;AAAA,EAC9C;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,IAAA,EAAiC;AAChD,IAAA,MAAM,SAAmB,EAAC;AAC1B,IAAA,KAAA,MAAW,IAAA,IAAQ,KAAK,aAAA,EAAe;AACrC,MAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA;AAC5B,MAAA,MAAM,CAAA,GAAI,MAAM,CAAC,CAAA;AACjB,MAAA,MAAM,CAAA,GAAI,MAAM,CAAC,CAAA;AACjB,MAAA,IAAI,CAAA,KAAM,IAAA,EAAM,MAAA,CAAO,IAAA,CAAK,CAAC,CAAA;AAAA,WAAA,IACpB,CAAA,KAAM,IAAA,EAAM,MAAA,CAAO,IAAA,CAAK,CAAC,CAAA;AAAA,IACpC;AACA,IAAA,OAAO,MAAA;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAA,CAAS,MAAc,KAAA,EAAwB;AAC7C,IAAA,OAAO,WAAA,CAAY,IAAA,CAAK,aAAA,EAAe,IAAA,EAAM,KAAK,CAAA;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,IAAA,EAAiC;AACzC,IAAA,MAAM,MAAA,GAAS,IAAA,CAAK,aAAA,CAAc,GAAA,CAAI,IAAI,CAAA;AAC1C,IAAA,OAAO,MAAA,GAAS,CAAC,GAAG,MAAM,IAAI,EAAC;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,KAAA,EAAkC;AACzC,IAAA,MAAM,KAAA,GAAQ,IAAA,CAAK,cAAA,CAAe,GAAA,CAAI,KAAK,CAAA;AAC3C,IAAA,OAAO,KAAA,GAAQ,CAAC,GAAG,KAAK,IAAI,EAAC;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,eAAe,QAAA,EAAsC;AACnD,IAAA,OAAO,IAAA,CAAK,YAAA,CAAa,GAAA,CAAI,QAAQ,CAAA;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,gBAAgB,QAAA,EAAqC;AACnD,IAAA,MAAM,OAAA,GAAU,IAAA,CAAK,uBAAA,CAAwB,GAAA,CAAI,QAAQ,CAAA;AACzD,IAAA,OAAO,OAAA,GAAU,CAAC,GAAG,OAAO,IAAI,EAAC;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA,EAOA,iBAAiB,QAAA,EAAqC;AACpD,IAAA,MAAM,QAAA,GAAW,IAAA,CAAK,mBAAA,CAAoB,GAAA,CAAI,QAAQ,CAAA;AACtD,IAAA,OAAO,QAAA,GAAW,CAAC,GAAG,QAAQ,IAAI,EAAC;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,oBAAoB,QAAA,EAAqC;AACvD,IAAA,MAAM,WAAW,IAAA,CAAK,mBAAA,CAAoB,IAAI,QAAQ,CAAA,wBAAS,GAAA,EAAI;AACnE,IAAA,OAAO,CAAC,QAAA,EAAU,GAAG,QAAQ,CAAA;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,cAAA,CAAe,cAAsB,UAAA,EAA6B;AAChE,IAAA,IAAI,YAAA,KAAiB,YAAY,OAAO,IAAA;AACxC,IAAA,OAAO,IAAA,CAAK,YAAA,CAAa,YAAA,EAAc,UAAU,CAAA;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAA,EAAuB;AACjC,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,IAAI,CAAA;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAA,EAAuB;AACjC,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,IAAI,CAAA;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAA,EAAoC;AAC9C,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,IAAI,CAAA;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAA,EAAuC;AACjD,IAAA,OAAO,IAAA,CAAK,SAAA,CAAU,GAAA,CAAI,IAAI,CAAA;AAAA,EAChC;AACF;AAKO,SAAS,mBAAA,GAad;AACA,EAAA,OAAO;AAAA,IACL,iBAAA,sBAAuB,GAAA,EAAI;AAAA,IAC3B,mBAAA,sBAAyB,GAAA,EAAI;AAAA,IAC7B,cAAA,sBAAoB,GAAA,EAAI;AAAA,IACxB,eAAA,sBAAqB,GAAA,EAAI;AAAA,IACzB,eAAA,sBAAqB,GAAA,EAAI;AAAA,IACzB,SAAA,sBAAe,GAAA,EAAI;AAAA,IACnB,aAAA,sBAAmB,GAAA,EAAI;AAAA,IACvB,aAAA,sBAAmB,GAAA,EAAI;AAAA,IACvB,cAAA,sBAAoB,GAAA,EAAI;AAAA,IACxB,YAAA,sBAAkB,GAAA,EAAI;AAAA,IACtB,uBAAA,sBAA6B,GAAA,EAAI;AAAA,IACjC,mBAAA,sBAAyB,GAAA;AAAI,GAC/B;AACF;AAKO,SAAS,4BACd,QAAA,EAcA;AAEA,EAAA,MAAM,oBAAwC,EAAC;AAC/C,EAAA,MAAM,mBAAuC,EAAC;AAC9C,EAAA,MAAM,sBAA0C,EAAC;AACjD,EAAA,MAAM,oBAAwC,EAAC;AAC/C,EAAA,MAAM,kBAAsC,EAAC;AAC7C,EAAA,MAAM,qBAAyC,EAAC;AAChD,EAAA,MAAM,mBAAuC,EAAC;AAE9C,EAAA,KAAA,MAAW,YAAY,QAAA,EAAU;AAC/B,IAAA,MAAM,QAAA,GAAW,WAAA,CAAY,QAAA,CAAS,IAAI,CAAA;AAC1C,IAAA,MAAM,MAAA,GAAS,WAAA,CAAY,QAAA,CAAS,EAAE,CAAA;AAEtC,IAAA,QAAQ,QAAA,CAAS,SAAS,IAAA;AAAM,MAC9B,KAAK,sBAAA,EAAwB;AAC3B,QAAA,iBAAA,CAAkB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AACzC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,iBAAA,EAAmB;AACtB,QAAA,gBAAA,CAAiB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;A
ACxC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,kBAAA,EAAoB;AAEvB,QAAA,gBAAA,CAAiB,IAAA,CAAK,CAAC,MAAA,EAAQ,QAAQ,CAAC,CAAA;AACxC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,uBAAA;AAAA,MACL,KAAK,iBAAA,EAAmB;AACtB,QAAA,mBAAA,CAAoB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AAC3C,QAAA;AAAA,MACF;AAAA,MACA,KAAK,uBAAA,EAAyB;AAC5B,QAAA,iBAAA,CAAkB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AACzC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,iBAAA,EAAmB;AACtB,QAAA,eAAA,CAAgB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AACvC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,kBAAA,EAAoB;AAEvB,QAAA,eAAA,CAAgB,IAAA,CAAK,CAAC,MAAA,EAAQ,QAAQ,CAAC,CAAA;AACvC,QAAA;AAAA,MACF;AAAA,MACA,KAAK,oBAAA,EAAsB;AAEzB,QAAA,kBAAA,CAAmB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AAC1C,QAAA;AAAA,MACF;AAAA,MACA,KAAK,iBAAA,EAAmB;AACtB,QAAA,gBAAA,CAAiB,IAAA,CAAK,CAAC,QAAA,EAAU,MAAM,CAAC,CAAA;AACxC,QAAA;AAAA,MACF;AAAA;AACF,EACF;AAGA,EAAA,MAAM,iBAAA,GAAoB,yBAAyB,iBAAiB,CAAA;AACpE,EAAA,MAAM,mBAAA,GAAsB,cAAc,iBAAiB,CAAA;AAG3D,EAAA,MAAM,cAAA,GAAiB,yBAAyB,gBAAgB,CAAA;AAChE,EAAA,MAAM,eAAA,GAAkB,cAAc,cAAc,CAAA;AAGpD,EAAA,MAAM,eAAA,GAAkB,uBAAuB,mBAAmB,CAAA;AAClE,EAAA,MAAM,SAAA,GAAY,kBAAkB,mBAAmB,CAAA;AAGvD,EAAA,MAAM,aAAA,GAAgB,qBAAqB,iBAAiB,CAAA;AAG5D,EAAA,MAAM,aAAA,GAAgB,yBAAyB,eAAe,CAAA;AAC9D,EAAA,MAAM,cAAA,GAAiB,cAAc,aAAa,CAAA;AAGlD,EAAA,MAAM,YAAA,GAAe,oBAAoB,kBAAkB,CAAA;AAK3D,EAAA,MAAM,uBAAA,GAA0B,yBAAyB,gBAAgB,CAAA;AACzE,EAAA,MAAM,mBAAA,GAAsB,cAAc,uBAAuB,CAAA;AAEjE,EAAA,OAAO;AAAA,IACL,iBAAA;AAAA,IACA,mBAAA;AAAA,IACA,cAAA;AAAA,IACA,eAAA;AAAA,IACA,eAAA;AAAA,IACA,SAAA;AAAA,IACA,aAAA;AAAA,IACA,aAAA;AAAA,IACA,cAAA;AAAA,IACA,YAAA;AAAA,IACA,uBAAA;AAAA,IACA;AAAA,GACF;AACF;AAKA,SAAS,YAAY,SAAA,EAAoD;AACvE,EAAA,IAAI,OAAO,cAAc,QAAA,EAAU;AACjC,IAAA,OAAO,SAAA;AAAA,EACT;AACA,EAAA,OAAO,SAAA,CAAU,IAAA;AACnB;AAKA,SAAS,cAAc,KAAA,EAAwB;AAC7C,EAAA,OAAO,MAAM,UAAA,CAAW,SAAS,CAAA,IAAK,KAAA,CAAM,WAAW,UAAU,CAAA;AACnE;AAKA,SAAS,uBACP,SAAA,EAC0C;AAE1C,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAoB;AAEvC,EAAA,SAAS,KAAK,CAAA,EAAmB;AAC/B,IAAA,IAAI,CAAC,MAAA,CAAO,GAAA,CAAI,CAAC,CAAA,EAAG;AAClB,MAAA,MAAA,CAAO,GAAA,CAAI,GAAG,
CAAC,CAAA;AACf,MAAA,OAAO,CAAA;AAAA,IACT;AAEA,IAAA,MAAM,CAAA,GAAI,MAAA,CAAO,GAAA,CAAI,CAAC,CAAA;AACtB,IAAA,IAAI,CAAA,KAAM,GAAG,OAAO,CAAA;AACpB,IAAA,MAAM,IAAA,GAAO,KAAK,CAAC,CAAA;AACnB,IAAA,MAAA,CAAO,GAAA,CAAI,GAAG,IAAI,CAAA;AAClB,IAAA,OAAO,IAAA;AAAA,EACT;AAEA,EAAA,SAAS,KAAA,CAAM,GAAW,CAAA,EAAiB;AACzC,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAC,CAAA;AACpB,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAC,CAAA;AACpB,IAAA,IAAI,UAAU,KAAA,EAAO;AACnB,MAAA,MAAA,CAAO,GAAA,CAAI,OAAO,KAAK,CAAA;AAAA,IACzB;AAAA,EACF;AAGA,EAAA,KAAA,MAAW,CAAC,CAAA,EAAG,CAAC,CAAA,IAAK,SAAA,EAAW;AAC9B,IAAA,KAAA,CAAM,GAAG,CAAC,CAAA;AAAA,EACZ;AAGA,EAAA,MAAM,OAAA,uBAAc,GAAA,EAAyB;AAC7C,EAAA,KAAA,MAAW,GAAA,IAAO,MAAA,CAAO,IAAA,EAAK,EAAG;AAC/B,IAAA,MAAM,IAAA,GAAO,KAAK,GAAG,CAAA;AACrB,IAAA,MAAM,WAAW,OAAA,CAAQ,GAAA,CAAI,IAAI,CAAA,wBAAS,GAAA,EAAI;AAC9C,IAAA,QAAA,CAAS,IAAI,GAAG,CAAA;AAChB,IAAA,OAAA,CAAQ,GAAA,CAAI,MAAM,QAAQ,CAAA;AAAA,EAC5B;AAGA,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAiC;AACpD,EAAA,KAAA,MAAW,OAAA,IAAW,OAAA,CAAQ,MAAA,EAAO,EAAG;AACtC,IAAA,KAAA,MAAW,UAAU,OAAA,EAAS;AAE5B,MAAA,MAAM,MAAA,GAAS,IAAI,GAAA,CAAI,OAAO,CAAA;AAC9B,MAAA,MAAA,CAAO,OAAO,MAAM,CAAA;AACpB,MAAA,MAAA,CAAO,GAAA,CAAI,QAAQ,MAAM,CAAA;AAAA,IAC3B;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,kBACP,SAAA,EAC6B;AAC7B,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAoB;AAEvC,EAAA,KAAA,MAAW,CAAC,CAAA,EAAG,CAAC,CAAA,IAAK,SAAA,EAAW;AAE9B,IAAA,IAAI,cAAc,CAAC,CAAA,IAAK,CAAC,aAAA,CAAc,CAAC,CAAA,EAAG;AACzC,MAAA,MAAA,CAAO,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,IACjB,WAAW,aAAA,CAAc,CAAC,KAAK,CAAC,aAAA,CAAc,CAAC,CAAA,EAAG;AAChD,MAAA,MAAA,CAAO,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,IACjB;AAAA,EACF;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,qBACP,SAAA,EACqB;AACrB,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAY;AAE/B,EAAA,KAAA,MAAW,CAAC,CAAA,EAAG,CAAC,CAAA,IAAK,SAAA,EAAW;AAE9B,IAAA,MAAM,UAAA,GAAa,CAAA,GAAI,CAAA,GAAI,CAAA,EAAG,CAAC,CAAA,CAAA,EAAI,CAAC,CAAA,CAAA,GAAK,CAAA,EAAG,CAAC,CAAA,CAAA,EAAI,CAAC,CAAA,CAAA;AAClD,IAAA,MAAA,CAAO,IAAI,UAAU,CAAA;AAAA,EACvB;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,oBACP,SAAA,EAC6B;AAC7B,EAAA,MAAM,MAAA,uBAAa,GAAA,EAAoB;AAEvC,EAAA,KAAA,MA
AW,CAAC,CAAA,EAAG,CAAC,CAAA,IAAK,SAAA,EAAW;AAE9B,IAAA,MAAA,CAAO,GAAA,CAAI,GAAG,CAAC,CAAA;AACf,IAAA,MAAA,CAAO,GAAA,CAAI,GAAG,CAAC,CAAA;AAAA,EACjB;AAEA,EAAA,OAAO,MAAA;AACT;;;AC1hBO,SAAS,eAAA,CACd,OACA,OAAA,EACkB;AAClB,EAAA,MAAM,KAAA,GAAQ,eAAe,KAAK,CAAA;AAClC,EAAA,MAAM,KAAA,GAAQ,eAAe,KAAK,CAAA;AAClC,EAAA,MAAM,QAAA,GAAW,iBAAA,CAAkB,KAAA,CAAM,QAAQ,CAAA;AAEjD,EAAA,OAAO;AAAA,IACL,SAAS,KAAA,CAAM,EAAA;AAAA,IACf,OAAA;AAAA,IACA,aAAaC,wBAAA,EAAO;AAAA,IACpB,KAAA;AAAA,IACA,KAAA;AAAA,IACA,QAAA;AAAA,IACA,QAAA,EAAU;AAAA,MACR,YAAA,EAAc,MAAM,QAAA,CAAS,YAAA;AAAA,MAC7B,YAAA,EAAc,MAAM,QAAA,CAAS;AAAA;AAC/B,GACF;AACF;AASA,SAAS,eACP,KAAA,EACmC;AACnC,EAAA,MAAM,SAA4C,EAAC;AAEnD,EAAA,KAAA,MAAW,QAAA,IAAYC,8BAAA,CAAa,KAAK,CAAA,EAAG;AAC1C,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,KAAA,CAAM,QAAQ,CAAA;AACzC,IAAA,IAAI,iBAAiB,MAAA,EAAW;AAChC,IAAA,MAAA,CAAO,QAAQ,CAAA,GAAI,gBAAA,CAAiB,YAAY,CAAA;AAAA,EAClD;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,iBAAiB,YAAA,EAAmD;AAC3E,EAAA,MAAM,OAAO,YAAA,CAAa,IAAA;AAE1B,EAAA,OAAO;AAAA,IACL,MAAM,IAAA,CAAK,IAAA;AAAA,IACX,UAAA,EAAY,kBAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,IAC1C,iBAAA,EAAmB,0BAAA,CAA2B,YAAA,CAAa,MAAA,IAAU,EAAE,CAAA;AAAA,IACvE,QAAA,EAAU,aAAa,QAAA,IAAY,UAAA;AAAA,IACnC,aAAa,IAAA,CAAK;AAAA,GACpB;AACF;AAKA,SAAS,2BACP,WAAA,EACuC;AACvC,EAAA,OAAO,WAAA,CAAY,GAAA,CAAI,CAAC,UAAA,MAAgB;AAAA,IACtC,MAAM,UAAA,CAAW,IAAA;AAAA,IACjB,MAAA,EAAQ,CAAC,GAAG,UAAA,CAAW,MAAM,CAAA;AAAA,IAC7B,OACE,UAAA,CAAW,KAAA,GAAQ,uBAAA,CAAwB,UAAA,CAAW,KAAK,CAAA,GAAI,MAAA;AAAA,IACjE,OAAO,UAAA,CAAW,KAAA;AAAA,IAClB,WAAW,UAAA,CAAW;AAAA,GACxB,CAAE,CAAA;AACJ;AA8BA,SAAS,wBACP,aAAA,EACQ;AAER,EAAA,MAAM,OAAA,GAAU,IAAI,KAAA,CAAM,EAAC,EAAuB;AAAA,IAChD,GAAA,CAAI,SAAS,KAAA,EAAsC;AACjD,MAAA,OAAO;AAAA,QACL,QAAQ,OAA4B;AAAA,UAClC,MAAA,EAAQ,kBAAA;AAAA,UACR,KAAA;AAAA,UACA,EAAA,EAAI;AAAA,SACN,CAAA;AAAA,QACA,WAAW,OAA4B;AAAA,UACrC,MAAA,EAAQ,kBAAA;AAAA,UACR,KAAA;AAAA,UACA,EAAA,EAAI;AAAA,SACN;AAAA,OACF;AAAA,IACF;AAAA,GACD,CAAA;AAGD,EAAA,MAAM,SAAA,GAAY,cAAc,OAAO,CAAA;AAGvC,EAAA,OAAO,IAAA,CAAK,UAAU,EAAE,KAAA,EAAO,UAAU,KAAA,EAAO,EAAA,EAAI,SAAA,
CAAU,EAAA,EAAI,CAAA;AACpE;AAoBO,SAAS,0BACd,UAAA,EACgD;AAChD,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,KAAA,CAAM,UAAU,CAAA;AAKpC,EAAA,OAAO,CAAC,OAAA,KAA+C;AACrD,IAAA,MAAM,YAAA,GAAe,OAAA,CAAQ,MAAA,CAAO,KAAK,CAAA;AACzC,IAAA,IAAI,CAAC,YAAA,EAAc;AACjB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kCAAA,EAAqC,MAAA,CAAO,KAAK,CAAA,CAAE,CAAA;AAAA,IACrE;AAEA,IAAA,MAAM,MAAA,GACJ,OAAO,EAAA,KAAO,QAAA,GAAW,aAAa,MAAA,EAAO,GAAI,aAAa,SAAA,EAAU;AAC1E,IAAA,OAAO;AAAA,MACL,MAAA,EAAQ,kBAAA;AAAA,MACR,OAAO,MAAA,CAAO,KAAA;AAAA,MACd,IAAI,MAAA,CAAO;AAAA,KACb;AAAA,EACF,CAAA;AACF;AASA,SAAS,eACP,KAAA,EACmC;AACnC,EAAA,MAAM,SAA4C,EAAC;AAEnD,EAAA,KAAA,MAAW,QAAA,IAAYC,8BAAA,CAAa,KAAK,CAAA,EAAG;AAC1C,IAAA,MAAM,YAAA,GAAe,KAAA,CAAM,KAAA,CAAM,QAAQ,CAAA;AACzC,IAAA,IAAI,iBAAiB,MAAA,EAAW;AAChC,IAAA,MAAA,CAAO,QAAQ,CAAA,GAAI,gBAAA,CAAiB,YAAY,CAAA;AAAA,EAClD;AAEA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,iBAAiB,YAAA,EAAmD;AAC3E,EAAA,MAAM,OAAO,YAAA,CAAa,IAAA;AAE1B,EAAA,OAAO;AAAA,IACL,MAAM,IAAA,CAAK,IAAA;AAAA,IACX,WAAW,YAAA,CAAa,IAAA,CAAK,IAAI,CAAC,IAAA,KAAS,KAAK,IAAI,CAAA;AAAA,IACpD,SAAS,YAAA,CAAa,EAAA,CAAG,IAAI,CAAC,IAAA,KAAS,KAAK,IAAI,CAAA;AAAA,IAChD,UAAA,EAAY,kBAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,IAC1C,WAAA,EAAa,aAAa,WAAA,IAAe,MAAA;AAAA,IACzC,iBAAA,EAAmB,aAAa,iBAAA,IAAqB,YAAA;AAAA,IACrD,aAAa,IAAA,CAAK;AAAA,GACpB;AACF;AASA,SAAS,kBACP,SAAA,EACoB;AAEpB,EAAA,MAAM,WAAA,uBAAkB,GAAA,EAAsB;AAC9C,EAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,IAAA,MAAM,WAAW,QAAA,CAAS,QAAA;AAC1B,IAAA,IAAI,CAAC,WAAA,CAAY,GAAA,CAAI,QAAA,CAAS,IAAI,CAAA,EAAG;AACnC,MAAA,WAAA,CAAY,GAAA,CAAI,QAAA,CAAS,IAAA,EAAM,QAAQ,CAAA;AAAA,IACzC;AAAA,EACF;AAGA,EAAA,MAAM,YAAgD,EAAC;AACvD,EAAA,KAAA,MAAW,CAAC,IAAA,EAAM,QAAQ,CAAA,IAAK,WAAA,EAAa;AAC1C,IAAA,SAAA,CAAU,IAAI,CAAA,GAAI,iBAAA,CAAkB,QAAQ,CAAA;AAAA,EAC9C;AAGA,EAAA,MAAM,sBAAsB,SAAA,CAAU,GAAA;AAAA,IAAI,CAAC,QAAA,KACzC,yBAAA,CAA0B,QAAQ;AAAA,GACpC;AAGA,EAAA,MAAM,QAAA,GAAW,kBAAkB,SAAS,CAAA;AAE5C,EAAA,OAAO;AAAA,IACL,SAAA;AAAA,IACA,SAAA,EAAW,mBAAA;AAAA,IACX;AAAA,GACF;AACF;AAKA,SAAS,kBAAkB,QAAA,EAAwC;AACjE,EAAA,OAAO;AAAA,IACL,MAAM,QAAA,CAAS,IAAA;AAAA,IACf,UAA
A,EAAY,SAAS,UAAA,CAAW,UAAA;AAAA,IAChC,SAAA,EAAW,SAAS,UAAA,CAAW,SAAA;AAAA,IAC/B,SAAA,EAAW,SAAS,UAAA,CAAW,SAAA;AAAA,IAC/B,OAAA,EAAS,SAAS,UAAA,CAAW,OAAA;AAAA,IAC7B,SAAA,EAAW,SAAS,UAAA,CAAW,SAAA;AAAA,IAC/B,WAAA,EAAa,SAAS,UAAA,CAAW;AAAA,GACnC;AACF;AAKA,SAAS,0BACP,QAAA,EAC4B;AAC5B,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,SAAS,QAAA,CAAS,IAAA;AAAA,IAC5B,IAAA,EAAM,WAAA,CAAY,QAAA,CAAS,IAAI,CAAA;AAAA,IAC/B,EAAA,EAAI,WAAA,CAAY,QAAA,CAAS,EAAE;AAAA,GAC7B;AACF;AAKA,SAAS,kBACP,SAAA,EACoB;AACpB,EAAA,IAAI,SAAA,CAAU,WAAW,CAAA,EAAG;AAC1B,IAAA,OAAO;AAAA,MACL,mBAAmB,EAAC;AAAA,MACpB,qBAAqB,EAAC;AAAA,MACtB,gBAAgB,EAAC;AAAA,MACjB,iBAAiB,EAAC;AAAA,MAClB,iBAAiB,EAAC;AAAA,MAClB,eAAe,EAAC;AAAA,MAChB,eAAe,EAAC;AAAA,MAChB,gBAAgB,EAAC;AAAA,MACjB,WAAW,EAAC;AAAA,MACZ,cAAc,EAAC;AAAA,MACf,yBAAyB,EAAC;AAAA,MAC1B,qBAAqB;AAAC,KACxB;AAAA,EACF;AAEA,EAAA,MAAM,QAAA,GAAW,4BAA4B,SAAS,CAAA;AAEtD,EAAA,OAAO;AAAA,IACL,iBAAA,EAAmB,WAAA,CAAY,QAAA,CAAS,iBAAiB,CAAA;AAAA,IACzD,mBAAA,EAAqB,WAAA,CAAY,QAAA,CAAS,mBAAmB,CAAA;AAAA,IAC7D,cAAA,EAAgB,WAAA,CAAY,QAAA,CAAS,cAAc,CAAA;AAAA,IACnD,eAAA,EAAiB,WAAA,CAAY,QAAA,CAAS,eAAe,CAAA;AAAA,IACrD,eAAA,EAAiB,WAAA,CAAY,QAAA,CAAS,eAAe,CAAA;AAAA,IACrD,aAAA,EAAe,CAAC,GAAG,QAAA,CAAS,aAAa,CAAA;AAAA,IACzC,aAAA,EAAe,WAAA,CAAY,QAAA,CAAS,aAAa,CAAA;AAAA,IACjD,cAAA,EAAgB,WAAA,CAAY,QAAA,CAAS,cAAc,CAAA;AAAA,IACnD,SAAA,EAAW,iBAAA,CAAkB,QAAA,CAAS,SAAS,CAAA;AAAA,IAC/C,YAAA,EAAc,iBAAA,CAAkB,QAAA,CAAS,YAAY,CAAA;AAAA,IACrD,uBAAA,EAAyB,WAAA,CAAY,QAAA,CAAS,uBAAuB,CAAA;AAAA,IACrE,mBAAA,EAAqB,WAAA,CAAY,QAAA,CAAS,mBAAmB;AAAA,GAC/D;AACF;AAKA,SAAS,YACP,GAAA,EACmC;AACnC,EAAA,MAAM,SAA4C,EAAC;AACnD,EAAA,KAAA,MAAW,CAAC,GAAA,EAAK,MAAM,CAAA,IAAK,GAAA,EAAK;AAC/B,IAAA,MAAA,CAAO,GAAG,CAAA,GAAI,CAAC,GAAG,MAAM,CAAA;AAAA,EAC1B;AACA,EAAA,OAAO,MAAA;AACT;AAKA,SAAS,kBACP,GAAA,EACwB;AACxB,EAAA,MAAM,SAAiC,EAAC;AACxC,EAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,CAAA,IAAK,GAAA,EAAK;AAC9B,IAAA,MAAA,CAAO,GAAG,CAAA,GAAI,KAAA;AAAA,EAChB;AACA,EAAA,OAAO,MAAA;AACT;AAWA,SAAS,mBAAmB,MAAA,EAA+B;AACzD,EAAA,IAAI;AAEF,IAAA,MAAM,UAAA,GAAaC,KAAA,CAAE,YAAA,CAAa,MAAM
,CAAA;AACxC,IAAA,OAAO,UAAA;AAAA,EACT,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,EAAE,MAAM,QAAA,EAAS;AAAA,EAC1B;AACF;AAWA,eAAsB,kBACpB,MAAA,EACqB;AAErB,EAAA,MAAM,QAAA,GAAW;AAAA,IACf,SAAS,MAAA,CAAO,OAAA;AAAA,IAChB,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,OAAO,MAAA,CAAO,KAAA;AAAA,IACd,UAAU,MAAA,CAAO,QAAA;AAAA,IACjB,UAAU,MAAA,CAAO;AAAA,GACnB;AAGA,EAAA,MAAM,IAAA,GAAO,IAAA,CAAK,SAAA,CAAU,QAAA,EAAU,cAAc,CAAA;AACpD,EAAA,OAAO,WAAW,IAAI,CAAA;AACxB;AAKA,SAAS,cAAA,CAAe,MAAc,KAAA,EAAyB;AAC7D,EAAA,IAAI,KAAA,KAAU,QAAQ,OAAO,KAAA,KAAU,YAAY,CAAC,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,EAAG;AACxE,IAAA,MAAM,SAAkC,EAAC;AACzC,IAAA,KAAA,MAAW,OAAO,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA,CAAE,UAAS,EAAG;AAC/C,MAAA,MAAA,CAAO,GAAG,CAAA,GAAK,KAAA,CAAkC,GAAG,CAAA;AAAA,IACtD;AACA,IAAA,OAAO,MAAA;AAAA,EACT;AACA,EAAA,OAAO,KAAA;AACT;AAQA,eAAe,WAAW,KAAA,EAAgC;AACxD,EAAA,MAAM,OAAA,GAAU,IAAI,WAAA,EAAY,CAAE,OAAO,KAAK,CAAA;AAC9C,EAAA,MAAM,SAAS,MAAM,UAAA,CAAW,OAAO,MAAA,CAAO,MAAA,CAAO,WAAW,OAAO,CAAA;AACvE,EAAA,MAAM,KAAA,GAAQ,IAAI,UAAA,CAAW,MAAM,CAAA;AAEnC,EAAA,IAAI,GAAA,GAAM,EAAA;AACV,EAAA,KAAA,IAAS,KAAA,GAAQ,CAAA,EAAG,KAAA,GAAQ,CAAA,EAAG,KAAA,EAAA,EAAS;AACtC,IAAA,MAAM,IAAA,GAAO,MAAM,KAAK,CAAA;AACxB,IAAA,IAAI,SAAS,MAAA,EAAW;AACxB,IAAA,GAAA,IAAO,KAAK,QAAA,CAAS,EAAE,CAAA,CAAE,QAAA,CAAS,GAAG,GAAG,CAAA;AAAA,EAC1C;AACA,EAAA,OAAO,GAAA;AACT;;;ACjYA,eAAsB,YAAA,CACpB,OAAA,EACA,KAAA,EACA,OAAA,EACiC;AACjC,EAAA,MAAM,WAAA,GAAc,SAAS,WAAA,IAAe,IAAA;AAC5C,EAAA,MAAM,eAAA,GAAkB,SAAS,eAAA,IAAmB,IAAA;AAGpD,EAAA,MAAM,YAAA,GAAe,MAAM,OAAA,CAAQ,eAAA,CAAgB,MAAM,EAAE,CAAA;AAE3D,EAAA,IAAI,CAAC,YAAA,EAAc;AAEjB,IAAA,MAAM,MAAA,GAAS,MAAM,gBAAA,CAAiB,OAAA,EAAS,KAAK,CAAA;AACpD,IAAA,OAAO,EAAE,MAAA,EAAQ,aAAA,EAAe,OAAA,EAAS,OAAO,OAAA,EAAQ;AAAA,EAC1D;AAGA,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,CAAM,YAAA,CAAa,UAAU,CAAA;AAGvD,EAAA,MAAM,aAAA,GAAgB,eAAA,CAAgB,KAAA,EAAO,YAAA,CAAa,UAAU,CAAC,CAAA;AAGrE,EAAA,MAAM,aAAa,YAAA,CAAa,WAAA;AAChC,EAAA,MAAM,WAAA,GAAc,MAAM,iBAAA,CAAkB,aAAa,CAAA;AAEzD,EAAA,IAAI,eAAe,WAAA,EAAa;AAC9B,IAAA,OAAO,EAAE,MAAA,EAAQ,WAAA,EAAa,OAAA,EAAS,aAAa,OAAA,EAAQ;AAAA,EAC9D;AAGA,EA
AA,MAAM,IAAA,GAAO,iBAAA,CAAkB,YAAA,EAAc,aAAa,CAAA;AAE1D,EAAA,IAAI,CAAC,KAAK,UAAA,EAAY;AAEpB,IAAA,OAAO,EAAE,MAAA,EAAQ,WAAA,EAAa,OAAA,EAAS,aAAa,OAAA,EAAQ;AAAA,EAC9D;AAGA,EAAA,IAAI,qBAAA,CAAsB,IAAI,CAAA,EAAG;AAC/B,IAAA,IAAI,WAAA,EAAa;AAEf,MAAA,MAAM,WAAA,GAAoC;AAAA,QACxC,SAAS,KAAA,CAAM,EAAA;AAAA,QACf,aAAa,YAAA,CAAa,OAAA;AAAA,QAC1B,SAAA,EAAW,aAAa,OAAA,GAAU,CAAA;AAAA,QAClC;AAAA,OACF;AACA,MAAA,MAAM,OAAA,EAAS,kBAAkB,WAAW,CAAA;AAC5C,MAAA,MAAM,aAAa,MAAM,aAAA;AAAA,QACvB,OAAA;AAAA,QACA,KAAA;AAAA,QACA,YAAA,CAAa;AAAA,OACf;AACA,MAAA,MAAM,OAAA,EAAS,iBAAiB,WAAW,CAAA;AAC3C,MAAA,OAAO;AAAA,QACL,MAAA,EAAQ,UAAA;AAAA,QACR,aAAa,YAAA,CAAa,OAAA;AAAA,QAC1B,SAAA,EAAW,UAAA;AAAA,QACX;AAAA,OACF;AAAA,IACF;AAEA,IAAA,OAAO;AAAA,MACL,MAAA,EAAQ,SAAA;AAAA,MACR,SAAS,YAAA,CAAa,OAAA;AAAA,MACtB;AAAA,KACF;AAAA,EACF;AAGA,EAAA,MAAM,OAAA,GAAU,oBAAoB,IAAI,CAAA;AAExC,EAAA,IAAI,eAAA,EAAiB;AACnB,IAAA,MAAM,IAAIC,gCAAA;AAAA,MACR,CAAA,2BAAA,EAA8B,IAAA,CAAK,OAAO,CAAA,EAAA,EACrC,QAAQ,MAAM,CAAA,6FAAA,CAAA;AAAA,MAEnB;AAAA,QACE,SAAS,KAAA,CAAM,EAAA;AAAA,QACf,aAAa,YAAA,CAAa,OAAA;AAAA,QAC1B,SAAA,EAAW,aAAa,OAAA,GAAU;AAAA;AACpC,KACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAE,MAAA,EAAQ,UAAA,EAAY,IAAA,EAAM,OAAA,EAAQ;AAC7C;AAWA,eAAsB,gBAAA,CACpB,SACA,KAAA,EAC2B;AAC3B,EAAA,MAAM,MAAA,GAAS,eAAA,CAAgB,KAAA,EAAO,CAAC,CAAA;AACvC,EAAA,MAAM,IAAA,GAAO,MAAM,iBAAA,CAAkB,MAAM,CAAA;AAE3C,EAAA,OAAO,QAAQ,YAAA,CAAa;AAAA,IAC1B,SAAS,KAAA,CAAM,EAAA;AAAA,IACf,OAAA,EAAS,CAAA;AAAA,IACT,UAAA,EAAY,IAAA;AAAA,IACZ,SAAA,EAAW,MAAA;AAAA,IACX,QAAA,EAAU;AAAA,GACX,CAAA;AACH;AAaA,eAAsB,aAAA,CACpB,OAAA,EACA,KAAA,EACA,cAAA,EACiB;AACjB,EAAA,MAAM,aAAa,cAAA,GAAiB,CAAA;AACpC,EAAA,MAAM,MAAA,GAAS,eAAA,CAAgB,KAAA,EAAO,UAAU,CAAA;AAChD,EAAA,MAAM,IAAA,GAAO,MAAM,iBAAA,CAAkB,MAAM,CAAA;AAG3C,EAAA,MAAM,QAAQ,YAAA,CAAa;AAAA,IACzB,SAAS,KAAA,CAAM,EAAA;AAAA,IACf,OAAA,EAAS,UAAA;AAAA,IACT,UAAA,EAAY,IAAA;AAAA,IACZ,SAAA,EAAW,MAAA;AAAA,IACX,QAAA,EAAU;AAAA,GACX,CAAA;AAGD,EAAA,MAAM,OAAA,CAAQ,eAAA,CAAgB,KAAA,CAAM,EAAA,EAAI,UAAU,CAAA;AAElD,EAAA,OAAO,UAAA;AACT;AAaA,eAAsB,cAAA,CACpB,OAAA,EACA,OAAA,EACA,aAAA
,EACe;AACf,EAAA,MAAM,GAAA,GAAM,MAAM,OAAA,CAAQ,gBAAA,CAAiB,SAAS,aAAa,CAAA;AACjE,EAAA,IAAI,CAAC,GAAA,EAAK;AACR,IAAA,MAAM,IAAIA,gCAAA;AAAA,MACR,8BAA8B,aAAa,CAAA,yBAAA,CAAA;AAAA,MAC3C,EAAE,OAAA,EAAS,WAAA,EAAa,aAAA,EAAe,WAAW,aAAA;AAAc,KAClE;AAAA,EACF;AACA,EAAA,MAAM,OAAA,CAAQ,eAAA,CAAgB,OAAA,EAAS,aAAa,CAAA;AACtD;AASA,eAAsB,eAAA,CACpB,SACA,OAAA,EACuC;AACvC,EAAA,MAAM,GAAA,GAAM,MAAM,OAAA,CAAQ,eAAA,CAAgB,OAAO,CAAA;AACjD,EAAA,IAAI,CAAC,KAAK,OAAO,MAAA;AACjB,EAAA,OAAO,IAAA,CAAK,KAAA,CAAM,GAAA,CAAI,UAAU,CAAA;AAClC;AASA,eAAsB,mBAAA,CACpB,SACA,OAAA,EACkB;AAClB,EAAA,MAAM,GAAA,GAAM,MAAM,OAAA,CAAQ,eAAA,CAAgB,OAAO,CAAA;AACjD,EAAA,OAAO,GAAA,KAAQ,MAAA;AACjB;AASA,eAAsB,gBAAA,CACpB,SACA,KAAA,EACiC;AACjC,EAAA,MAAM,YAAA,GAAe,MAAM,OAAA,CAAQ,eAAA,CAAgB,MAAM,EAAE,CAAA;AAC3D,EAAA,IAAI,CAAC,cAAc,OAAO,MAAA;AAE1B,EAAA,MAAM,YAAA,GAAe,IAAA,CAAK,KAAA,CAAM,YAAA,CAAa,UAAU,CAAA;AACvD,EAAA,MAAM,aAAA,GAAgB,eAAA,CAAgB,KAAA,EAAO,YAAA,CAAa,UAAU,CAAC,CAAA;AAErE,EAAA,OAAO,iBAAA,CAAkB,cAAc,aAAa,CAAA;AACtD","file":"chunk-DD6ONEBN.cjs","sourcesContent":["import { type EdgeType, type NodeType } from \"../core/types\";\n\n// ============================================================\n// Brand Key\n// ============================================================\n\n/** Brand key for MetaEdge */\nexport const META_EDGE_BRAND = \"__metaEdge\" as const;\n\n// ============================================================\n// Inference Types\n// ============================================================\n\n/**\n * How a meta-edge affects queries and validation.\n */\nexport type InferenceType =\n | \"subsumption\" // Query for X includes instances of subclasses\n | \"hierarchy\" // Enables broader/narrower traversal\n | \"substitution\" // Can substitute equivalent types\n | \"constraint\" // Validation rules\n | \"composition\" // Part-whole navigation\n | \"association\" // Discovery/recommendation\n | \"none\"; // No automatic inference\n\n// ============================================================\n// Meta-Edge 
Properties\n// ============================================================\n\n/**\n * Properties of a meta-edge.\n */\nexport type MetaEdgeProperties = Readonly<{\n transitive: boolean; // A→B, B→C implies A→C\n symmetric: boolean; // A→B implies B→A\n reflexive: boolean; // A→A is always true\n inverse: string | undefined; // Name of inverse meta-edge\n inference: InferenceType; // How this affects queries\n description: string | undefined;\n}>;\n\n// ============================================================\n// Meta-Edge Type\n// ============================================================\n\n/**\n * A meta-edge definition.\n *\n * Meta-edges represent type-level relationships (between kinds),\n * not instance-level relationships (between nodes).\n */\nexport type MetaEdge<K extends string = string> = Readonly<{\n [META_EDGE_BRAND]: true;\n name: K;\n properties: MetaEdgeProperties;\n}>;\n\n// ============================================================\n// Ontology Relation\n// ============================================================\n\n/**\n * A relation in the ontology (instance of meta-edge between types).\n *\n * @example\n * ```typescript\n * // Podcast subClassOf Media\n * subClassOf(Podcast, Media)\n *\n * // Person equivalentTo schema:Person\n * equivalentTo(Person, \"https://schema.org/Person\")\n * ```\n */\nexport type OntologyRelation = Readonly<{\n metaEdge: MetaEdge;\n from: NodeType | EdgeType | string; // string for external IRIs\n to: NodeType | EdgeType | string;\n}>;\n\n// ============================================================\n// Type Guards\n// ============================================================\n\n/**\n * Checks if a value is a MetaEdge.\n */\nexport function isMetaEdge(value: unknown): value is MetaEdge {\n return (\n typeof value === \"object\" &&\n value !== null &&\n META_EDGE_BRAND in value &&\n (value as Record<string, unknown>)[META_EDGE_BRAND] === true\n );\n}\n\n/**\n * Gets the type name from a NodeType, 
EdgeType, or IRI string.\n */\nexport function getTypeName(typeOrIri: NodeType | EdgeType | string): string {\n if (typeof typeOrIri === \"string\") {\n return typeOrIri;\n }\n return typeOrIri.kind;\n}\n","/**\n * Contextual Validation Utilities\n *\n * Provides Zod validation wrappers that include full context about\n * which entity (node/edge) and operation (create/update) failed.\n *\n * @example\n * ```typescript\n * const props = validateNodeProps(schema, input, {\n * kind: \"Person\",\n * operation: \"create\",\n * });\n * ```\n */\n\nimport { type ZodError, type ZodType } from \"zod\";\n\nimport { ValidationError, type ValidationIssue } from \"./index\";\n\n// ============================================================\n// Types\n// ============================================================\n\n/**\n * Context for validation operations.\n */\nexport type ValidationContext = Readonly<{\n /** Type of entity being validated */\n entityType: \"node\" | \"edge\";\n /** Kind/type name of the entity */\n kind: string;\n /** Operation being performed */\n operation: \"create\" | \"update\";\n /** Entity ID (for updates) */\n id?: string;\n}>;\n\n// ============================================================\n// Validation Functions\n// ============================================================\n\n/**\n * Converts Zod issues to ValidationIssue format.\n */\nfunction zodIssuesToValidationIssues(error: ZodError): ValidationIssue[] {\n return error.issues.map((issue) => ({\n path: issue.path.join(\".\"),\n message: issue.message,\n code: issue.code,\n }));\n}\n\n/**\n * Builds a descriptive location string for error messages.\n */\nfunction buildLocationString(context: ValidationContext): string {\n if (context.id) {\n return `${context.kind}/${context.id}`;\n }\n return `new ${context.kind}`;\n}\n\n/**\n * Validates props with full context for error messages.\n *\n * @param schema - Zod schema to validate against\n * @param props - Properties to validate\n * 
@param context - Context about the entity and operation\n * @returns Validated and transformed props\n * @throws ValidationError with full context if validation fails\n *\n * @example\n * ```typescript\n * const validatedProps = validateProps(personSchema, input, {\n * entityType: \"node\",\n * kind: \"Person\",\n * operation: \"create\",\n * });\n * ```\n */\nexport function validateProps<T>(\n schema: ZodType<T>,\n props: unknown,\n context: ValidationContext,\n): T {\n const result = schema.safeParse(props);\n\n if (result.success) {\n return result.data;\n }\n\n const issues = zodIssuesToValidationIssues(result.error);\n const location = buildLocationString(context);\n\n throw new ValidationError(\n `Invalid ${context.entityType} props for ${location}: ${result.error.message}`,\n {\n entityType: context.entityType,\n kind: context.kind,\n operation: context.operation,\n ...(context.id !== undefined && { id: context.id }),\n issues,\n },\n { cause: result.error },\n );\n}\n\n/**\n * Validates node props with full context.\n *\n * Convenience wrapper around validateProps for node operations.\n *\n * @example\n * ```typescript\n * const props = validateNodeProps(schema, input, {\n * kind: \"Person\",\n * operation: \"create\",\n * });\n * ```\n */\nexport function validateNodeProps<T>(\n schema: ZodType<T>,\n props: unknown,\n context: Readonly<{\n kind: string;\n operation: \"create\" | \"update\";\n id?: string;\n }>,\n): T {\n return validateProps(schema, props, {\n entityType: \"node\",\n ...context,\n });\n}\n\n/**\n * Validates edge props with full context.\n *\n * Convenience wrapper around validateProps for edge operations.\n *\n * @example\n * ```typescript\n * const props = validateEdgeProps(schema, input, {\n * kind: \"worksAt\",\n * operation: \"create\",\n * });\n * ```\n */\nexport function validateEdgeProps<T>(\n schema: ZodType<T>,\n props: unknown,\n context: Readonly<{\n kind: string;\n operation: \"create\" | \"update\";\n id?: string;\n }>,\n): 
T {\n return validateProps(schema, props, {\n entityType: \"edge\",\n ...context,\n });\n}\n\n/**\n * Wraps a Zod error with TypeGraph context.\n *\n * Use this when you've already caught a ZodError and want to\n * convert it to a ValidationError with context.\n *\n * @example\n * ```typescript\n * try {\n * schema.parse(input);\n * } catch (error) {\n * if (error instanceof ZodError) {\n * throw wrapZodError(error, {\n * entityType: \"node\",\n * kind: \"Person\",\n * operation: \"create\",\n * });\n * }\n * throw error;\n * }\n * ```\n */\nexport function wrapZodError(\n error: ZodError,\n context: ValidationContext,\n): ValidationError {\n const issues = zodIssuesToValidationIssues(error);\n const location = buildLocationString(context);\n\n return new ValidationError(\n `Validation failed for ${context.entityType} ${location}: ${error.message}`,\n {\n entityType: context.entityType,\n kind: context.kind,\n operation: context.operation,\n ...(context.id !== undefined && { id: context.id }),\n issues,\n },\n { cause: error },\n );\n}\n\n/**\n * Creates a simple ValidationError without Zod context.\n *\n * Use this for custom validation rules that aren't part of a Zod schema.\n *\n * @example\n * ```typescript\n * if (startDate > endDate) {\n * throw createValidationError(\n * \"Start date must be before end date\",\n * [{ path: \"startDate\", message: \"Must be before endDate\" }],\n * { entityType: \"edge\", kind: \"employment\", operation: \"create\" }\n * );\n * }\n * ```\n */\nexport function createValidationError(\n message: string,\n issues: ValidationIssue[],\n context?: Partial<ValidationContext>,\n): ValidationError {\n return new ValidationError(message, {\n ...(context?.entityType !== undefined && {\n entityType: context.entityType,\n }),\n ...(context?.kind !== undefined && { kind: context.kind }),\n ...(context?.operation !== undefined && { operation: context.operation }),\n ...(context?.id !== undefined && { id: context.id }),\n issues,\n 
});\n}\n","/**\n * Schema migration utilities.\n *\n * Provides diff detection between schema versions to identify\n * what has changed and what migrations might be needed.\n */\nimport {\n type SerializedEdgeDef,\n type SerializedNodeDef,\n type SerializedOntology,\n type SerializedSchema,\n} from \"./types\";\n\n// ============================================================\n// Change Types\n// ============================================================\n\n/**\n * Types of changes that can occur in a schema.\n */\nexport type ChangeType = \"added\" | \"removed\" | \"modified\" | \"renamed\";\n\n/**\n * Severity of a change for migration purposes.\n */\nexport type ChangeSeverity =\n | \"safe\" // No data migration needed\n | \"warning\" // Might need attention\n | \"breaking\"; // Requires data migration\n\n// ============================================================\n// Node Changes\n// ============================================================\n\n/**\n * A change to a node definition.\n */\nexport type NodeChange = Readonly<{\n type: ChangeType;\n kind: string;\n severity: ChangeSeverity;\n details: string;\n before?: SerializedNodeDef | undefined;\n after?: SerializedNodeDef | undefined;\n}>;\n\n// ============================================================\n// Edge Changes\n// ============================================================\n\n/**\n * A change to an edge definition.\n */\nexport type EdgeChange = Readonly<{\n type: ChangeType;\n kind: string;\n severity: ChangeSeverity;\n details: string;\n before?: SerializedEdgeDef | undefined;\n after?: SerializedEdgeDef | undefined;\n}>;\n\n// ============================================================\n// Ontology Changes\n// ============================================================\n\n/**\n * A change to the ontology.\n */\nexport type OntologyChange = Readonly<{\n type: ChangeType;\n entity: \"metaEdge\" | \"relation\";\n name: string;\n severity: ChangeSeverity;\n details: string;\n}>;\n\n// 
============================================================\n// Schema Diff\n// ============================================================\n\n/**\n * A complete diff between two schema versions.\n */\nexport type SchemaDiff = Readonly<{\n fromVersion: number;\n toVersion: number;\n\n /** Changes to node definitions */\n nodes: readonly NodeChange[];\n\n /** Changes to edge definitions */\n edges: readonly EdgeChange[];\n\n /** Changes to ontology */\n ontology: readonly OntologyChange[];\n\n /** Whether any breaking changes exist */\n hasBreakingChanges: boolean;\n\n /** Whether the change is backwards compatible (no breaking changes) */\n isBackwardsCompatible: boolean;\n\n /** Whether any changes exist at all */\n hasChanges: boolean;\n\n /** Summary of changes */\n summary: string;\n}>;\n\n// ============================================================\n// Diff Computation\n// ============================================================\n\n/**\n * Computes the diff between two schema versions.\n *\n * @param before - The previous schema version\n * @param after - The new schema version\n * @returns A diff describing all changes\n */\nexport function computeSchemaDiff(\n before: SerializedSchema,\n after: SerializedSchema,\n): SchemaDiff {\n const nodeChanges = diffNodes(before.nodes, after.nodes);\n const edgeChanges = diffEdges(before.edges, after.edges);\n const ontologyChanges = diffOntology(before.ontology, after.ontology);\n\n const allChanges = [...nodeChanges, ...edgeChanges, ...ontologyChanges];\n const hasBreakingChanges = allChanges.some(\n (change) => change.severity === \"breaking\",\n );\n const hasChanges = allChanges.length > 0;\n\n const summary = generateSummary(nodeChanges, edgeChanges, ontologyChanges);\n\n return {\n fromVersion: before.version,\n toVersion: after.version,\n nodes: nodeChanges,\n edges: edgeChanges,\n ontology: ontologyChanges,\n hasBreakingChanges,\n isBackwardsCompatible: !hasBreakingChanges,\n hasChanges,\n summary,\n 
};\n}\n\n// ============================================================\n// Node Diff\n// ============================================================\n\n/**\n * Computes changes between node definitions.\n */\nfunction diffNodes(\n before: Record<string, SerializedNodeDef>,\n after: Record<string, SerializedNodeDef>,\n): readonly NodeChange[] {\n const changes: NodeChange[] = [];\n const beforeNames = new Set(Object.keys(before));\n const afterNames = new Set(Object.keys(after));\n\n // Find removed nodes\n for (const name of beforeNames) {\n if (!afterNames.has(name)) {\n changes.push({\n type: \"removed\",\n kind: name,\n severity: \"breaking\",\n details: `Node kind \"${name}\" was removed`,\n before: before[name],\n });\n }\n }\n\n // Find added nodes\n for (const name of afterNames) {\n if (!beforeNames.has(name)) {\n changes.push({\n type: \"added\",\n kind: name,\n severity: \"safe\",\n details: `Node kind \"${name}\" was added`,\n after: after[name],\n });\n }\n }\n\n // Find modified nodes\n for (const name of beforeNames) {\n if (afterNames.has(name)) {\n const nodeBefore = before[name]!;\n const nodeAfter = after[name]!;\n const nodeChanges = diffNodeDef(name, nodeBefore, nodeAfter);\n changes.push(...nodeChanges);\n }\n }\n\n return changes;\n}\n\n/**\n * Computes changes to a single node definition.\n */\nfunction diffNodeDef(\n name: string,\n before: SerializedNodeDef,\n after: SerializedNodeDef,\n): readonly NodeChange[] {\n const changes: NodeChange[] = [];\n\n // Check property schema changes\n const propsBefore = JSON.stringify(before.properties);\n const propsAfter = JSON.stringify(after.properties);\n if (propsBefore !== propsAfter) {\n // Determine if properties were added or removed\n const beforeProps = before.properties.properties ?? {};\n const afterProps = after.properties.properties ?? 
{};\n const beforeRequired = new Set(before.properties.required);\n const afterRequired = new Set(after.properties.required);\n\n const addedProps = Object.keys(afterProps).filter(\n (p) => !(p in beforeProps),\n );\n const removedProps = Object.keys(beforeProps).filter(\n (p) => !(p in afterProps),\n );\n const newRequired = [...afterRequired].filter(\n (p) => !beforeRequired.has(p),\n );\n\n const { severity, details } = computePropertyChangeSeverity(\n name,\n removedProps,\n addedProps,\n newRequired,\n );\n\n changes.push({\n type: \"modified\",\n kind: name,\n severity,\n details,\n before,\n after,\n });\n }\n\n // Check onDelete behavior\n if (before.onDelete !== after.onDelete) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"warning\",\n details: `onDelete changed from \"${before.onDelete}\" to \"${after.onDelete}\" for \"${name}\"`,\n before,\n after,\n });\n }\n\n // Check unique constraints\n const constraintsBefore = JSON.stringify(before.uniqueConstraints);\n const constraintsAfter = JSON.stringify(after.uniqueConstraints);\n if (constraintsBefore !== constraintsAfter) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"warning\",\n details: `Unique constraints changed for \"${name}\"`,\n before,\n after,\n });\n }\n\n return changes;\n}\n\n/**\n * Computes the severity and details message for property changes.\n */\nfunction computePropertyChangeSeverity(\n name: string,\n removedProps: readonly string[],\n addedProps: readonly string[],\n newRequired: readonly string[],\n): { severity: ChangeSeverity; details: string } {\n if (removedProps.length > 0) {\n return {\n severity: \"breaking\",\n details: `Properties removed from \"${name}\": ${removedProps.join(\", \")}`,\n };\n }\n if (newRequired.length > 0) {\n return {\n severity: \"breaking\",\n details: `New required properties in \"${name}\": ${newRequired.join(\", \")}`,\n };\n }\n if (addedProps.length > 0) {\n return {\n severity: \"safe\",\n details: 
`Properties added to \"${name}\": ${addedProps.join(\", \")}`,\n };\n }\n return {\n severity: \"safe\",\n details: `Properties changed in \"${name}\"`,\n };\n}\n\n// ============================================================\n// Edge Diff\n// ============================================================\n\n/**\n * Computes changes between edge definitions.\n */\nfunction diffEdges(\n before: Record<string, SerializedEdgeDef>,\n after: Record<string, SerializedEdgeDef>,\n): readonly EdgeChange[] {\n const changes: EdgeChange[] = [];\n const beforeNames = new Set(Object.keys(before));\n const afterNames = new Set(Object.keys(after));\n\n // Find removed edges\n for (const name of beforeNames) {\n if (!afterNames.has(name)) {\n changes.push({\n type: \"removed\",\n kind: name,\n severity: \"breaking\",\n details: `Edge kind \"${name}\" was removed`,\n before: before[name],\n });\n }\n }\n\n // Find added edges\n for (const name of afterNames) {\n if (!beforeNames.has(name)) {\n changes.push({\n type: \"added\",\n kind: name,\n severity: \"safe\",\n details: `Edge kind \"${name}\" was added`,\n after: after[name],\n });\n }\n }\n\n // Find modified edges\n for (const name of beforeNames) {\n if (afterNames.has(name)) {\n const edgeBefore = before[name]!;\n const edgeAfter = after[name]!;\n const edgeChanges = diffEdgeDef(name, edgeBefore, edgeAfter);\n changes.push(...edgeChanges);\n }\n }\n\n return changes;\n}\n\n/**\n * Computes changes to a single edge definition.\n */\nfunction diffEdgeDef(\n name: string,\n before: SerializedEdgeDef,\n after: SerializedEdgeDef,\n): readonly EdgeChange[] {\n const changes: EdgeChange[] = [];\n\n // Check endpoint kinds\n const fromBefore = JSON.stringify(before.fromKinds);\n const fromAfter = JSON.stringify(after.fromKinds);\n if (fromBefore !== fromAfter) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"warning\",\n details: `fromKinds changed for \"${name}\"`,\n before,\n after,\n });\n }\n\n const toBefore 
= JSON.stringify(before.toKinds);\n const toAfter = JSON.stringify(after.toKinds);\n if (toBefore !== toAfter) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"warning\",\n details: `toKinds changed for \"${name}\"`,\n before,\n after,\n });\n }\n\n // Check cardinality\n if (before.cardinality !== after.cardinality) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"warning\",\n details: `Cardinality changed from \"${before.cardinality}\" to \"${after.cardinality}\" for \"${name}\"`,\n before,\n after,\n });\n }\n\n // Check properties\n const propsBefore = JSON.stringify(before.properties);\n const propsAfter = JSON.stringify(after.properties);\n if (propsBefore !== propsAfter) {\n changes.push({\n type: \"modified\",\n kind: name,\n severity: \"safe\",\n details: `Properties changed for \"${name}\"`,\n before,\n after,\n });\n }\n\n return changes;\n}\n\n// ============================================================\n// Ontology Diff\n// ============================================================\n\n/**\n * Computes changes to the ontology.\n */\nfunction diffOntology(\n before: SerializedOntology,\n after: SerializedOntology,\n): readonly OntologyChange[] {\n const changes: OntologyChange[] = [];\n\n // Diff meta-edges\n const metaEdgesBefore = new Set(Object.keys(before.metaEdges));\n const metaEdgesAfter = new Set(Object.keys(after.metaEdges));\n\n for (const name of metaEdgesBefore) {\n if (!metaEdgesAfter.has(name)) {\n changes.push({\n type: \"removed\",\n entity: \"metaEdge\",\n name,\n severity: \"breaking\",\n details: `Meta-edge \"${name}\" was removed`,\n });\n }\n }\n\n for (const name of metaEdgesAfter) {\n if (!metaEdgesBefore.has(name)) {\n changes.push({\n type: \"added\",\n entity: \"metaEdge\",\n name,\n severity: \"safe\",\n details: `Meta-edge \"${name}\" was added`,\n });\n }\n }\n\n // Diff relations (simplified - just detect additions/removals)\n const relationsBefore = new Set(\n 
before.relations.map((r) => `${r.metaEdge}:${r.from}:${r.to}`),\n );\n const relationsAfter = new Set(\n after.relations.map((r) => `${r.metaEdge}:${r.from}:${r.to}`),\n );\n\n for (const relationKey of relationsBefore) {\n if (!relationsAfter.has(relationKey)) {\n const [metaEdge, from, to] = relationKey.split(\":\");\n changes.push({\n type: \"removed\",\n entity: \"relation\",\n name: relationKey,\n severity: \"warning\",\n details: `Relation ${metaEdge}(${from}, ${to}) was removed`,\n });\n }\n }\n\n for (const relationKey of relationsAfter) {\n if (!relationsBefore.has(relationKey)) {\n const [metaEdge, from, to] = relationKey.split(\":\");\n changes.push({\n type: \"added\",\n entity: \"relation\",\n name: relationKey,\n severity: \"safe\",\n details: `Relation ${metaEdge}(${from}, ${to}) was added`,\n });\n }\n }\n\n return changes;\n}\n\n// ============================================================\n// Summary Generation\n// ============================================================\n\n/**\n * Generates a human-readable summary of changes.\n */\nfunction generateSummary(\n nodeChanges: readonly NodeChange[],\n edgeChanges: readonly EdgeChange[],\n ontologyChanges: readonly OntologyChange[],\n): string {\n const parts: string[] = [];\n\n const nodeAdded = nodeChanges.filter((c) => c.type === \"added\").length;\n const nodeRemoved = nodeChanges.filter((c) => c.type === \"removed\").length;\n const nodeModified = nodeChanges.filter((c) => c.type === \"modified\").length;\n\n if (nodeAdded > 0 || nodeRemoved > 0 || nodeModified > 0) {\n parts.push(\n `Nodes: ${nodeAdded} added, ${nodeRemoved} removed, ${nodeModified} modified`,\n );\n }\n\n const edgeAdded = edgeChanges.filter((c) => c.type === \"added\").length;\n const edgeRemoved = edgeChanges.filter((c) => c.type === \"removed\").length;\n const edgeModified = edgeChanges.filter((c) => c.type === \"modified\").length;\n\n if (edgeAdded > 0 || edgeRemoved > 0 || edgeModified > 0) {\n parts.push(\n 
`Edges: ${edgeAdded} added, ${edgeRemoved} removed, ${edgeModified} modified`,\n );\n }\n\n const ontologyAdded = ontologyChanges.filter(\n (c) => c.type === \"added\",\n ).length;\n const ontologyRemoved = ontologyChanges.filter(\n (c) => c.type === \"removed\",\n ).length;\n\n if (ontologyAdded > 0 || ontologyRemoved > 0) {\n parts.push(`Ontology: ${ontologyAdded} added, ${ontologyRemoved} removed`);\n }\n\n if (parts.length === 0) {\n return \"No changes\";\n }\n\n return parts.join(\"; \");\n}\n\n// ============================================================\n// Migration Helpers\n// ============================================================\n\n/**\n * Checks if a schema change is backwards compatible.\n *\n * A change is backwards compatible if:\n * - No nodes or edges were removed\n * - No required properties were added\n * - No existing properties were removed\n */\nexport function isBackwardsCompatible(diff: SchemaDiff): boolean {\n return !diff.hasBreakingChanges;\n}\n\n/**\n * Gets a list of actions needed for migration.\n */\nexport function getMigrationActions(diff: SchemaDiff): readonly string[] {\n const actions: string[] = [];\n\n for (const change of diff.nodes) {\n if (change.type === \"removed\") {\n actions.push(`DELETE data for removed node kind \"${change.kind}\"`);\n }\n if (change.severity === \"breaking\" && change.type === \"modified\") {\n actions.push(\n `MIGRATE data for node kind \"${change.kind}\": ${change.details}`,\n );\n }\n }\n\n for (const change of diff.edges) {\n if (change.type === \"removed\") {\n actions.push(`DELETE data for removed edge kind \"${change.kind}\"`);\n }\n }\n\n return actions;\n}\n","/**\n * Transitive closure computation for ontology relationships.\n *\n * Uses Warshall's algorithm for efficient closure computation.\n */\n\n/**\n * Computes the transitive closure of a set of directed relations.\n *\n * Given relations like [A→B, B→C], computes all transitive paths [A→B, A→C, B→C].\n *\n * @param relations 
- Array of [from, to] pairs representing direct relationships\n * @returns Map from each 'from' to the set of all reachable 'to' values\n */\nexport function computeTransitiveClosure(\n relations: readonly (readonly [string, string])[],\n): ReadonlyMap<string, ReadonlySet<string>> {\n // Build mutable map for computation\n const closure = new Map<string, Set<string>>();\n\n // Collect all nodes\n const allNodes = new Set<string>();\n for (const [from, to] of relations) {\n allNodes.add(from);\n allNodes.add(to);\n }\n\n // Initialize empty sets for all nodes\n for (const node of allNodes) {\n closure.set(node, new Set());\n }\n\n // Add direct relationships\n for (const [from, to] of relations) {\n closure.get(from)?.add(to);\n }\n\n // Warshall's algorithm for transitive closure\n // For each intermediate node k, check if i→k and k→j implies i→j\n for (const k of allNodes) {\n for (const index of allNodes) {\n const indexReaches = closure.get(index);\n if (!indexReaches?.has(k)) continue;\n\n const kReaches = closure.get(k);\n if (!kReaches) continue;\n\n for (const index of kReaches) {\n indexReaches.add(index);\n }\n }\n }\n\n return closure;\n}\n\n/**\n * Computes the inverse of a transitive closure map.\n *\n * Given A→{B, C}, returns B→{A}, C→{A}.\n */\nexport function invertClosure(\n closure: ReadonlyMap<string, ReadonlySet<string>>,\n): ReadonlyMap<string, ReadonlySet<string>> {\n const result = new Map<string, Set<string>>();\n\n for (const [from, tos] of closure) {\n for (const to of tos) {\n const existing = result.get(to) ?? new Set();\n existing.add(from);\n result.set(to, existing);\n }\n }\n\n return result;\n}\n\n/**\n * Checks if there's a path from source to target in the closure.\n */\nexport function isReachable(\n closure: ReadonlyMap<string, ReadonlySet<string>>,\n source: string,\n target: string,\n): boolean {\n return closure.get(source)?.has(target) ?? 
false;\n}\n","/**\n * Named constants for ontology meta-edge names.\n *\n * Use these constants instead of string literals for type safety\n * and IDE support.\n */\n\n// ============================================================\n// Meta-Edge Names\n// ============================================================\n\n/** Type inheritance (Podcast subClassOf Media) */\nexport const META_EDGE_SUB_CLASS_OF = \"subClassOf\" as const;\n\n/** Broader concept (ML broader AI) */\nexport const META_EDGE_BROADER = \"broader\" as const;\n\n/** Narrower concept (AI narrower ML) */\nexport const META_EDGE_NARROWER = \"narrower\" as const;\n\n/** Non-hierarchical association */\nexport const META_EDGE_RELATED_TO = \"relatedTo\" as const;\n\n/** Same class, different representation */\nexport const META_EDGE_EQUIVALENT_TO = \"equivalentTo\" as const;\n\n/** Same individual (for deduplication) */\nexport const META_EDGE_SAME_AS = \"sameAs\" as const;\n\n/** Explicitly different individuals */\nexport const META_EDGE_DIFFERENT_FROM = \"differentFrom\" as const;\n\n/** Mutually exclusive types */\nexport const META_EDGE_DISJOINT_WITH = \"disjointWith\" as const;\n\n/** X is part of Y */\nexport const META_EDGE_PART_OF = \"partOf\" as const;\n\n/** Y has part X */\nexport const META_EDGE_HAS_PART = \"hasPart\" as const;\n\n/** Edge A is inverse of edge B */\nexport const META_EDGE_INVERSE_OF = \"inverseOf\" as const;\n\n/** Edge A implies edge B exists */\nexport const META_EDGE_IMPLIES = \"implies\" as const;\n\n// ============================================================\n// All Meta-Edge Names (for validation)\n// ============================================================\n\nexport const ALL_META_EDGE_NAMES = [\n META_EDGE_SUB_CLASS_OF,\n META_EDGE_BROADER,\n META_EDGE_NARROWER,\n META_EDGE_RELATED_TO,\n META_EDGE_EQUIVALENT_TO,\n META_EDGE_SAME_AS,\n META_EDGE_DIFFERENT_FROM,\n META_EDGE_DISJOINT_WITH,\n META_EDGE_PART_OF,\n META_EDGE_HAS_PART,\n META_EDGE_INVERSE_OF,\n 
META_EDGE_IMPLIES,\n] as const;\n\nexport type MetaEdgeName = (typeof ALL_META_EDGE_NAMES)[number];\n","import { type AnyEdgeType, type NodeType } from \"../core/types\";\nimport {\n computeTransitiveClosure,\n invertClosure,\n isReachable,\n} from \"../ontology/closures\";\nimport {\n META_EDGE_BROADER,\n META_EDGE_DISJOINT_WITH,\n META_EDGE_EQUIVALENT_TO,\n META_EDGE_HAS_PART,\n META_EDGE_IMPLIES,\n META_EDGE_INVERSE_OF,\n META_EDGE_NARROWER,\n META_EDGE_PART_OF,\n META_EDGE_SAME_AS,\n META_EDGE_SUB_CLASS_OF,\n} from \"../ontology/constants\";\nimport { type OntologyRelation } from \"../ontology/types\";\n\n/**\n * KindRegistry holds precomputed closures for ontological reasoning.\n *\n * Computed at store initialization and cached for fast query-time lookups.\n */\nexport class KindRegistry {\n // === Node & Edge Kinds ===\n readonly nodeKinds: ReadonlyMap<string, NodeType>;\n readonly edgeKinds: ReadonlyMap<string, AnyEdgeType>;\n\n // === Subsumption (subClassOf) ===\n // Transitive closure for inheritance\n readonly subClassAncestors: ReadonlyMap<string, ReadonlySet<string>>;\n readonly subClassDescendants: ReadonlyMap<string, ReadonlySet<string>>;\n\n // === Hierarchy (broader/narrower) ===\n // Transitive closure for concept hierarchy (separate from subClassOf!)\n readonly broaderClosure: ReadonlyMap<string, ReadonlySet<string>>;\n readonly narrowerClosure: ReadonlyMap<string, ReadonlySet<string>>;\n\n // === Equivalence ===\n readonly equivalenceSets: ReadonlyMap<string, ReadonlySet<string>>;\n readonly iriToKind: ReadonlyMap<string, string>;\n\n // === Constraints ===\n readonly disjointPairs: ReadonlySet<string>; // Normalized pairs: \"Organization|Person\"\n\n // === Composition ===\n readonly partOfClosure: ReadonlyMap<string, ReadonlySet<string>>;\n readonly hasPartClosure: ReadonlyMap<string, ReadonlySet<string>>;\n\n // === Edge Relationships ===\n readonly edgeInverses: ReadonlyMap<string, string>;\n readonly edgeImplicationsClosure: 
ReadonlyMap<string, ReadonlySet<string>>;\n readonly edgeImplyingClosure: ReadonlyMap<string, ReadonlySet<string>>;\n\n constructor(\n nodeKinds: ReadonlyMap<string, NodeType>,\n edgeKinds: ReadonlyMap<string, AnyEdgeType>,\n closures: {\n subClassAncestors: ReadonlyMap<string, ReadonlySet<string>>;\n subClassDescendants: ReadonlyMap<string, ReadonlySet<string>>;\n broaderClosure: ReadonlyMap<string, ReadonlySet<string>>;\n narrowerClosure: ReadonlyMap<string, ReadonlySet<string>>;\n equivalenceSets: ReadonlyMap<string, ReadonlySet<string>>;\n iriToKind: ReadonlyMap<string, string>;\n disjointPairs: ReadonlySet<string>;\n partOfClosure: ReadonlyMap<string, ReadonlySet<string>>;\n hasPartClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeInverses: ReadonlyMap<string, string>;\n edgeImplicationsClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeImplyingClosure: ReadonlyMap<string, ReadonlySet<string>>;\n },\n ) {\n this.nodeKinds = nodeKinds;\n this.edgeKinds = edgeKinds;\n this.subClassAncestors = closures.subClassAncestors;\n this.subClassDescendants = closures.subClassDescendants;\n this.broaderClosure = closures.broaderClosure;\n this.narrowerClosure = closures.narrowerClosure;\n this.equivalenceSets = closures.equivalenceSets;\n this.iriToKind = closures.iriToKind;\n this.disjointPairs = closures.disjointPairs;\n this.partOfClosure = closures.partOfClosure;\n this.hasPartClosure = closures.hasPartClosure;\n this.edgeInverses = closures.edgeInverses;\n this.edgeImplicationsClosure = closures.edgeImplicationsClosure;\n this.edgeImplyingClosure = closures.edgeImplyingClosure;\n }\n\n // === Subsumption Methods ===\n\n /**\n * Checks if child is a subclass of parent (directly or transitively).\n */\n isSubClassOf(child: string, parent: string): boolean {\n return isReachable(this.subClassAncestors, child, parent);\n }\n\n /**\n * Expands a kind to include all its subclasses.\n * Returns [kind, ...subclasses].\n */\n expandSubClasses(kind: string): 
readonly string[] {\n const descendants = this.subClassDescendants.get(kind) ?? new Set();\n return [kind, ...descendants];\n }\n\n /**\n * Gets all ancestors of a kind (via subClassOf).\n */\n getAncestors(kind: string): ReadonlySet<string> {\n return this.subClassAncestors.get(kind) ?? new Set();\n }\n\n /**\n * Gets all descendants of a kind (via subClassOf).\n */\n getDescendants(kind: string): ReadonlySet<string> {\n return this.subClassDescendants.get(kind) ?? new Set();\n }\n\n // === Hierarchy Methods ===\n\n /**\n * Checks if narrowerConcept is narrower than broaderConcept.\n */\n isNarrowerThan(narrowerConcept: string, broaderConcept: string): boolean {\n return isReachable(this.broaderClosure, narrowerConcept, broaderConcept);\n }\n\n /**\n * Checks if broaderConcept is broader than narrowerConcept.\n */\n isBroaderThan(broaderConcept: string, narrowerConcept: string): boolean {\n return isReachable(this.narrowerClosure, broaderConcept, narrowerConcept);\n }\n\n /**\n * Expands to include all narrower concepts.\n */\n expandNarrower(kind: string): readonly string[] {\n const narrower = this.narrowerClosure.get(kind) ?? new Set();\n return [kind, ...narrower];\n }\n\n /**\n * Expands to include all broader concepts.\n */\n expandBroader(kind: string): readonly string[] {\n const broader = this.broaderClosure.get(kind) ?? new Set();\n return [kind, ...broader];\n }\n\n // === Equivalence Methods ===\n\n /**\n * Checks if two kinds are equivalent.\n */\n areEquivalent(a: string, b: string): boolean {\n const equivalents = this.equivalenceSets.get(a);\n return equivalents?.has(b) ?? false;\n }\n\n /**\n * Gets all equivalents of a kind (including external IRIs).\n */\n getEquivalents(kind: string): readonly string[] {\n const equivalents = this.equivalenceSets.get(kind);\n return equivalents ? 
[...equivalents] : [];\n }\n\n /**\n * Resolves an external IRI to an internal kind name.\n */\n resolveIri(iri: string): string | undefined {\n return this.iriToKind.get(iri);\n }\n\n // === Constraint Methods ===\n\n /**\n * Checks if two kinds are disjoint.\n */\n areDisjoint(a: string, b: string): boolean {\n const normalizedPair = a < b ? `${a}|${b}` : `${b}|${a}`;\n return this.disjointPairs.has(normalizedPair);\n }\n\n /**\n * Gets all kinds that are disjoint with the given kind.\n */\n getDisjointKinds(kind: string): readonly string[] {\n const result: string[] = [];\n for (const pair of this.disjointPairs) {\n const parts = pair.split(\"|\");\n const a = parts[0]!;\n const b = parts[1]!;\n if (a === kind) result.push(b);\n else if (b === kind) result.push(a);\n }\n return result;\n }\n\n // === Composition Methods ===\n\n /**\n * Checks if part is part of whole (directly or transitively).\n */\n isPartOf(part: string, whole: string): boolean {\n return isReachable(this.partOfClosure, part, whole);\n }\n\n /**\n * Gets all wholes that contain this part.\n */\n getWholes(part: string): readonly string[] {\n const wholes = this.partOfClosure.get(part);\n return wholes ? [...wholes] : [];\n }\n\n /**\n * Gets all parts of this whole.\n */\n getParts(whole: string): readonly string[] {\n const parts = this.hasPartClosure.get(whole);\n return parts ? [...parts] : [];\n }\n\n // === Edge Relationship Methods ===\n\n /**\n * Gets the inverse edge kind for a given edge kind.\n * If edgeA inverseOf edgeB, then getInverseEdge(\"edgeA\") returns \"edgeB\".\n */\n getInverseEdge(edgeKind: string): string | undefined {\n return this.edgeInverses.get(edgeKind);\n }\n\n /**\n * Gets all edges implied by a given edge (transitively).\n * If A implies B and B implies C, then getImpliedEdges(\"A\") returns [\"B\", \"C\"].\n */\n getImpliedEdges(edgeKind: string): readonly string[] {\n const implied = this.edgeImplicationsClosure.get(edgeKind);\n return implied ? 
[...implied] : [];\n }\n\n /**\n * Gets all edges that imply a given edge (transitively).\n * If A implies B and B implies C, then getImplyingEdges(\"C\") returns [\"A\", \"B\"].\n * Used for query-time expansion: when querying for C, also include A and B edges.\n */\n getImplyingEdges(edgeKind: string): readonly string[] {\n const implying = this.edgeImplyingClosure.get(edgeKind);\n return implying ? [...implying] : [];\n }\n\n /**\n * Expands an edge kind to include all edges that imply it.\n * Returns [edgeKind, ...implyingEdges].\n */\n expandImplyingEdges(edgeKind: string): readonly string[] {\n const implying = this.edgeImplyingClosure.get(edgeKind) ?? new Set();\n return [edgeKind, ...implying];\n }\n\n // === Edge Endpoint Validation ===\n\n /**\n * Checks if a concrete kind is assignable to a target kind.\n * Uses subsumption: Company is assignable to Organization if Company subClassOf Organization.\n */\n isAssignableTo(concreteKind: string, targetKind: string): boolean {\n if (concreteKind === targetKind) return true;\n return this.isSubClassOf(concreteKind, targetKind);\n }\n\n /**\n * Validates that a kind exists in the registry.\n */\n hasNodeType(name: string): boolean {\n return this.nodeKinds.has(name);\n }\n\n /**\n * Validates that an edge kind exists in the registry.\n */\n hasEdgeType(name: string): boolean {\n return this.edgeKinds.has(name);\n }\n\n /**\n * Gets a node kind by name.\n */\n getNodeType(name: string): NodeType | undefined {\n return this.nodeKinds.get(name);\n }\n\n /**\n * Gets an edge kind by name.\n */\n getEdgeType(name: string): AnyEdgeType | undefined {\n return this.edgeKinds.get(name);\n }\n}\n\n/**\n * Builder function to create empty closures.\n */\nexport function createEmptyClosures(): {\n subClassAncestors: ReadonlyMap<string, ReadonlySet<string>>;\n subClassDescendants: ReadonlyMap<string, ReadonlySet<string>>;\n broaderClosure: ReadonlyMap<string, ReadonlySet<string>>;\n narrowerClosure: ReadonlyMap<string, 
ReadonlySet<string>>;\n equivalenceSets: ReadonlyMap<string, ReadonlySet<string>>;\n iriToKind: ReadonlyMap<string, string>;\n disjointPairs: ReadonlySet<string>;\n partOfClosure: ReadonlyMap<string, ReadonlySet<string>>;\n hasPartClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeInverses: ReadonlyMap<string, string>;\n edgeImplicationsClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeImplyingClosure: ReadonlyMap<string, ReadonlySet<string>>;\n} {\n return {\n subClassAncestors: new Map(),\n subClassDescendants: new Map(),\n broaderClosure: new Map(),\n narrowerClosure: new Map(),\n equivalenceSets: new Map(),\n iriToKind: new Map(),\n disjointPairs: new Set(),\n partOfClosure: new Map(),\n hasPartClosure: new Map(),\n edgeInverses: new Map(),\n edgeImplicationsClosure: new Map(),\n edgeImplyingClosure: new Map(),\n };\n}\n\n/**\n * Computes all closures from an ontology.\n */\nexport function computeClosuresFromOntology(\n ontology: readonly OntologyRelation[],\n): {\n subClassAncestors: ReadonlyMap<string, ReadonlySet<string>>;\n subClassDescendants: ReadonlyMap<string, ReadonlySet<string>>;\n broaderClosure: ReadonlyMap<string, ReadonlySet<string>>;\n narrowerClosure: ReadonlyMap<string, ReadonlySet<string>>;\n equivalenceSets: ReadonlyMap<string, ReadonlySet<string>>;\n iriToKind: ReadonlyMap<string, string>;\n disjointPairs: ReadonlySet<string>;\n partOfClosure: ReadonlyMap<string, ReadonlySet<string>>;\n hasPartClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeInverses: ReadonlyMap<string, string>;\n edgeImplicationsClosure: ReadonlyMap<string, ReadonlySet<string>>;\n edgeImplyingClosure: ReadonlyMap<string, ReadonlySet<string>>;\n} {\n // Collect relations by type\n const subClassRelations: [string, string][] = [];\n const broaderRelations: [string, string][] = [];\n const equivalentRelations: [string, string][] = [];\n const disjointRelations: [string, string][] = [];\n const partOfRelations: [string, string][] = [];\n const 
inverseOfRelations: [string, string][] = [];\n const impliesRelations: [string, string][] = [];\n\n for (const relation of ontology) {\n const fromName = getKindName(relation.from);\n const toName = getKindName(relation.to);\n\n switch (relation.metaEdge.name) {\n case META_EDGE_SUB_CLASS_OF: {\n subClassRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_BROADER: {\n broaderRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_NARROWER: {\n // narrower is inverse of broader\n broaderRelations.push([toName, fromName]);\n break;\n }\n case META_EDGE_EQUIVALENT_TO:\n case META_EDGE_SAME_AS: {\n equivalentRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_DISJOINT_WITH: {\n disjointRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_PART_OF: {\n partOfRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_HAS_PART: {\n // hasPart is inverse of partOf\n partOfRelations.push([toName, fromName]);\n break;\n }\n case META_EDGE_INVERSE_OF: {\n // inverseOf is symmetric: if A inverseOf B, then B inverseOf A\n inverseOfRelations.push([fromName, toName]);\n break;\n }\n case META_EDGE_IMPLIES: {\n impliesRelations.push([fromName, toName]);\n break;\n }\n }\n }\n\n // Compute subClassOf closures\n const subClassAncestors = computeTransitiveClosure(subClassRelations);\n const subClassDescendants = invertClosure(subClassAncestors);\n\n // Compute broader/narrower closures\n const broaderClosure = computeTransitiveClosure(broaderRelations);\n const narrowerClosure = invertClosure(broaderClosure);\n\n // Compute equivalence sets and IRI mappings\n const equivalenceSets = computeEquivalenceSets(equivalentRelations);\n const iriToKind = computeIriMapping(equivalentRelations);\n\n // Compute disjoint pairs (normalize for symmetric lookup)\n const disjointPairs = computeDisjointPairs(disjointRelations);\n\n // Compute partOf closures\n const partOfClosure = computeTransitiveClosure(partOfRelations);\n const 
hasPartClosure = invertClosure(partOfClosure);\n\n // Compute edge inverses (symmetric: store both directions)\n const edgeInverses = computeEdgeInverses(inverseOfRelations);\n\n // Compute edge implications closure (transitive)\n // edgeImplicationsClosure: A -> [B, C] means A implies B and C\n // edgeImplyingClosure: C -> [A, B] means A and B imply C (inverse direction)\n const edgeImplicationsClosure = computeTransitiveClosure(impliesRelations);\n const edgeImplyingClosure = invertClosure(edgeImplicationsClosure);\n\n return {\n subClassAncestors,\n subClassDescendants,\n broaderClosure,\n narrowerClosure,\n equivalenceSets,\n iriToKind,\n disjointPairs,\n partOfClosure,\n hasPartClosure,\n edgeInverses,\n edgeImplicationsClosure,\n edgeImplyingClosure,\n };\n}\n\n/**\n * Gets the name from a NodeType, EdgeType, or string.\n */\nfunction getKindName(kindOrIri: NodeType | AnyEdgeType | string): string {\n if (typeof kindOrIri === \"string\") {\n return kindOrIri;\n }\n return kindOrIri.kind;\n}\n\n/**\n * Checks if a string is an external IRI (not a local kind name).\n */\nfunction isExternalIri(value: string): boolean {\n return value.startsWith(\"http://\") || value.startsWith(\"https://\");\n}\n\n/**\n * Computes equivalence sets (symmetric + transitive closure).\n */\nfunction computeEquivalenceSets(\n relations: readonly (readonly [string, string])[],\n): ReadonlyMap<string, ReadonlySet<string>> {\n // Use union-find to compute equivalence classes\n const parent = new Map<string, string>();\n\n function find(x: string): string {\n if (!parent.has(x)) {\n parent.set(x, x);\n return x;\n }\n // Safe: has() check above guarantees key exists\n const p = parent.get(x)!;\n if (p === x) return x;\n const root = find(p);\n parent.set(x, root); // Path compression\n return root;\n }\n\n function union(a: string, b: string): void {\n const rootA = find(a);\n const rootB = find(b);\n if (rootA !== rootB) {\n parent.set(rootA, rootB);\n }\n }\n\n // Build equivalence 
classes\n for (const [a, b] of relations) {\n union(a, b);\n }\n\n // Collect all members of each equivalence class\n const classes = new Map<string, Set<string>>();\n for (const key of parent.keys()) {\n const root = find(key);\n const existing = classes.get(root) ?? new Set();\n existing.add(key);\n classes.set(root, existing);\n }\n\n // Build result: each node maps to its equivalence set\n const result = new Map<string, ReadonlySet<string>>();\n for (const members of classes.values()) {\n for (const member of members) {\n // Exclude self from equivalence set\n const others = new Set(members);\n others.delete(member);\n result.set(member, others);\n }\n }\n\n return result;\n}\n\n/**\n * Computes mapping from external IRIs to internal kind names.\n */\nfunction computeIriMapping(\n relations: readonly (readonly [string, string])[],\n): ReadonlyMap<string, string> {\n const result = new Map<string, string>();\n\n for (const [a, b] of relations) {\n // If one is an IRI and other is a kind name, map IRI → kind\n if (isExternalIri(a) && !isExternalIri(b)) {\n result.set(a, b);\n } else if (isExternalIri(b) && !isExternalIri(a)) {\n result.set(b, a);\n }\n }\n\n return result;\n}\n\n/**\n * Computes normalized disjoint pairs.\n */\nfunction computeDisjointPairs(\n relations: readonly (readonly [string, string])[],\n): ReadonlySet<string> {\n const result = new Set<string>();\n\n for (const [a, b] of relations) {\n // Normalize pair for consistent lookup\n const normalized = a < b ? 
`${a}|${b}` : `${b}|${a}`;\n result.add(normalized);\n }\n\n return result;\n}\n\n/**\n * Computes edge inverse mapping (symmetric: stores both directions).\n */\nfunction computeEdgeInverses(\n relations: readonly (readonly [string, string])[],\n): ReadonlyMap<string, string> {\n const result = new Map<string, string>();\n\n for (const [a, b] of relations) {\n // inverseOf is symmetric: A inverseOf B means B inverseOf A too\n result.set(a, b);\n result.set(b, a);\n }\n\n return result;\n}\n","/**\n * Schema serializer for homoiconic storage.\n *\n * Converts a GraphDef to a SerializedSchema for database storage.\n * Uses Zod's toJSONSchema() for property schema serialization.\n */\nimport { z } from \"zod\";\n\nimport {\n getEdgeKinds,\n getNodeKinds,\n type GraphDef,\n} from \"../core/define-graph\";\nimport {\n type EdgeRegistration,\n type NodeRegistration,\n type UniqueConstraint,\n} from \"../core/types\";\nimport {\n getTypeName,\n type MetaEdge,\n type OntologyRelation,\n} from \"../ontology/types\";\nimport { computeClosuresFromOntology } from \"../registry/kind-registry\";\nimport { nowIso } from \"../utils/date\";\nimport {\n type JsonSchema,\n type SchemaHash,\n type SerializedClosures,\n type SerializedEdgeDef,\n type SerializedMetaEdge,\n type SerializedNodeDef,\n type SerializedOntology,\n type SerializedOntologyRelation,\n type SerializedSchema,\n type SerializedUniqueConstraint,\n} from \"./types\";\n\n// ============================================================\n// Main Serialization\n// ============================================================\n\n/**\n * Serializes a GraphDef to a SerializedSchema.\n *\n * @param graph - The graph definition to serialize\n * @param version - The schema version number\n * @returns The serialized schema\n */\nexport function serializeSchema<G extends GraphDef>(\n graph: G,\n version: number,\n): SerializedSchema {\n const nodes = serializeNodes(graph);\n const edges = serializeEdges(graph);\n const ontology = 
serializeOntology(graph.ontology);\n\n return {\n graphId: graph.id,\n version,\n generatedAt: nowIso(),\n nodes,\n edges,\n ontology,\n defaults: {\n onNodeDelete: graph.defaults.onNodeDelete,\n temporalMode: graph.defaults.temporalMode,\n },\n };\n}\n\n// ============================================================\n// Node Serialization\n// ============================================================\n\n/**\n * Serializes all node definitions.\n */\nfunction serializeNodes<G extends GraphDef>(\n graph: G,\n): Record<string, SerializedNodeDef> {\n const result: Record<string, SerializedNodeDef> = {};\n\n for (const kindName of getNodeKinds(graph)) {\n const registration = graph.nodes[kindName];\n if (registration === undefined) continue;\n result[kindName] = serializeNodeDef(registration);\n }\n\n return result;\n}\n\n/**\n * Serializes a single node registration.\n */\nfunction serializeNodeDef(registration: NodeRegistration): SerializedNodeDef {\n const node = registration.type;\n\n return {\n kind: node.kind,\n properties: serializeZodSchema(node.schema),\n uniqueConstraints: serializeUniqueConstraints(registration.unique ?? []),\n onDelete: registration.onDelete ?? \"restrict\",\n description: node.description,\n };\n}\n\n/**\n * Serializes unique constraints.\n */\nfunction serializeUniqueConstraints(\n constraints: readonly UniqueConstraint[],\n): readonly SerializedUniqueConstraint[] {\n return constraints.map((constraint) => ({\n name: constraint.name,\n fields: [...constraint.fields],\n where:\n constraint.where ? 
serializeWherePredicate(constraint.where) : undefined,\n scope: constraint.scope,\n collation: constraint.collation,\n }));\n}\n\n/**\n * A serialized predicate structure (matches UniqueConstraintPredicate from core/types).\n */\ntype SerializedPredicate = Readonly<{\n __type: \"unique_predicate\";\n field: string;\n op: \"isNull\" | \"isNotNull\";\n}>;\n\n/**\n * Field builder returned by the predicate proxy.\n */\ntype FieldPredicateBuilder = Readonly<{\n isNull: () => SerializedPredicate;\n isNotNull: () => SerializedPredicate;\n}>;\n\n/**\n * Predicate builder type for where clause serialization.\n */\ntype PredicateBuilder = Readonly<Record<string, FieldPredicateBuilder>>;\n\n/**\n * Serializes a where predicate function to a JSON-serializable structure.\n *\n * The where function is called with a proxy builder that captures the\n * field and operation, which can then be serialized and later deserialized.\n */\nfunction serializeWherePredicate(\n whereFunction: (builder: PredicateBuilder) => SerializedPredicate,\n): string {\n // Create a proxy builder that captures the predicate structure\n const builder = new Proxy({} as PredicateBuilder, {\n get(_target, field: string): FieldPredicateBuilder {\n return {\n isNull: (): SerializedPredicate => ({\n __type: \"unique_predicate\",\n field,\n op: \"isNull\" as const,\n }),\n isNotNull: (): SerializedPredicate => ({\n __type: \"unique_predicate\",\n field,\n op: \"isNotNull\" as const,\n }),\n };\n },\n });\n\n // Call the where function to get the predicate\n const predicate = whereFunction(builder);\n\n // Serialize the predicate structure as JSON\n return JSON.stringify({ field: predicate.field, op: predicate.op });\n}\n\n/**\n * Deserializes a where predicate JSON back to a predicate function.\n *\n * This can be used to reconstruct a UniqueConstraint's where clause\n * from a serialized schema.\n *\n * @param serialized - The JSON string from serialization\n * @returns A where function that returns the 
predicate structure\n */\n/**\n * Unique predicate result type.\n */\ntype UniquePredicate = Readonly<{\n __type: \"unique_predicate\";\n field: string;\n op: \"isNull\" | \"isNotNull\";\n}>;\n\nexport function deserializeWherePredicate(\n serialized: string,\n): (builder: PredicateBuilder) => UniquePredicate {\n const parsed = JSON.parse(serialized) as {\n field: string;\n op: \"isNull\" | \"isNotNull\";\n };\n\n return (builder: PredicateBuilder): UniquePredicate => {\n const fieldBuilder = builder[parsed.field];\n if (!fieldBuilder) {\n throw new Error(`Unknown field in where predicate: ${parsed.field}`);\n }\n\n const result =\n parsed.op === \"isNull\" ? fieldBuilder.isNull() : fieldBuilder.isNotNull();\n return {\n __type: \"unique_predicate\",\n field: result.field,\n op: result.op,\n };\n };\n}\n\n// ============================================================\n// Edge Serialization\n// ============================================================\n\n/**\n * Serializes all edge definitions.\n */\nfunction serializeEdges<G extends GraphDef>(\n graph: G,\n): Record<string, SerializedEdgeDef> {\n const result: Record<string, SerializedEdgeDef> = {};\n\n for (const kindName of getEdgeKinds(graph)) {\n const registration = graph.edges[kindName];\n if (registration === undefined) continue;\n result[kindName] = serializeEdgeDef(registration);\n }\n\n return result;\n}\n\n/**\n * Serializes a single edge registration.\n */\nfunction serializeEdgeDef(registration: EdgeRegistration): SerializedEdgeDef {\n const edge = registration.type;\n\n return {\n kind: edge.kind,\n fromKinds: registration.from.map((node) => node.kind),\n toKinds: registration.to.map((node) => node.kind),\n properties: serializeZodSchema(edge.schema),\n cardinality: registration.cardinality ?? \"many\",\n endpointExistence: registration.endpointExistence ?? 
\"notDeleted\",\n description: edge.description,\n };\n}\n\n// ============================================================\n// Ontology Serialization\n// ============================================================\n\n/**\n * Serializes the complete ontology.\n */\nfunction serializeOntology(\n relations: readonly OntologyRelation[],\n): SerializedOntology {\n // Collect unique meta-edges\n const metaEdgeMap = new Map<string, MetaEdge>();\n for (const relation of relations) {\n const metaEdge = relation.metaEdge;\n if (!metaEdgeMap.has(metaEdge.name)) {\n metaEdgeMap.set(metaEdge.name, metaEdge);\n }\n }\n\n // Serialize meta-edges\n const metaEdges: Record<string, SerializedMetaEdge> = {};\n for (const [name, metaEdge] of metaEdgeMap) {\n metaEdges[name] = serializeMetaEdge(metaEdge);\n }\n\n // Serialize relations\n const serializedRelations = relations.map((relation) =>\n serializeOntologyRelation(relation),\n );\n\n // Compute and serialize closures\n const closures = serializeClosures(relations);\n\n return {\n metaEdges,\n relations: serializedRelations,\n closures,\n };\n}\n\n/**\n * Serializes a meta-edge.\n */\nfunction serializeMetaEdge(metaEdge: MetaEdge): SerializedMetaEdge {\n return {\n name: metaEdge.name,\n transitive: metaEdge.properties.transitive,\n symmetric: metaEdge.properties.symmetric,\n reflexive: metaEdge.properties.reflexive,\n inverse: metaEdge.properties.inverse,\n inference: metaEdge.properties.inference,\n description: metaEdge.properties.description,\n };\n}\n\n/**\n * Serializes an ontology relation.\n */\nfunction serializeOntologyRelation(\n relation: OntologyRelation,\n): SerializedOntologyRelation {\n return {\n metaEdge: relation.metaEdge.name,\n from: getTypeName(relation.from),\n to: getTypeName(relation.to),\n };\n}\n\n/**\n * Serializes precomputed closures.\n */\nfunction serializeClosures(\n relations: readonly OntologyRelation[],\n): SerializedClosures {\n if (relations.length === 0) {\n return {\n subClassAncestors: 
{},\n subClassDescendants: {},\n broaderClosure: {},\n narrowerClosure: {},\n equivalenceSets: {},\n disjointPairs: [],\n partOfClosure: {},\n hasPartClosure: {},\n iriToKind: {},\n edgeInverses: {},\n edgeImplicationsClosure: {},\n edgeImplyingClosure: {},\n };\n }\n\n const computed = computeClosuresFromOntology(relations);\n\n return {\n subClassAncestors: mapToRecord(computed.subClassAncestors),\n subClassDescendants: mapToRecord(computed.subClassDescendants),\n broaderClosure: mapToRecord(computed.broaderClosure),\n narrowerClosure: mapToRecord(computed.narrowerClosure),\n equivalenceSets: mapToRecord(computed.equivalenceSets),\n disjointPairs: [...computed.disjointPairs],\n partOfClosure: mapToRecord(computed.partOfClosure),\n hasPartClosure: mapToRecord(computed.hasPartClosure),\n iriToKind: mapToSimpleRecord(computed.iriToKind),\n edgeInverses: mapToSimpleRecord(computed.edgeInverses),\n edgeImplicationsClosure: mapToRecord(computed.edgeImplicationsClosure),\n edgeImplyingClosure: mapToRecord(computed.edgeImplyingClosure),\n };\n}\n\n/**\n * Converts a Map<string, Set<string>> to Record<string, string[]>.\n */\nfunction mapToRecord(\n map: ReadonlyMap<string, ReadonlySet<string>>,\n): Record<string, readonly string[]> {\n const result: Record<string, readonly string[]> = {};\n for (const [key, values] of map) {\n result[key] = [...values];\n }\n return result;\n}\n\n/**\n * Converts a Map<string, string> to Record<string, string>.\n */\nfunction mapToSimpleRecord(\n map: ReadonlyMap<string, string>,\n): Record<string, string> {\n const result: Record<string, string> = {};\n for (const [key, value] of map) {\n result[key] = value;\n }\n return result;\n}\n\n// ============================================================\n// Zod Schema Serialization\n// ============================================================\n\n/**\n * Serializes a Zod schema to JSON Schema.\n *\n * Uses Zod 4's toJSONSchema() method for conversion.\n */\nfunction 
serializeZodSchema(schema: z.ZodType): JsonSchema {\n try {\n // Zod 4 has toJSONSchema as a standard export\n const jsonSchema = z.toJSONSchema(schema);\n return jsonSchema as JsonSchema;\n } catch {\n // Fallback for schemas that can't be converted\n return { type: \"object\" };\n }\n}\n\n// ============================================================\n// Schema Hashing\n// ============================================================\n\n/**\n * Computes a hash of the schema content for change detection.\n *\n * Excludes version and generatedAt since those change on every save.\n */\nexport async function computeSchemaHash(\n schema: SerializedSchema,\n): Promise<SchemaHash> {\n // Create a hashable representation excluding dynamic fields\n const hashable = {\n graphId: schema.graphId,\n nodes: schema.nodes,\n edges: schema.edges,\n ontology: schema.ontology,\n defaults: schema.defaults,\n };\n\n // Serialize with sorted keys for deterministic output\n const json = JSON.stringify(hashable, sortedReplacer);\n return sha256Hash(json);\n}\n\n/**\n * JSON replacer that sorts object keys for deterministic serialization.\n */\nfunction sortedReplacer(_key: string, value: unknown): unknown {\n if (value !== null && typeof value === \"object\" && !Array.isArray(value)) {\n const sorted: Record<string, unknown> = {};\n for (const key of Object.keys(value).toSorted()) {\n sorted[key] = (value as Record<string, unknown>)[key];\n }\n return sorted;\n }\n return value;\n}\n\n/**\n * Computes SHA-256 hash of a string using the Web Crypto API.\n *\n * Works in Node.js 16+, Cloudflare Workers, Deno, and browsers.\n * Returns first 16 hex characters (64 bits) for a compact but collision-resistant hash.\n */\nasync function sha256Hash(input: string): Promise<string> {\n const encoded = new TextEncoder().encode(input);\n const digest = await globalThis.crypto.subtle.digest(\"SHA-256\", encoded);\n const bytes = new Uint8Array(digest);\n // Only need 8 bytes (16 hex chars) — avoid 
converting the full 32-byte digest\n let hex = \"\";\n for (let index = 0; index < 8; index++) {\n const byte = bytes[index];\n if (byte === undefined) break;\n hex += byte.toString(16).padStart(2, \"0\");\n }\n return hex;\n}\n","/**\n * Schema manager for TypeGraph.\n *\n * Provides schema lifecycle management:\n * - Initialization on first store creation\n * - Validation on store open\n * - Auto-migration for safe changes\n * - Error reporting for breaking changes\n */\nimport { type GraphBackend, type SchemaVersionRow } from \"../backend/types\";\nimport { type GraphDef } from \"../core/define-graph\";\nimport { MigrationError } from \"../errors\";\nimport {\n computeSchemaDiff,\n getMigrationActions,\n isBackwardsCompatible,\n type SchemaDiff,\n} from \"./migration\";\nimport { computeSchemaHash, serializeSchema } from \"./serializer\";\nimport { type SerializedSchema } from \"./types\";\n\n// ============================================================\n// Types\n// ============================================================\n\n/**\n * Result of schema validation.\n */\nexport type SchemaValidationResult =\n | { status: \"initialized\"; version: number }\n | { status: \"unchanged\"; version: number }\n | {\n status: \"migrated\";\n fromVersion: number;\n toVersion: number;\n diff: SchemaDiff;\n }\n | { status: \"pending\"; version: number; diff: SchemaDiff }\n | { status: \"breaking\"; diff: SchemaDiff; actions: readonly string[] };\n\n/**\n * Context passed to migration lifecycle hooks.\n *\n * Hooks are intended for observability (logging, metrics, alerts),\n * not for data transformations. Use an explicit migration runner\n * for backfill scripts — see the schema evolution guide.\n */\nexport type MigrationHookContext = Readonly<{\n graphId: string;\n fromVersion: number;\n toVersion: number;\n diff: SchemaDiff;\n}>;\n\n/**\n * Options for schema management.\n */\nexport type SchemaManagerOptions = Readonly<{\n /** If true, auto-migrate safe changes. 
Default: true */\n autoMigrate?: boolean;\n /** If true, throw on breaking changes. Default: true */\n throwOnBreaking?: boolean;\n /** Called before a safe auto-migration is applied. For observability only. */\n onBeforeMigrate?: (context: MigrationHookContext) => void | Promise<void>;\n /** Called after a safe auto-migration is applied. For observability only. */\n onAfterMigrate?: (context: MigrationHookContext) => void | Promise<void>;\n}>;\n\n// ============================================================\n// Schema Manager\n// ============================================================\n\n/**\n * Ensures the schema is initialized and up-to-date.\n *\n * This is the main entry point for schema management. It:\n * 1. Initializes the schema if this is the first run (version 1)\n * 2. Returns \"unchanged\" if the schema matches the current graph\n * 3. Auto-migrates safe changes if autoMigrate is true\n * 4. Throws MigrationError for breaking changes if throwOnBreaking is true\n *\n * @param backend - The database backend\n * @param graph - The current graph definition\n * @param options - Schema management options\n * @returns The result of schema validation\n * @throws MigrationError if breaking changes detected and throwOnBreaking is true\n */\nexport async function ensureSchema<G extends GraphDef>(\n backend: GraphBackend,\n graph: G,\n options?: SchemaManagerOptions,\n): Promise<SchemaValidationResult> {\n const autoMigrate = options?.autoMigrate ?? true;\n const throwOnBreaking = options?.throwOnBreaking ?? 
true;\n\n // Get the active schema from the database\n const activeSchema = await backend.getActiveSchema(graph.id);\n\n if (!activeSchema) {\n // No schema exists - initialize with version 1\n const result = await initializeSchema(backend, graph);\n return { status: \"initialized\", version: result.version };\n }\n\n // Parse the stored schema\n const storedSchema = JSON.parse(activeSchema.schema_doc) as SerializedSchema;\n\n // Serialize the current graph for comparison\n const currentSchema = serializeSchema(graph, activeSchema.version + 1);\n\n // Quick hash check - if hashes match, schemas are identical\n const storedHash = activeSchema.schema_hash;\n const currentHash = await computeSchemaHash(currentSchema);\n\n if (storedHash === currentHash) {\n return { status: \"unchanged\", version: activeSchema.version };\n }\n\n // Hashes differ - compute the diff\n const diff = computeSchemaDiff(storedSchema, currentSchema);\n\n if (!diff.hasChanges) {\n // Hash changed but no semantic changes (shouldn't happen, but handle it)\n return { status: \"unchanged\", version: activeSchema.version };\n }\n\n // Check if changes are backwards compatible\n if (isBackwardsCompatible(diff)) {\n if (autoMigrate) {\n // Safe changes - auto-migrate\n const hookContext: MigrationHookContext = {\n graphId: graph.id,\n fromVersion: activeSchema.version,\n toVersion: activeSchema.version + 1,\n diff,\n };\n await options?.onBeforeMigrate?.(hookContext);\n const newVersion = await migrateSchema(\n backend,\n graph,\n activeSchema.version,\n );\n await options?.onAfterMigrate?.(hookContext);\n return {\n status: \"migrated\",\n fromVersion: activeSchema.version,\n toVersion: newVersion,\n diff,\n };\n }\n // Auto-migrate disabled but changes are safe\n return {\n status: \"pending\",\n version: activeSchema.version,\n diff,\n };\n }\n\n // Breaking changes detected\n const actions = getMigrationActions(diff);\n\n if (throwOnBreaking) {\n throw new MigrationError(\n `Schema migration 
required: ${diff.summary}. ` +\n `${actions.length} migration action(s) needed. ` +\n `Use getSchemaChanges() to review, then migrateSchema() to apply.`,\n {\n graphId: graph.id,\n fromVersion: activeSchema.version,\n toVersion: activeSchema.version + 1,\n },\n );\n }\n\n return { status: \"breaking\", diff, actions };\n}\n\n/**\n * Initializes the schema for a new graph.\n *\n * Creates version 1 of the schema and marks it as active.\n *\n * @param backend - The database backend\n * @param graph - The graph definition\n * @returns The created schema version row\n */\nexport async function initializeSchema<G extends GraphDef>(\n backend: GraphBackend,\n graph: G,\n): Promise<SchemaVersionRow> {\n const schema = serializeSchema(graph, 1);\n const hash = await computeSchemaHash(schema);\n\n return backend.insertSchema({\n graphId: graph.id,\n version: 1,\n schemaHash: hash,\n schemaDoc: schema,\n isActive: true,\n });\n}\n\n/**\n * Migrates the schema to match the current graph definition.\n *\n * This creates a new schema version and marks it as active.\n * The old version is preserved for history/rollback.\n *\n * @param backend - The database backend\n * @param graph - The current graph definition\n * @param currentVersion - The current active schema version\n * @returns The new version number\n */\nexport async function migrateSchema<G extends GraphDef>(\n backend: GraphBackend,\n graph: G,\n currentVersion: number,\n): Promise<number> {\n const newVersion = currentVersion + 1;\n const schema = serializeSchema(graph, newVersion);\n const hash = await computeSchemaHash(schema);\n\n // Insert new version (not active yet)\n await backend.insertSchema({\n graphId: graph.id,\n version: newVersion,\n schemaHash: hash,\n schemaDoc: schema,\n isActive: false,\n });\n\n // Atomically switch to the new version\n await backend.setActiveSchema(graph.id, newVersion);\n\n return newVersion;\n}\n\n/**\n * Rolls back the active schema to a previous version.\n *\n * The target 
version must already exist in the version history.\n * This does not delete newer versions — it simply switches the active pointer.\n *\n * @param backend - The database backend\n * @param graphId - The graph ID\n * @param targetVersion - The version to roll back to\n * @throws MigrationError if the target version does not exist\n */\nexport async function rollbackSchema(\n backend: GraphBackend,\n graphId: string,\n targetVersion: number,\n): Promise<void> {\n const row = await backend.getSchemaVersion(graphId, targetVersion);\n if (!row) {\n throw new MigrationError(\n `Cannot rollback to version ${targetVersion}: version does not exist.`,\n { graphId, fromVersion: targetVersion, toVersion: targetVersion },\n );\n }\n await backend.setActiveSchema(graphId, targetVersion);\n}\n\n/**\n * Gets the current active schema for a graph.\n *\n * @param backend - The database backend\n * @param graphId - The graph ID\n * @returns The active schema or undefined if not initialized\n */\nexport async function getActiveSchema(\n backend: GraphBackend,\n graphId: string,\n): Promise<SerializedSchema | undefined> {\n const row = await backend.getActiveSchema(graphId);\n if (!row) return undefined;\n return JSON.parse(row.schema_doc) as SerializedSchema;\n}\n\n/**\n * Checks if a graph's schema has been initialized.\n *\n * @param backend - The database backend\n * @param graphId - The graph ID\n * @returns True if the schema has been initialized\n */\nexport async function isSchemaInitialized(\n backend: GraphBackend,\n graphId: string,\n): Promise<boolean> {\n const row = await backend.getActiveSchema(graphId);\n return row !== undefined;\n}\n\n/**\n * Gets the schema diff between the stored schema and current graph.\n *\n * @param backend - The database backend\n * @param graph - The current graph definition\n * @returns The diff, or undefined if schema not initialized\n */\nexport async function getSchemaChanges<G extends GraphDef>(\n backend: GraphBackend,\n graph: G,\n): 
Promise<SchemaDiff | undefined> {\n const activeSchema = await backend.getActiveSchema(graph.id);\n if (!activeSchema) return undefined;\n\n const storedSchema = JSON.parse(activeSchema.schema_doc) as SerializedSchema;\n const currentSchema = serializeSchema(graph, activeSchema.version + 1);\n\n return computeSchemaDiff(storedSchema, currentSchema);\n}\n"]}
@@ -0,0 +1,388 @@
1
+ import { createNodeRowMapper, SQLITE_ROW_MAPPER_CONFIG, createEdgeRowMapper, createUniqueRowMapper, createSchemaVersionRowMapper, createSqliteOperationStrategy, createCommonOperationBackend, nowIso, D1_CAPABILITIES, SQLITE_CAPABILITIES, compileQueryWithDialect, dropSqliteVectorIndex, createSqliteVectorIndex } from './chunk-4HARSV2G.js';
2
+ import { tables } from './chunk-OGGLFYFA.js';
3
+ import { ConfigurationError } from './chunk-O5XPCJLF.js';
4
+ import { getTableName, sql } from 'drizzle-orm';
5
+
6
// Default cap on the per-adapter prepared-statement LRU cache; the
// least-recently-used statement is evicted once this size is exceeded.
var DEFAULT_PREPARED_STATEMENT_CACHE_MAX = 256;
7
/**
 * Resolves the constructor name of the drizzle session backing `db`.
 *
 * Looks at `db.session` first, then at the `db._.session` slot some drizzle
 * wrappers use. Returns `undefined` when neither path yields a session.
 */
function getSessionName(db) {
  const wrapper = db;
  for (const candidate of [wrapper.session, wrapper._?.session]) {
    const sessionName = candidate?.constructor?.name;
    if (sessionName !== undefined) {
      return sessionName;
    }
  }
  return undefined;
}
15
/**
 * Heuristic: the database targets Cloudflare D1 when its drizzle session
 * class is named `SQLiteD1Session`.
 */
function isD1DatabaseBySessionName(db) {
  const sessionName = getSessionName(db);
  return sessionName === "SQLiteD1Session";
}
18
/**
 * Heuristic: better-sqlite3 and bun:sqlite drivers execute synchronously;
 * both are recognized by their drizzle session class names.
 */
function isSyncDatabaseBySessionName(db) {
  return ["BetterSQLiteSession", "BunSQLiteSession"].includes(getSessionName(db));
}
22
/**
 * Decides whether the drizzle database executes queries synchronously.
 *
 * Resolution order: an explicit `profileHints.isSync` wins; then known
 * session class names (better-sqlite3 / bun:sqlite are sync, D1 is async);
 * finally a live probe query whose return type (value vs. Promise) reveals
 * the execution mode.
 */
function detectSyncProfile(db, profileHints) {
  if (profileHints.isSync !== undefined) {
    return profileHints.isSync;
  }
  switch (getSessionName(db)) {
    case "BetterSQLiteSession":
    case "BunSQLiteSession":
      return true;
    case "SQLiteD1Session":
      return false;
    default:
      break;
  }
  try {
    // Sync drivers return rows directly; async drivers return a Promise.
    const probeResult = db.get(sql`SELECT 1 AS __typegraph_sync_probe__`);
    return !(probeResult instanceof Promise);
  } catch {
    // Probe failed (driver rejected the ad-hoc query); fall back to names.
    return isSyncDatabaseBySessionName(db);
  }
}
40
/**
 * Decides whether the database targets Cloudflare D1. An explicit hint
 * wins; otherwise fall back to session-name sniffing.
 */
function detectD1Profile(db, profileHints) {
  const { isD1 } = profileHints;
  return isD1 === undefined ? isD1DatabaseBySessionName(db) : isD1;
}
46
/**
 * Extracts the raw SQLite client (`db.$client`) when it exposes a
 * `prepare` method; returns `undefined` for drivers without one.
 */
function resolveSqliteClient(db) {
  const client = db.$client;
  return client?.prepare === undefined ? undefined : client;
}
54
/**
 * Fetches the prepared statement for `sqlText` from the LRU `cache`,
 * preparing and inserting it on a miss.
 *
 * The Map's insertion order doubles as recency order: hits are re-inserted
 * to mark them most-recently-used, and the first key is evicted once the
 * cache grows past `cacheMax`.
 */
function getOrCreatePreparedStatement(cache, sqliteClient, sqlText, cacheMax) {
  const hit = cache.get(sqlText);
  if (hit !== undefined) {
    // Re-insert to refresh recency (Map iteration follows insertion order).
    cache.delete(sqlText);
    cache.set(sqlText, hit);
    return hit;
  }
  const statement = sqliteClient.prepare(sqlText);
  cache.set(sqlText, statement);
  if (cache.size > cacheMax) {
    // Evict the least-recently-used entry: the first key in iteration order.
    const { value: lruKey } = cache.keys().next();
    if (typeof lruKey === "string") {
      cache.delete(lruKey);
    }
  }
  return statement;
}
71
/**
 * Runs a drizzle query through `db.all`, transparently handling drivers
 * whose `all` returns rows directly (sync) or a promise of rows (async).
 */
async function executeDrizzleQuery(db, query) {
  // `await` is a no-op for plain values, so this normalizes both cases.
  return await db.all(query);
}
75
/**
 * Wraps a SQL string in an executor whose `execute` re-resolves the
 * prepared statement through the shared LRU cache on every call, so an
 * eviction between calls is repaired transparently.
 */
function createPreparedStatementExecutor(sqliteClient, cache, sqlText, cacheMax) {
  const run = (params) => {
    const statement = getOrCreatePreparedStatement(cache, sqliteClient, sqlText, cacheMax);
    return Promise.resolve(statement.all(...params));
  };
  return { execute: run };
}
89
/**
 * Builds a uniform execution adapter over a drizzle SQLite database.
 *
 * @param db - Drizzle SQLite database (better-sqlite3, bun:sqlite, or D1).
 * @param statementCacheMaxOrOptions - Either a numeric statement-cache cap
 *   (legacy call form) or an options object with `statementCacheMax` and
 *   `profileHints`.
 * @returns An adapter with `compile`, `execute`, `clearStatementCache` and a
 *   `profile` describing the driver. On sync, non-D1 drivers that expose a
 *   raw `$client`, it additionally offers `executeCompiled` and `prepare`
 *   backed by a prepared-statement LRU cache.
 */
function createSqliteExecutionAdapter(db, statementCacheMaxOrOptions = {}) {
  // Back-compat: a bare number is treated as the statement-cache cap.
  const options = typeof statementCacheMaxOrOptions === "number" ? { statementCacheMax: statementCacheMaxOrOptions } : statementCacheMaxOrOptions;
  const statementCacheMax = options.statementCacheMax ?? DEFAULT_PREPARED_STATEMENT_CACHE_MAX;
  const profileHints = options.profileHints ?? {};
  const profileBase = {
    isD1: detectD1Profile(db, profileHints),
    isSync: detectSyncProfile(db, profileHints),
    sqliteClient: resolveSqliteClient(db)
  };
  // Compiled (prepared-statement) execution requires a synchronous driver
  // with direct access to the underlying client; D1 is excluded.
  const supportsCompiledExecution = profileBase.isSync && !profileBase.isD1 && profileBase.sqliteClient !== void 0;
  const profile = {
    isD1: profileBase.isD1,
    isSync: profileBase.isSync,
    supportsCompiledExecution
  };
  const compile = (query) => compileQueryWithDialect(db, query, "SQLite");
  if (supportsCompiledExecution) {
    // NOTE: executeCompiled2 closes over `sqliteClient` / `statementCache`
    // declared below; safe because it is only invoked after both are set.
    let executeCompiled2 = function(compiledQuery) {
      const preparedStatement = getOrCreatePreparedStatement(
        statementCache,
        sqliteClient,
        compiledQuery.sql,
        statementCacheMax
      );
      const rows = preparedStatement.all(...compiledQuery.params);
      return Promise.resolve(rows);
    };
    const sqliteClient = profileBase.sqliteClient;
    const statementCache = /* @__PURE__ */ new Map();
    return {
      clearStatementCache() {
        statementCache.clear();
      },
      compile,
      // Compile then run through the prepared-statement cache.
      execute(query) {
        const compiledQuery = compile(query);
        return executeCompiled2(compiledQuery);
      },
      executeCompiled: executeCompiled2,
      // Pre-bind a SQL string to an executor sharing the same cache.
      prepare(sqlText) {
        return createPreparedStatementExecutor(
          sqliteClient,
          statementCache,
          sqlText,
          statementCacheMax
        );
      },
      profile
    };
  }
  // Fallback adapter: no statement cache, execution goes through drizzle.
  return {
    clearStatementCache() {
    },
    compile,
    execute(query) {
      return executeDrizzleQuery(db, query);
    },
    profile
  };
}
149
+
150
// src/backend/drizzle/sqlite.ts

// Conservative bind-parameter ceiling per statement: SQLite's historical
// default SQLITE_MAX_VARIABLE_NUMBER (999 on older builds).
var SQLITE_MAX_BIND_PARAMETERS = 999;
// Bind parameters consumed per row by the node / edge INSERT statements.
var NODE_INSERT_PARAM_COUNT = 9;
var EDGE_INSERT_PARAM_COUNT = 12;
// Rows per INSERT batch, sized so a full batch stays under the bind limit.
var SQLITE_NODE_INSERT_BATCH_SIZE = Math.max(
  1,
  Math.floor(SQLITE_MAX_BIND_PARAMETERS / NODE_INSERT_PARAM_COUNT)
);
var SQLITE_EDGE_INSERT_BATCH_SIZE = Math.max(
  1,
  Math.floor(SQLITE_MAX_BIND_PARAMETERS / EDGE_INSERT_PARAM_COUNT)
);
// ID chunk sizes for bulk lookups; the reserved 2 / 1 slots presumably hold
// the non-ID parameters of those queries (e.g. graph id) — verify in the
// common operation backend that consumes these.
var SQLITE_GET_NODES_ID_CHUNK_SIZE = Math.max(
  1,
  SQLITE_MAX_BIND_PARAMETERS - 2
);
var SQLITE_GET_EDGES_ID_CHUNK_SIZE = Math.max(
  1,
  SQLITE_MAX_BIND_PARAMETERS - 1
);
// Row mappers translating raw SQLite result rows into typegraph row shapes,
// all configured for the SQLite dialect.
var toNodeRow = createNodeRowMapper(SQLITE_ROW_MAPPER_CONFIG);
var toEdgeRow = createEdgeRowMapper(SQLITE_ROW_MAPPER_CONFIG);
var toUniqueRow = createUniqueRowMapper(SQLITE_ROW_MAPPER_CONFIG);
var toSchemaVersionRow = createSchemaVersionRowMapper(SQLITE_ROW_MAPPER_CONFIG);
174
/**
 * Creates a FIFO queue that runs async tasks strictly one at a time.
 *
 * Each `runExclusive` call starts its task only after the previously queued
 * task has settled (fulfilled or rejected). The caller receives the task's
 * own outcome, while the internal chain swallows results so one failing
 * task never blocks or poisons the tasks queued after it.
 */
function createSerializedExecutionQueue() {
  let chain = Promise.resolve();
  return {
    async runExclusive(task) {
      const start = () => task();
      // Begin regardless of how the predecessor settled.
      const outcome = chain.then(start, start);
      // Keep the chain resolvable no matter what this task does.
      chain = outcome.then(
        () => 0,
        () => 0
      );
      return outcome;
    }
  };
}
188
/**
 * Runs `task` through `queue` when one is provided; otherwise executes it
 * immediately. Lets sync drivers serialize work while async drivers skip
 * queueing entirely.
 */
async function runWithSerializedQueue(queue, task) {
  return queue === undefined ? task() : queue.runExclusive(task);
}
192
/**
 * Assembles the SQLite operation backend shared by the top-level backend
 * and per-transaction backends.
 *
 * @param options - `capabilities`, drizzle `db`, the execution adapter,
 *   the dialect operation strategy, resolved `tableNames`, and an optional
 *   `serializedQueue` (present only for synchronous drivers).
 * @returns The common operation backend extended with SQLite-specific
 *   vector-index handling, query execution, and (when the adapter supports
 *   compiled execution) a raw SQL escape hatch.
 */
function createSqliteOperationBackend(options) {
  const {
    capabilities,
    db,
    executionAdapter,
    operationStrategy,
    serializedQueue,
    tableNames
  } = options;
  // The exec helpers funnel every statement through the serialized queue
  // (when present) and normalize sync drivers' direct return values.
  async function execGet(query) {
    return runWithSerializedQueue(serializedQueue, async () => {
      const result = db.get(query);
      return result instanceof Promise ? await result : result;
    });
  }
  async function execAll(query) {
    return runWithSerializedQueue(serializedQueue, async () => {
      const result = db.all(query);
      return result instanceof Promise ? await result : result;
    });
  }
  async function execRun(query) {
    await runWithSerializedQueue(serializedQueue, async () => {
      const result = db.run(query);
      if (result instanceof Promise) await result;
    });
  }
  // Dialect-agnostic CRUD built from the SQLite batch sizes, exec helpers
  // and row mappers defined above.
  const commonBackend = createCommonOperationBackend({
    batchConfig: {
      edgeInsertBatchSize: SQLITE_EDGE_INSERT_BATCH_SIZE,
      getEdgesChunkSize: SQLITE_GET_EDGES_ID_CHUNK_SIZE,
      getNodesChunkSize: SQLITE_GET_NODES_ID_CHUNK_SIZE,
      nodeInsertBatchSize: SQLITE_NODE_INSERT_BATCH_SIZE
    },
    execution: {
      execAll,
      execGet,
      execRun
    },
    nowIso,
    operationStrategy,
    rowMappers: {
      toEdgeRow,
      toNodeRow,
      toSchemaVersionRow,
      toUniqueRow
    }
  });
  // `executeRaw` is exposed only when the adapter can run pre-compiled
  // SQL (sync driver with direct client access).
  const executeCompiled = executionAdapter.executeCompiled;
  const executeRawMethod = executeCompiled === void 0 ? {} : {
    executeRaw(sqlText, params) {
      return runWithSerializedQueue(
        serializedQueue,
        async () => executeCompiled({ params, sql: sqlText })
      );
    }
  };
  const operationBackend = {
    ...commonBackend,
    ...executeRawMethod,
    capabilities,
    dialect: "sqlite",
    tableNames,
    // Vector index creation is delegated to the SQLite helper; optional
    // index parameters are forwarded only when explicitly provided.
    createVectorIndex(params) {
      const indexOptions = {
        graphId: params.graphId,
        nodeKind: params.nodeKind,
        fieldPath: params.fieldPath,
        dimensions: params.dimensions,
        indexType: params.indexType,
        metric: params.metric,
        ...params.indexParams?.m === void 0 ? {} : { hnswM: params.indexParams.m },
        ...params.indexParams?.efConstruction === void 0 ? {} : { hnswEfConstruction: params.indexParams.efConstruction },
        ...params.indexParams?.lists === void 0 ? {} : { ivfflatLists: params.indexParams.lists }
      };
      const result = createSqliteVectorIndex(indexOptions);
      if (!result.success) {
        throw new Error(result.message ?? "Failed to create SQLite vector index");
      }
      return Promise.resolve();
    },
    dropVectorIndex(params) {
      const result = dropSqliteVectorIndex(
        params.graphId,
        params.nodeKind,
        params.fieldPath
      );
      if (!result.success) {
        throw new Error(result.message ?? "Failed to drop SQLite vector index");
      }
      return Promise.resolve();
    },
    // === Query Execution ===
    async execute(query) {
      return runWithSerializedQueue(
        serializedQueue,
        async () => executionAdapter.execute(query)
      );
    },
    compileSql(query) {
      return executionAdapter.compile(query);
    }
  };
  return operationBackend;
}
297
/**
 * Creates the public SQLite graph backend for a drizzle database.
 *
 * Detects the driver profile (sync vs. async, D1 vs. regular SQLite),
 * selects capabilities accordingly, and layers transaction handling on top
 * of the shared operation backend.
 *
 * @param db - Drizzle SQLite database instance.
 * @param options - Optional table overrides and execution-profile hints.
 */
function createSqliteBackend(db, options = {}) {
  const tables2 = options.tables ?? tables;
  const profileHints = options.executionProfile ?? {};
  const executionAdapter = createSqliteExecutionAdapter(db, { profileHints });
  const { isD1, isSync } = executionAdapter.profile;
  const capabilities = isD1 ? D1_CAPABILITIES : SQLITE_CAPABILITIES;
  const tableNames = {
    nodes: getTableName(tables2.nodes),
    edges: getTableName(tables2.edges),
    embeddings: getTableName(tables2.embeddings)
  };
  const operationStrategy = createSqliteOperationStrategy(tables2);
  // Sync drivers share a single connection, so all work is serialized.
  const serializedQueue = isSync ? createSerializedExecutionQueue() : void 0;
  const operations = createSqliteOperationBackend({
    capabilities,
    db,
    executionAdapter,
    operationStrategy,
    tableNames,
    ...serializedQueue === void 0 ? {} : { serializedQueue }
  });
  const backend = {
    ...operations,
    // Override: wrap the base setActiveSchema in a transaction so the
    // version switch is atomic.
    // NOTE(review): on D1 `transaction` below throws, which makes this
    // setActiveSchema path unusable on D1 — confirm intended.
    async setActiveSchema(graphId, version) {
      await backend.transaction(async (txBackend) => {
        await txBackend.setActiveSchema(graphId, version);
      });
    },
    async transaction(fn, _options) {
      // D1 cannot roll back, so refuse rather than silently lose atomicity.
      if (isD1) {
        throw new ConfigurationError(
          "Cloudflare D1 does not support atomic transactions. Operations within a transaction are not rolled back on failure. Use backend.capabilities.transactions to check for transaction support, or use individual operations with manual error handling.",
          {
            backend: "D1",
            capability: "transactions",
            supportsTransactions: false
          }
        );
      }
      if (isSync) {
        // Sync path: issue BEGIN/COMMIT/ROLLBACK manually inside the
        // serialized queue so no other statement interleaves. `db.run`
        // results are intentionally not awaited — the driver is sync.
        return runWithSerializedQueue(serializedQueue, async () => {
          const txBackend = createTransactionBackend({
            capabilities,
            db,
            executionAdapter,
            operationStrategy,
            profileHints: { isD1: false, isSync: true },
            tableNames
          });
          db.run(sql`BEGIN`);
          try {
            const result = await fn(txBackend);
            db.run(sql`COMMIT`);
            return result;
          } catch (error) {
            db.run(sql`ROLLBACK`);
            throw error;
          }
        });
      }
      // Async path: delegate to drizzle's transaction API, rebuilding the
      // operation backend over the transaction handle `tx`.
      return db.transaction(async (tx) => {
        const txBackend = createTransactionBackend({
          capabilities,
          db: tx,
          operationStrategy,
          profileHints: { isD1: false, isSync: false },
          tableNames
        });
        return fn(txBackend);
      });
    },
    // No-op: the caller owns (and closes) the underlying database handle.
    async close() {
    }
  };
  return backend;
}
373
/**
 * Builds an operation backend scoped to a transaction handle. Reuses the
 * caller's execution adapter when supplied; otherwise creates a fresh one
 * over the transaction's database using the given profile hints.
 */
function createTransactionBackend(options) {
  const { capabilities, db, operationStrategy, profileHints, tableNames } = options;
  const executionAdapter =
    options.executionAdapter ??
    createSqliteExecutionAdapter(db, { profileHints });
  return createSqliteOperationBackend({
    capabilities,
    db,
    executionAdapter,
    operationStrategy,
    tableNames
  });
}
385
+
386
// Public entry point of this chunk; everything above is internal plumbing.
export { createSqliteBackend };
387
//# sourceMappingURL=chunk-F2BZSEFE.js.map